| column | dtype | stats |
|---|---|---|
| repo_name | string | lengths 5–114 |
| repo_url | string | lengths 24–133 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| directory_id | string | length 40 |
| branch_name | string | 209 classes |
| visit_date | timestamp[ns] | |
| revision_date | timestamp[ns] | |
| committer_date | timestamp[ns] | |
| github_id | int64 | 9.83k–683M, nullable (⌀) |
| star_events_count | int64 | 0–22.6k |
| fork_events_count | int64 | 0–4.15k |
| gha_license_id | string | 17 classes |
| gha_created_at | timestamp[ns] | |
| gha_updated_at | timestamp[ns] | |
| gha_pushed_at | timestamp[ns] | |
| gha_language | string | 115 classes |
| files | list | lengths 1–13.2k |
| num_files | int64 | 1–13.2k |
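Each row below lists the scalar columns in the order above, pipe-separated, followed by the `files` list (one JSON object per file, carrying the per-file metadata and the full `text`) and the trailing `num_files` value. As a minimal sketch of how rows with this schema might be consumed (assuming the dump has been exported as JSON Lines; the `rows.jsonl` filename is hypothetical):

```python
import json

# Minimal sketch: iterate rows matching the schema above from a
# hypothetical JSON Lines export ("rows.jsonl" is an assumed name,
# not part of the original dump).
with open("rows.jsonl", encoding="utf-8") as fh:
    for line in fh:
        row = json.loads(line)
        # Repo-level columns.
        print(row["repo_name"], row["branch_name"], row["num_files"])
        # `files` holds one dict of per-file metadata per file.
        for f in row["files"]:
            if f["language"] == "Python" and not f["is_generated"]:
                print("  ", f["path"], f["length_bytes"], "bytes")
```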
akjadoon/sets-to-spotify | https://github.com/akjadoon/sets-to-spotify | 4180382a25325c16fc3e50784943eb3a360bed13 | 2808f1560de01b19eb53364808e369145bae2ef6 | 5af9c5b7ec866c62da5394be1c6cadce45dfc4be | refs/heads/master | 2023-01-23T22:29:51.093503 | 2019-12-03T05:30:50 | 2019-12-03T05:30:50 | 221,070,747 | 1 | 0 | null | 2019-11-11T21:10:58 | 2019-12-03T05:31:16 | 2023-01-04T13:04:32 | JavaScript |
[
{
"alpha_fraction": 0.49878934025764465,
"alphanum_fraction": 0.5108959078788757,
"avg_line_length": 24.875,
"blob_id": "d8330d4d50cd18046a6fcd92111ee3fe5ebc836d",
"content_id": "f46b4b63c781e26f663464fbf42af61939f03408",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 413,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 16,
"path": "/frontend/mix-to-playlist/src/components/InstructionCard.js",
"repo_name": "akjadoon/sets-to-spotify",
"src_encoding": "UTF-8",
"text": "import React from 'react';\nimport './InstructionCard.css';\n\n\nconst InstructionCard = (props) => {\n return(\n <div className=\"Card\">\n <img src={props.bgimg} alt={props.bgimg}/>\n <div style={{padding: \"0px 20px\", fontSize: \"20px\"}}>\n <p><strong>{props.text}</strong></p>\n </div>\n </div>\n )\n}\n\nexport default InstructionCard;"
},
{
"alpha_fraction": 0.5464040637016296,
"alphanum_fraction": 0.5508803129196167,
"avg_line_length": 30.923809051513672,
"blob_id": "bbfeadb86a2e5a4716fec759ebde44fb5381bdb4",
"content_id": "d8391c6b6ec7bfd36df66ca06b4ded91789d4ab8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 3355,
"license_type": "no_license",
"max_line_length": 136,
"num_lines": 105,
"path": "/frontend/mix-to-playlist/src/components/HomePage.js",
"repo_name": "akjadoon/sets-to-spotify",
"src_encoding": "UTF-8",
"text": "import React from 'react';\nimport { Redirect } from 'react-router-dom'\nimport axios from 'axios'\nimport queryString from 'query-string'\n\nimport './HomePage.css';\n\nimport Layout from './Layout'\nimport Drawer from './Drawer'\n\naxios.defaults.timeout = 70000;\nconst yt_regex = new RegExp(\"http(?:s?):\\\\/\\\\/(?:www\\\\.)?youtu(?:be\\\\.com\\\\/watch\\\\?v=|\\\\.be\\\\/)([\\\\w\\\\-\\\\_]*)(&(amp;)?[\\\\w\\\\?=]*)?\");\n\n\nclass HomePage extends React.Component {\n constructor(props){\n super(props);\n this.state = {\n authError: false,\n drawerOpen: false, \n inputText: \"\",\n tracks: [],\n video_title: \"\",\n inputError: false,\n loading: false,\n noTracksFound: false\n }\n }\n\n componentDidMount(){\n axios.get(process.env.REACT_APP_API_BASE_URL + '/token?code=' + \n queryString.parse(this.props.location.search).code,\n {withCredentials: true})\n .then((res) => {\n if (res.status !== 200)\n this.setState({authError: true});\n }) \n }\n\n validateAndSubmit = () => {\n this.setState({\n drawerOpen: false,\n tracks: null,\n inputError: false,\n loading: true,\n noTracksFound: false})\n if (yt_regex.test(this.state.inputText)){\n axios.get(process.env.REACT_APP_API_BASE_URL + '/tracks?link=' + \n this.state.inputText, {withCredentials: true})\n .then((res) => {\n this.setState({\n drawerOpen: true,\n tracks: res.data['tracks'],\n video_title: res.data['video_title'],\n loading: false})\n })\n .catch((error)=>{\n this.setState({noTracksFound: true,\n loading: false})\n })\n } else {\n this.setState({inputError: true, loading: false})\n }\n }\n handleInputChange = (e) => {\n this.setState({inputText: e.target.value})\n }\n render(){\n if (this.state.authError)\n return <Redirect to=\"/auth_error\"/>;\n\n var inputErrorMessage = null;\n if (this.state.inputError)\n inputErrorMessage = <p style={{color: \"red\"}}>Not a valid YouTube link</p>\n\n var loadingSpinner = null;\n if (this.state.loading)\n loadingSpinner = (<React.Fragment>\n <div class=\"lds-facebook\"><div></div><div></div><div></div></div>\n <p>Looking for your tracklist</p>\n </React.Fragment>);\n\n var noTracksFoundError = null;\n if (this.state.noTracksFound)\n noTracksFoundError = <p>Could not find any tracks for your mix.</p>\n const bar = (\n <div className=\"input-container\">\n <div style={{display: \"flex\", marginTop: \"-30px\", width: \"100%\", verticalAlign:\"top\", height: \"60px\" }}>\n <input id=\"link-input\" type=\"text\" placeholder=\"Paste your YouTube URL here...\" onChange={this.handleInputChange}/>\n <button className=\"submit-btn\" onClick={this.validateAndSubmit}>Submit</button>\n </div>\n {inputErrorMessage}\n {noTracksFoundError}\n {loadingSpinner}\n <Drawer open={this.state.drawerOpen} tracks={this.state.tracks} video_title={this.state.video_title}/>\n </div>\n );\n return(\n <Layout bottom={bar}/>\n )\n }\n\n}\n\nexport default HomePage;"
},
{
"alpha_fraction": 0.6099098920822144,
"alphanum_fraction": 0.6175675392150879,
"avg_line_length": 26.75,
"blob_id": "c6f0ae6b0af4d0c377734206793acf594c1671e7",
"content_id": "03fd3aded92a08a2892ed9c1c940d4e7600b4f23",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 2220,
"license_type": "no_license",
"max_line_length": 156,
"num_lines": 80,
"path": "/frontend/mix-to-playlist/src/App.js",
"repo_name": "akjadoon/sets-to-spotify",
"src_encoding": "UTF-8",
"text": "import React from 'react';\nimport axios from 'axios'\nimport { BrowserRouter as Router, Route, Redirect} from \"react-router-dom\";\n\nimport './App.css'\n\nimport HomePage from './components/HomePage'\nimport InstructionCard from './components/InstructionCard'\nimport Layout from './components/Layout'\n\n\nfunction App() {\n return (\n <Router>\n <Route exact path=\"/\" component={LandingPage}/>\n <Route path=\"/logged_in\" component={HomePage} />\n <Route path=\"/auth_error\" component={AuthError}/>\n </Router>\n );\n}\n\n\nclass LandingPage extends React.Component{\n constructor(props) {\n super(props);\n this.state = {\n redirect_url: 0,\n authError: false\n };\n }\n\n componentDidMount(){\n axios.get(process.env.REACT_APP_API_BASE_URL + '/authorize')\n .then((res) => {\n if (res.status !== 200){\n this.setState({authError: true})\n }\n this.setState({redirect_url: res.data['redirect_url']});\n })\n }\n redirect = () =>{\n window.location.replace(this.state.redirect_url);\n } \n render(){\n if (this.state.authError)\n return <Redirect to=\"/auth_error\"/>;\n\n const page_top = (\n <div style={{width: \"100%\", height: \"100%\", backgroundColor: \"rgba(0, 0, 0, 0.4)\", display: \"flex\", flexDirection: \"column\", justifyContent: \"center\"}}>\n <p className=\"call-to-action\">\n {\"Save tracks from your favorite YouTube DJ sets as Spotify Playlists\"}\n </p>\n </div>\n );\n const page_bottom = (\n <React.Fragment>\n <div className=\"PageContainer\">\n <div className=\"CardContainer\">\n <InstructionCard text=\"Copy the link of a DJ set on YouTube\" bgimg=\"youtube-logotype.png\"/>\n <InstructionCard text=\"Paste your link in the bar on the next page\" bgimg=\"paste.png\"/> \n <InstructionCard text=\"The tool searches for a tracklist and creates your playlist\" bgimg=\"chip.png\"/>\n </div>\n <button className=\"log-in-btn\" onClick={this.redirect}>Log in with Spotify</button>\n </div>\n </React.Fragment>\n )\n return(\n <Layout top={page_top} bottom={page_bottom}/>\n )\n }\n}\n\n\nconst AuthError = () =>(\n <h2>Authorization Failed</h2>\n);\n\n\n\nexport default App;\n"
},
{
"alpha_fraction": 0.5914552807807922,
"alphanum_fraction": 0.6074766516685486,
"avg_line_length": 20.428571701049805,
"blob_id": "a152fb03bc39f1ef6608a2a117f0eb3d11233b29",
"content_id": "ef325c5f1657d84d5e345c8eb4ba18dd6187eb83",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 749,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 35,
"path": "/frontend/mix-to-playlist/src/components/Layout.js",
"repo_name": "akjadoon/sets-to-spotify",
"src_encoding": "UTF-8",
"text": "import React from 'react'\nimport Header from './Header'\n\n\nconst pageStyle = {\n height: \"100%\",\n backgroundColor: \"whitesmoke\",\n minHeight: \"100vh\",\n margin: \"0\", \n display: \"flex\",\n flexDirection: \"column\"\n}\n\nconst pageTopStyle = {\n height: \"50vh\",\n width:\"100%\",\n backgroundImage: \"url(gray-turntable-min.jpg)\",\n backgroundSize: \"cover\",\n backgroundRepeat: \"no-repeat\",\n backgroundPosition: \"center\"\n}\n\nconst Layout = (props) => (\n <div style={pageStyle}>\n <Header/>\n <div style={{textAlign: \"center\", display: \"flex\", flexDirection: \"column\", alignItems: \"center\"}}>\n <div style={pageTopStyle}>\n {props.top}\n </div>\n {props.bottom}\n </div>\n </div>\n);\n\nexport default Layout;"
},
{
"alpha_fraction": 0.4790697693824768,
"alphanum_fraction": 0.6976743936538696,
"avg_line_length": 16.200000762939453,
"blob_id": "a0ce0281fe605141f5d7490989097b0abe17c6dd",
"content_id": "63db0dfc53afc454062abbe654e28e63b3ff8912",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 430,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 25,
"path": "/backend/requirements.txt",
"repo_name": "akjadoon/sets-to-spotify",
"src_encoding": "UTF-8",
"text": "cachetools==3.1.1\ncertifi==2019.9.11\nchardet==3.0.4\nClick==7.0\ndnspython==1.16.0\nFlask==1.1.1\nFlask-Cors==3.0.8\ngoogle-api-python-client==1.7.11\ngoogle-auth==1.7.0\ngoogle-auth-httplib2==0.0.3\nhttplib2==0.14.0\nidna==2.8\nitsdangerous==1.1.0\nJinja2==2.10.3\nMarkupSafe==1.1.1\npyasn1==0.4.7\npyasn1-modules==0.2.7\npymongo==3.9.0\nregex==2019.11.1\nrequests==2.22.0\nrsa==4.0\nsix==1.13.0\nuritemplate==3.0.0\nurllib3==1.25.7\nWerkzeug==0.16.0\n"
},
{
"alpha_fraction": 0.6552140116691589,
"alphanum_fraction": 0.6612417101860046,
"avg_line_length": 25.774192810058594,
"blob_id": "7c4366707a367b294b55e1fa2cdf14b434f2cdc6",
"content_id": "15b070ae6d16a1b0bf80da20982e7b6dc995753a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1659,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 62,
"path": "/backend/youtube.py",
"repo_name": "akjadoon/sets-to-spotify",
"src_encoding": "UTF-8",
"text": "import os\nfrom urllib.parse import urlparse, parse_qs\n\n\nimport regex\nimport googleapiclient.discovery\n\nDEFAULT_KWARGS_COMMENTS = {\n \"part\": \"snippet\",\n \"maxResults\": 80,\n \"order\": \"relevance\",\n \"textFormat\": \"plainText\"\n}\n\n# Disable OAuthlib's HTTPS verification when running locally.\n# *DO NOT* leave this option enabled in production.\nos.environ[\"OAUTHLIB_INSECURE_TRANSPORT\"] = \"1\"\n\napi_service_name = \"youtube\"\napi_version = \"v3\"\nDEVELOPER_KEY = os.getenv(\"YOUTUBE_DATA_API_KEY\")\n\n\ndef extract_video_id(url):\n try:\n video_id = parse_qs(urlparse(url).query)['v'][0]\n return video_id\n except Exception as e:\n print(f\"Could not extract video id, {e}\")\n return None\n\n\ndef get_yt_comments(url):\n video_id = extract_video_id(url)\n youtube = googleapiclient.discovery.build(api_service_name, api_version, developerKey = DEVELOPER_KEY)\n\n request = youtube.commentThreads().list(\n **DEFAULT_KWARGS_COMMENTS,\n videoId=video_id\n )\n response = request.execute()\n\n return [\n comment[\"snippet\"][\"topLevelComment\"][\"snippet\"][\"textDisplay\"]\n for comment in response[\"items\"]\n ]\n\n\ndef get_yt_video_info(url):\n video_id = extract_video_id(url)\n youtube = googleapiclient.discovery.build(api_service_name, api_version, developerKey = DEVELOPER_KEY)\n\n request = youtube.videos().list(\n part=\"snippet\",\n id=video_id\n )\n response = request.execute()\n return response['items'][0]['snippet']['title'], response['items'][0]['snippet']['description']\n\n\nif __name__==\"__main__\":\n print(get_yt_video_info(\"https://www.youtube.com/watch?v=A9sOb_r6Hy0\"))"
},
{
"alpha_fraction": 0.5965406894683838,
"alphanum_fraction": 0.6087610721588135,
"avg_line_length": 34,
"blob_id": "222c31dff2f134df9bcb49bc3ba979e5eb07fef0",
"content_id": "db966e96f6be3e581c89ae6dbb254ac48ae88e64",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5319,
"license_type": "no_license",
"max_line_length": 122,
"num_lines": 152,
"path": "/backend/app.py",
"repo_name": "akjadoon/sets-to-spotify",
"src_encoding": "UTF-8",
"text": "import requests\nimport base64\nimport os\nimport json\nimport datetime\nimport base64\nfrom urllib.parse import urlencode\n\nimport pymongo\nimport dns\nfrom flask import Flask, redirect, request, make_response, jsonify\nfrom flask_cors import CORS\n\nfrom extract_tracklists import scan_yt\nfrom spotify import get_spotify_tracks, create_spotify_playlist\n\nCLIENT_ID = os.getenv(\"MTP_SPOTIFY_CLIENT_ID\")\nCLIENT_SECRET = os.getenv(\"MTP_SPOTIFY_CLIENT_SECRET\")\nSTATIC_BASE_URL = os.getenv(\"MTP_STATIC_BASE_URL\")\nDB_CONNECTION_STRING = os.getenv(\"MTP_DB_CONNECTION_STRING\")\ndb_client = pymongo.MongoClient(DB_CONNECTION_STRING)\ndb = db_client.s2sdb\n\napp = Flask(__name__)\ncors = CORS(app, resources={r\"/*\": {\"origins\": STATIC_BASE_URL}}, supports_credentials=True)\napp.config['CORS_HEADERS'] = 'Content-Type'\n\n\ndef refresh_token_decorator(f):\n def refresh_token_wrapper():\n access_token = request.cookies.get(\"token\")\n resp = make_response()\n if not access_token:\n body = {\n \"grant_type\": \"refresh_token\",\n \"refresh_token\": request.cookies.get(\"refresh_token\")\n }\n headers = {\n \"Authorization\": \"Basic \" + str(base64.b64encode(f\"{CLIENT_ID}:{CLIENT_SECRET}\".encode(\"utf-8\")), \"utf-8\")\n }\n print(body)\n print\n res = requests.post(\"https://accounts.spotify.com/api/token\", data=body, headers=headers)\n if res.status_code != 200:\n return \"Invalid Authorization code\", res.status_code\n \n content = json.loads(res.content)\n access_token = content['access_token']\n resp.set_cookie(\"token\", content['access_token'], max_age=3600)\n resp.headers.add('Access-Control-Request-Headers', 'Cookie, Set-Cookie')\n return f(access_token, resp)\n return refresh_token_wrapper\n\n\[email protected]('/authorize')\ndef auth():\n param_string = urlencode({\n \"client_id\": CLIENT_ID,\n \"redirect_uri\": f\"{STATIC_BASE_URL}/logged_in\",\n \"scope\": \"user-read-private playlist-modify-public\",\n \"response_type\": \"code\"\n })\n response = jsonify({\"redirect_url\": f\"https://accounts.spotify.com/authorize?{param_string}\"})\n return response\n\n\[email protected]('/token')\ndef token():\n if not 'code' in request.args:\n return \"No Spotify Authorization code recieved from client\", 400\n\n body = {\n \"client_id\": CLIENT_ID,\n \"client_secret\": CLIENT_SECRET,\n \"grant_type\": \"authorization_code\",\n \"code\": request.args['code'],\n \"redirect_uri\": f\"{STATIC_BASE_URL}/logged_in\" # not redirected, only for verification\n }\n\n res = requests.post(\"https://accounts.spotify.com/api/token\", data=body)\n if res.status_code != 200:\n print(res.content)\n return \"Invalid Authorization code\", 400\n \n content = json.loads(res.content)\n resp = make_response()\n resp.set_cookie(\"token\", content['access_token'], max_age=3600)\n resp.set_cookie(\"refresh_token\", content['refresh_token'])\n resp.headers.add('Access-Control-Request-Headers', 'Cookie, Set-Cookie')\n return resp, 200\n\n\[email protected]('/tracks', methods=['GET'])\n@refresh_token_decorator\ndef tracks(access_token, resp):\n logs = {\"at\": datetime.datetime.now()}\n if 'link' in request.args:\n logs['link'] = request.args['link']\n title, track_names = scan_yt(request.args['link'])\n logs['yt_title'] = title\n if not track_names:\n return modify_response(resp, \"Could not find a tracklist for your mix\", 400, logs)\n logs['yt_tracks'] = {str(i): t for i, t in enumerate(track_names)} \n spotify_tracks = get_spotify_tracks(track_names, access_token)\n if not spotify_tracks:\n return 
modify_response(resp, \"Failed to find tracks on spotify\", 400, logs)\n logs['spotify_tracks'] = {\n str(i): {\n \"spotify_id\": track.id,\n \"track\": track.name,\n \"artists\": {\n str(j): artist \n for j, artist in enumerate(track.artists)}\n }\n for i, track in enumerate(spotify_tracks)} \n return modify_response(resp,\n json.dumps({'video_title': title, 'tracks': [t.serialize() for t in spotify_tracks]}),\n 200,\n logs,\n mimetype='application/json')\n return modify_response(resp, \"No link in request\", 400, logs)\n\n\[email protected]('/playlist', methods=['POST'])\ndef playlist():\n data = request.get_json()\n success = create_spotify_playlist(data['track_ids'], data['video_title'], request.cookies.get('token'))\n if not success:\n return \"Failed to create playlist\", 400\n return \"okay\", 201\n\n\[email protected]('/hello')\ndef hello():\n return \"Hello World\"\n\n\ndef modify_response(response, data, status_code, logs, mimetype=None):\n if status_code != 200:\n logs[\"failure_message\"] = data\n print(logs)\n db[\"get_playlist_logs\"].insert_one(logs)\n\n response.data = data\n response.status_code = status_code\n if mimetype:\n response.mimetype = mimetype\n return response\n\n\nif __name__ == \"__main__\":\n app.run(host='127.0.0.1', port=5000, debug=True)"
},
{
"alpha_fraction": 0.5220155119895935,
"alphanum_fraction": 0.54344242811203,
"avg_line_length": 32.98400115966797,
"blob_id": "7d7e3abd4306369b1af1b2dbde7acbb97b2e1f6b",
"content_id": "a29b3d7712a7c516d719bf922d96b46808bb7b04",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4271,
"license_type": "no_license",
"max_line_length": 117,
"num_lines": 125,
"path": "/backend/extract_tracklists.py",
"repo_name": "akjadoon/sets-to-spotify",
"src_encoding": "UTF-8",
"text": "import requests\nimport regex\nimport time\n\nfrom youtube import get_yt_comments, get_yt_video_info\n\nMIN_TRACKS = 5\nyt_mobile_regex = r\"http(?:s?):\\/\\/(?:www\\.)?youtu.be/(\\S*)\"\ncontains_tracklist_regex = r\"(?:tracklist|track list|tracks)[^\\n]*\\n\"\n\ncomment_regexps = {\n \"single_track_per_line\": [\n (r\"\\d{1,2}[\\.| -—]\\W*\\d{1,2}:\\d{2}:\\d{2}\\W*\\s(.*(?:-|—).*)\", \"1. (XX:XX:XX) \"),\n (r\"\\d{1,2}[\\.| -—]\\W*\\d{1,2}:\\d{2}\\W*\\s(.*(?:-|—).*)\", \"1. (XX:XX) \"),\n (r\"[\\[\\(]\\d{1,2}[\\]\\)]\\W*\\s(.*(?:-|—).*)\", \"[XX]\"),\n (r\"[\\[\\(]?\\d{1,2}:\\d{2}:\\d{2}[\\]\\)]?\\W*\\s(.*(?:-|—).*)\", \"(XX:XX:XX)\"),\n (r\"[\\[\\(]?\\d{1,2}:\\d{2}[\\]\\)]?\\W*\\s(.*(?:-|—).*)\", \"(XX:XX)\"),\n (r\"\\d{1,2}\\.\\W*\\s(.*(?:-|—).*)\", \"01.\"),\n (r\"\\d{2}\\W*\\s(.*(?:-|—).*)\", \"01 \")\n ],\n \"multi_track_per_line\": [\n (r\"\\d{1,2}\\.\", \"1. song1 2.song2\"),\n (r\"[\\[\\(]?\\d{1,2}:\\d{2}:\\d{2}[\\]\\)]?\", \"(00:00:00) song1 (00:00:00) song2\"),\n (r\"[\\[\\(]?\\d{1,2}:\\d{2}[\\]\\)]?\", \"(00:00) song1 (00:00) song2\"),\n ],\n \"single_track_multi_line\": [\n (r\"^[\\[\\(]?\\d{1,2}:\\d{2}:\\d{2}[\\]\\)]?\\W*\\n(.*)\\n\", \"(00:00:00)\\n\"),\n (r\"^[\\[\\(]?\\d{1,2}:\\d{2}[\\]\\)]?\\W*\\n(.*)\\n\", \"(00:00)\\n\"),\n ],\n \"single_track_per_line_description_only\": [\n (r\"^([^-\\n]* - [^-\\n]*)(?: - (?:[^-\\n]*))?\\n\", \"The tracklist:\\n\")\n ]\n}\n\n\ndef scan_yt_comments(url):\n for comment in get_yt_comments(url):\n if len(comment.split(\"\\n\")) == 1:\n tracklist = find_single_line_tracklist(comment)\n else:\n tracklist = find_multi_line_tracklist(comment)\n if tracklist:\n return [track.replace(\"-\", \"\") for track in tracklist]\n print(\"Tracklist not found in comments\")\n return None\n\n\ndef scan_yt_description(description):\n tracklist = find_multi_line_tracklist(description)\n if tracklist:\n return tracklist\n\n try:\n _, tracklist_text = regex.split(contains_tracklist_regex, description, flags=regex.IGNORECASE)\n print(\"contains track list\")\n tracklist = match(tracklist_text, comment_regexps[\"single_track_per_line_description_only\"], regex.MULTILINE)\n except ValueError:\n print(\"No Tracklist in description\")\n pass\n if tracklist:\n return tracklist\n\n return None\n\n\ndef scan_yt(url):\n match = regex.match(yt_mobile_regex, url)\n if match:\n url = f\"https://youtube.com/watch?v={match.group(1)}\"\n\n tracklist = scan_yt_comments(url)\n title, description = get_yt_video_info(url)\n if not tracklist:\n tracklist = scan_yt_description(description)\n if not tracklist:\n return title, None\n return title, [\n process_track(track) for track in tracklist\n ]\n\n\ndef process_track(track):\n # Remove remix info\n track = regex.sub(\"(\\(.*\\)|\\[.*\\])\", \"\", track).strip()\n # Remove any extra track info and replace unicode — with -\n track = \"\".join(track.replace(\"—\", \"-\").split(\"-\")[:2])\n #Remove multiple spaces\n track = regex.sub(' +', ' ', track)\n # remove feat\n # track = track.replace(\"feat\", \"\").replace\n return track\n\n\ndef find_single_line_tracklist(comment):\n for pattern, _ in comment_regexps[\"multi_track_per_line\"]:\n result = regex.split(pattern, comment)\n result_str = \"\".join(result)\n dash_count, em_dash_count = result_str.count(\"-\"), result_str.count(\"—\")\n if len(result) > MIN_TRACKS and (dash_count > MIN_TRACKS or em_dash_count > MIN_TRACKS):\n return result[1:]\n return None\n \n\ndef find_multi_line_tracklist(comment, regexps=None):\n for pattern, _ in 
comment_regexps[\"single_track_per_line\"]:\n matches = regex.findall(pattern, comment)\n if len(matches) > MIN_TRACKS:\n return matches\n\n for pattern, _ in comment_regexps[\"single_track_multi_line\"]:\n matches = regex.findall(pattern, comment)\n if len(matches) > MIN_TRACKS:\n return matches\n\n return None\n\ndef match(comment, regexps, flags):\n for pattern, _ in regexps:\n matches = regex.findall(pattern, comment, flags=flags)\n if len(matches) > MIN_TRACKS:\n return matches\n return None\n\nif __name__ == \"__main__\":\n print(scan_yt(\"https://www.youtube.com/watch?v=AJvCnFqSViA&t=742s\"))"
},
{
"alpha_fraction": 0.5964969396591187,
"alphanum_fraction": 0.5997405052185059,
"avg_line_length": 27.537036895751953,
"blob_id": "0e7e4b945aa8e78fc3652fea9453439c6bb7b220",
"content_id": "44e172fcd08a9731b4cf0420572c0ef94d762c0f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3083,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 108,
"path": "/backend/spotify.py",
"repo_name": "akjadoon/sets-to-spotify",
"src_encoding": "UTF-8",
"text": "import requests\nimport json\nimport logging\nfrom enum import Enum\nfrom typing import List\n\n\nlogging.basicConfig(level=logging.INFO)\n\nFAILURE = \"Failure\"\nSUCCESS = \"Success\"\n\nHEADERS = {\n \"Accept\": \"application/json\", \n \"Content-Type\": \"application/json\", \n \"Authorization\": \"\"\n}\n\n\nclass SpotifyTrack:\n def __init__(self, _id:str, name: str, artists: List[str]):\n self.id = _id\n self.name = name\n self.artists = artists\n\n def serialize(self):\n return {\n \"id\": self.id,\n \"name\": self.name,\n \"artists\": self.artists\n }\n\n\ndef get_spotify_tracks(names, token):\n HEADERS['Authorization'] = \"Bearer \" + token\n if not isinstance(names, List):\n names = [names]\n tracks = []\n for name in names:\n param_str = name.replace(\"-\", \"\").replace(\" \", \"%20\")\n url = f\"https://api.spotify.com/v1/search?q={param_str}&type=track\"\n try:\n res = requests.get(url, headers=HEADERS)\n info = json.loads(res.content)['tracks']['items'][0]\n artists = [artist['name'] for artist in info['artists']]\n tracks.append(SpotifyTrack(info['id'], info['name'], artists))\n logging.info(f\"Matched {name} to song:{info['name']}, artists: {','.join(artists)}\")\n except Exception as e:\n logging.error(f\"Song: {name} not found, {repr(e)}\")\n return tracks\n\n\ndef create_playlist(user_id, name):\n data = {\n \"name\": name,\n \"public\": True,\n \"description\": \"\"\n }\n url = f\"https://api.spotify.com/v1/users/{user_id}/playlists\"\n res = requests.post(url, headers=HEADERS, json=data)\n try:\n return json.loads(res.content)['id']\n except Exception as e:\n logging.error(f\"Failed to Create Playlist, {e}\")\n return FAILURE\n\n\ndef get_user_id():\n res = requests.get(\"https://api.spotify.com/v1/me\", headers=HEADERS)\n try: \n return json.loads(res.content)['id']\n except Exception as e:\n logging.error(f\"Failed to get spotify user id, {e}\")\n return FAILURE\n\n\ndef add_tracks(playlist_id, track_ids):\n url = f\"https://api.spotify.com/v1/playlists/{playlist_id}/tracks\"\n data = {\n \"uris\": [f\"spotify:track:{track_id}\" for track_id in track_ids]\n }\n res = requests.post(url, headers=HEADERS, json=data)\n if res.status_code != 201:\n logging.error(f\"Adding tracks to playlist id: {playlist_id} failed, HTTP Status Code {res.status_code}\")\n return False\n return True\n\n\ndef create_spotify_playlist(track_ids, playlist_name, token):\n HEADERS['Authorization'] = \"Bearer \" + token\n if not track_ids:\n logging.error(\"No tracks found on Spotify\")\n return False\n\n playlist_id = create_playlist(get_user_id(), playlist_name)\n if playlist_id == FAILURE:\n return False\n return add_tracks(playlist_id, track_ids)\n\n\nif __name__ == \"__main__\":\n tracks = [\n \"ac dc thunderstruck\",\n \"kiss rock and roll\",\n \"jimi hendrix all along the watch tower\"\n ]\n\n get_track_ids(tracks)\n\n"
}
] | 9 |
limindeyou/ihadlearnpython | https://github.com/limindeyou/ihadlearnpython | 6111e4a79cbf00a25d0d585825fb88b1c3e4d144 | 5f10c6b3ee014adf51b5cfc9ea35d8e824336156 | a14851e33e0bd35b6f7700aa04692e68be1a9b17 | refs/heads/master | 2020-04-04T22:37:55.821623 | 2018-11-06T06:22:13 | 2018-11-06T06:22:13 | 156,330,075 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6666666865348816,
"alphanum_fraction": 0.6666666865348816,
"avg_line_length": 13.5,
"blob_id": "74b6328367798ee0303a2261f32e72961920b535",
"content_id": "50cd49bbcbc0419eb7e01695356a6a907c45a6ba",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 30,
"license_type": "no_license",
"max_line_length": 21,
"num_lines": 2,
"path": "/helloworld.py",
"repo_name": "limindeyou/ihadlearnpython",
"src_encoding": "UTF-8",
"text": "\nprint(\"hello world\");\nhello;\n"
},
{
"alpha_fraction": 0.3541666567325592,
"alphanum_fraction": 0.4166666567325592,
"avg_line_length": 8.600000381469727,
"blob_id": "4c7d70b67ca88485ed5f117bc06742c145cadcd0",
"content_id": "6f78182ce865e58c2f0ef47c57f912fb9fa2f3f2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 48,
"license_type": "no_license",
"max_line_length": 15,
"num_lines": 5,
"path": "/readme.txt",
"repo_name": "limindeyou/ihadlearnpython",
"src_encoding": "UTF-8",
"text": "<<<<<<< HEAD\n100\n=======\ndsajkj\n>>>>>>> feature\n"
}
] | 2 |
avib230/TEST_Ist | https://github.com/avib230/TEST_Ist | 3f4b8e919b83d2f2a7f44428f3090b20166fabe6 | f166349f16340c1449581dd5998cf83513de7d12 | 2f7b83218183eeda0e22642c19580c5f05f3dad4 | refs/heads/master | 2022-11-25T14:41:47.976524 | 2020-07-31T11:01:44 | 2020-07-31T11:01:44 | 284,013,363 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7272727489471436,
"alphanum_fraction": 0.7272727489471436,
"avg_line_length": 32,
"blob_id": "f46ecd4f5ce711e849551f5dae781697f9f088fd",
"content_id": "9939c6d82fb14e55b54cf566eb65ac0aa7cd984b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 33,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 1,
"path": "/test.py",
"repo_name": "avib230/TEST_Ist",
"src_encoding": "UTF-8",
"text": "# This is second file to commit\n"
},
{
"alpha_fraction": 0.734375,
"alphanum_fraction": 0.734375,
"avg_line_length": 20.33333396911621,
"blob_id": "3f3d22902060b6c91fdd956dd2adb43b2afaae52",
"content_id": "9b33773c2a4335f0c2c1ee7685346977398dac44",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 64,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 3,
"path": "/README.md",
"repo_name": "avib230/TEST_Ist",
"src_encoding": "UTF-8",
"text": "# TEST_Ist\n\n# This is my First gibt commit file for testing git\n"
}
] | 2 |
Nantor/BUZZ-to-YDKJ | https://github.com/Nantor/BUZZ-to-YDKJ | b743388b15d8d92527c313123f8a459be0fb0a69 | bd11a4f84da22e143daf657e15d3fa766b7561bc | 02c129aef4cb6751bd919cb525fe0526a96cb913 | refs/heads/master | 2021-06-25T03:00:30.356302 | 2020-02-19T14:43:32 | 2020-02-19T14:43:32 | 100,156,139 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5747460126876831,
"alphanum_fraction": 0.5994194746017456,
"avg_line_length": 24.518518447875977,
"blob_id": "c0af561c23b714360a8511e742924cef53fb55f9",
"content_id": "ac61c763ce1ca3481bbcd9d7918d23a1e8ba5e3a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1384,
"license_type": "permissive",
"max_line_length": 162,
"num_lines": 54,
"path": "/README.md",
"repo_name": "Nantor/BUZZ-to-YDKJ",
"src_encoding": "UTF-8",
"text": "# Play YDKJ 🕹 with big red buttons 🔴\nYour don't want to play yor favorite game \"You don't know Jack\" with big bulky keyboard.\\\nYou want to get rid of the trouble, that your best fiend and biggest enemy can interfere with you.\\\nYou are thinking:\n>_WTF, I playing a quiz-show, where are the big red button's!_\n\nThan this is your place.\n\nHere you can get a little script, where you can use the playstation 2 _BUZZ_ USB-controller (not bluetooth) with the PC version of the game \"You don't know Jack\".\n\n## Dependencies\n- Python >=3.5\n- \"libhidapi-hidraw0\" or \"libhidapi-libusb0\"\n- pip packages: hid, keyboard\n\n## Features\n- key mapping of controller buttons to the game keys (done)\n- add logic to prevent interfering (done)\n- custom configuration (done)\n- documentation (WIP)\n- use LED's (done)\n- other features like: auto run as root, in background and ... (WIP)\n\n## Usage\n\n## Configuration\n\nDefault\n```json\n{\n \"YDKJ\": {\n \"answer1\": \"1\",\n \"answer2\": \"2\",\n \"answer3\": \"3\",\n \"answer4\": \"4\",\n \"nail\": \"n\",\n \"player1\": \"q\",\n \"player2\": \"b\",\n \"player3\": \"p\"\n },\n \"controller\": {\n \"1\": \"player1\",\n \"2\": \"player2\",\n \"3\": \"player3\",\n \"4\": \"nail\",\n \"product_id\": 4096,\n \"vendor_id\": 1356\n },\n \"general\": {\n \"blinking_speed\": 0.1,\n \"blinking_times\": 3\n }\n}\n```\n"
},
{
"alpha_fraction": 0.5196969509124756,
"alphanum_fraction": 0.550000011920929,
"avg_line_length": 25.93877601623535,
"blob_id": "dbc317dd2cb08b78784ddb7882ee41eca7db56f7",
"content_id": "82190cfc3c7838f8963e4a0b290952162c7e07a6",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 1320,
"license_type": "permissive",
"max_line_length": 80,
"num_lines": 49,
"path": "/setup.sh",
"repo_name": "Nantor/BUZZ-to-YDKJ",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\n# force to run as root\nif [ \"$(id -u)\" != \"0\" ]; then\n sudo \"$0\" \"$@\"\n exit $?\nfi\n\n# initial Parameter:\n# Buzz controller device default properteis\nVENDOR_ID=054C\nPRODUCT_ID=1000\n\n# install dependencies\napt update\napt install -y python3-pip libhidapi-hidraw0\npip3 install hid keyboard\n\n# detect Buzz controller\nLSUSB=$(lsusb)\nif ! (echo \"$LSUSB\" | grep -iq \"$VENDOR_ID:$PRODUCT_ID\"); then\n __BUZZ_COUNT=$(echo \"$LSUSB\" | grep -ci \"buzz\")\n if [ \"$__BUZZ_COUNT\" = \"1\" ]; then\n __BUZZ_DEV=$(echo \"$LSUSB\" | grep -i \"buzz\")\n VENDOR_ID=$(echo \"$__BUZZ_DEV\" | cut -c24-27)\n PRODUCT_ID=$(echo \"$__BUZZ_DEV\" | cut -c29-33)\n else\n __COUNT=$(echo \"$LSUSB\" | wc -l)\n while :; do\n echo \"No Buzz Controller found, please choose: \"\n echo \" 0) abort and exit\"\n echo \"$LSUSB\" | cut -c34- | nl -s') '\n read -r __SELECT\n if [[ ! \"$__SELECT\" =~ [^0-9] ]] && [[ $__SELECT -le $__COUNT ]]; then\n if [[ $__SELECT -eq 0 ]]; then\n exit 1\n fi\n __SELECTED=$(echo \"$LSUSB\" | sed \"${__SELECT}q;d\")\n VENDOR_ID=$(echo \"$__SELECTED\" | cut -c24-27)\n PRODUCT_ID=$(echo \"$__SELECTED\" | cut -c29-33)\n break\n fi\n done\n fi\nfi\n\ncat >buzz.json <<EOF\n{\"controller\":{\"vendor_id\":$((16#$VENDOR_ID)),\"product_id\":$((16#$PRODUCT_ID))}}\nEOF\n"
},
{
"alpha_fraction": 0.5174262523651123,
"alphanum_fraction": 0.5482573509216309,
"avg_line_length": 38.56060791015625,
"blob_id": "00b6f9d6917a4e88ba85a4a3401d92a999ffca7d",
"content_id": "d0ee4b042ad70836ea47cec43c590b0d612d7fe3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 10444,
"license_type": "permissive",
"max_line_length": 112,
"num_lines": 264,
"path": "/buzz.py",
"repo_name": "Nantor/BUZZ-to-YDKJ",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\nimport ctypes\nimport json\nimport math\nimport threading\nimport time\n\nimport hid\nimport keyboard\n\n_default_config = {\n 'general': {\n 'blinking_times': 3,\n 'blinking_speed': .1,\n },\n 'controller': {\n 'vendor_id': 0x054c,\n 'product_id': 0x1000,\n '1': 'player1',\n '2': 'player2',\n '3': 'player3',\n '4': 'nail',\n },\n 'YDKJ': {\n 'player1': 'q',\n 'player2': 'b',\n 'player3': 'p',\n 'nail': 'n',\n 'answer1': '1',\n 'answer2': '2',\n 'answer3': '3',\n 'answer4': '4',\n },\n}\n\n\ndef _merge_config(original, override):\n if type(override) == type(original):\n if type(original) is dict:\n z = {}\n for k in original:\n z[k] = _merge_config(original[k], override.get(k, None))\n return z\n else:\n return override\n return original\n\n\nclass BuzzControllerMapper(threading.Thread):\n __light_array = [0x00 for _ in range(8)]\n __button_state = None\n\n def __init__(self, configuration_file='buzz.json'):\n super(BuzzControllerMapper, self).__init__()\n\n # instantiate stop event\n self._stop_event = threading.Event()\n\n # instantiate the device class\n if os.path.isfile(configuration_file):\n configuration = _merge_config(_default_config, json.load(open(configuration_file)))\n else:\n configuration = _default_config\n\n hid.enumerate()\n self._dev = hid.hidapi.hid_open(configuration['controller']['vendor_id'],\n configuration['controller']['product_id'], None)\n if self._dev is None:\n info = hid.hidapi.hid_enumerate(configuration['controller']['vendor_id'],\n configuration['controller']['product_id'])\n d = info\n while d:\n if d.contents.as_dict()['vendor_id'] == configuration['controller']['vendor_id'] and \\\n d.contents.as_dict()['product_id'] == configuration['controller']['product_id']:\n self._dev = d\n break\n d = d.contents.next\n hid.hidapi.hid_free_enumeration(info)\n\n if self._dev is None:\n raise ValueError('No Device found with vendor_id \"{}\" and product_id \"{}\".'.format(\n configuration['controller']['vendor_id'], configuration['controller']['product_id']))\n\n # set the Buzz controller in none blocking mode\n hid.hidapi.hid_set_nonblocking(self._dev, 1)\n\n # clear the Buzz controller LEDs\n hid.hidapi.hid_write(self._dev, bytes(self.__light_array), len(self.__light_array))\n data = ctypes.create_string_buffer(5)\n hid.hidapi.hid_read(self._dev, data, 5)\n self.__button_state = data.raw\n\n # setup Buzz controller mapping\n self._controller_mapping = tuple(configuration['controller'][str(i)] for i in range(1, 5))\n self._player_controller = tuple(\n filter(lambda i: configuration['controller'][str(i + 1)] != 'nail', (i for i in range(4)))\n )\n self._player_controller_button_mapping = ('{!s}', 'answer4', 'answer3', 'answer2', 'answer1')\n self._nail_controller_button_mapping = ('nail', None, 'player3', 'player2', 'player1')\n\n self._ydkj_buttons = configuration['YDKJ']\n self._blinking_times = configuration['general']['blinking_times']\n self._blinking_speed = configuration['general']['blinking_speed']\n\n def _light_set(self, controllers, status):\n if self._dev is None:\n raise Exception('Controller closed')\n s = 0xFF if status else 0x00\n for controller in controllers:\n self.__light_array[controller + 2] = s\n hid.hidapi.hid_write(self._dev, bytes(self.__light_array), len(self.__light_array))\n\n def _blink(self, controllers):\n self._light_set(controllers, True)\n for _ in range(self._blinking_times):\n time.sleep(self._blinking_speed * .3)\n self._light_set(controllers, False)\n time.sleep(self._blinking_speed)\n 
self._light_set(controllers, True)\n time.sleep(self._blinking_speed * .3)\n self._light_set(controllers, False)\n\n def _flash(self, controller):\n self._light_set((controller,), True)\n time.sleep(self._blinking_speed * 1.3 * (self._blinking_times + 1))\n self._light_set((controller,), False)\n\n def _handle_buttons_pressed(self, button):\n controller = int(math.floor(button / 5))\n controller_button = button % 5\n\n if self._controller_mapping[controller] is not 'nail':\n button_type = self._player_controller_button_mapping[controller_button] \\\n .format(self._controller_mapping[controller])\n else:\n button_type = self._nail_controller_button_mapping[controller_button]\n\n if button_type is not None:\n keyboard.send(self._ydkj_buttons[button_type])\n\n if button_type in self._controller_mapping:\n if self._controller_mapping[controller] is 'nail':\n controller = self._controller_mapping.index(button_type)\n flash = threading.Thread(name='Flash-' + str(controller), target=self._flash, args=[controller])\n flash.start()\n if button_type == 'nail':\n blinking = threading.Thread(name='Nailed',\n target=self._blink,\n args=[self._player_controller])\n blinking.start()\n\n def _map_controller_buttons(self):\n if self._dev is None:\n raise Exception('Controller closed')\n data = ctypes.create_string_buffer(5)\n size = hid.hidapi.hid_read_timeout(self._dev, data, 5, 1000)\n if size > 0 and data.raw != self.__button_state:\n # red buzzer - controller 1\n if self.__button_state[2] & 0x01 == 0 and data.raw[2] & 0x01 != 0:\n self._handle_buttons_pressed(0)\n # yellow button - controller 1\n if self.__button_state[2] & 0x02 == 0 and data.raw[2] & 0x02 != 0:\n self._handle_buttons_pressed(1)\n # green button - controller 1\n if self.__button_state[2] & 0x04 == 0 and data.raw[2] & 0x04 != 0:\n self._handle_buttons_pressed(2)\n # orange button - controller 1\n if self.__button_state[2] & 0x08 == 0 and data.raw[2] & 0x08 != 0:\n self._handle_buttons_pressed(3)\n # blue button - controller 1\n if self.__button_state[2] & 0x10 == 0 and data.raw[2] & 0x10 != 0:\n self._handle_buttons_pressed(4)\n\n # red buzzer - controller 2\n if self.__button_state[2] & 0x20 == 0 and data.raw[2] & 0x20 != 0:\n self._handle_buttons_pressed(5)\n # yellow button - controller 2\n if self.__button_state[2] & 0x40 == 0 and data.raw[2] & 0x40 != 0:\n self._handle_buttons_pressed(6)\n # green button - controller 2\n if self.__button_state[2] & 0x80 == 0 and data.raw[2] & 0x80 != 0:\n self._handle_buttons_pressed(7)\n # orange button - controller 2\n if self.__button_state[3] & 0x01 == 0 and data.raw[3] & 0x01 != 0:\n self._handle_buttons_pressed(8)\n # blue button - controller 2\n if self.__button_state[3] & 0x02 == 0 and data.raw[3] & 0x02 != 0:\n self._handle_buttons_pressed(9)\n\n # red buzzer - controller 3\n if self.__button_state[3] & 0x04 == 0 and data.raw[3] & 0x04 != 0:\n self._handle_buttons_pressed(10)\n # yellow button - controller 3\n if self.__button_state[3] & 0x08 == 0 and data.raw[3] & 0x08 != 0:\n self._handle_buttons_pressed(11)\n # green button - controller 3\n if self.__button_state[3] & 0x10 == 0 and data.raw[3] & 0x10 != 0:\n self._handle_buttons_pressed(12)\n # orange button - controller 3\n if self.__button_state[3] & 0x20 == 0 and data.raw[3] & 0x20 != 0:\n self._handle_buttons_pressed(13)\n # blue button - controller 3\n if self.__button_state[3] & 0x40 == 0 and data.raw[3] & 0x40 != 0:\n self._handle_buttons_pressed(14)\n\n # red buzzer - controller 4\n if self.__button_state[3] & 0x80 == 0 and 
data.raw[3] & 0x80 != 0:\n self._handle_buttons_pressed(15)\n # yellow button - controller 4\n if self.__button_state[4] & 0x01 == 0 and data.raw[4] & 0x01 != 0:\n self._handle_buttons_pressed(16)\n # green button - controller 4\n if self.__button_state[4] & 0x02 == 0 and data.raw[4] & 0x02 != 0:\n self._handle_buttons_pressed(17)\n # orange button - controller 4\n if self.__button_state[4] & 0x04 == 0 and data.raw[4] & 0x04 != 0:\n self._handle_buttons_pressed(18)\n # blue button - controller 4\n if self.__button_state[4] & 0x08 == 0 and data.raw[4] & 0x08 != 0:\n self._handle_buttons_pressed(19)\n\n self.__button_state = data.raw\n\n def stop(self):\n self._stop_event.set()\n self._dev = None\n\n def stopped(self):\n return self._stop_event.is_set()\n\n def run(self):\n hid.hidapi.hid_set_nonblocking(self._dev, 0)\n while not self.stopped():\n self._map_controller_buttons()\n hid.hidapi.hid_close(self._dev)\n\n\nif __name__ == '__main__':\n import os\n import stat\n\n pwd = os.path.dirname(os.path.realpath(__file__))\n\n if os.getuid() != 0:\n raise PermissionError('Need to be root for running.')\n\n pid_file = os.path.join(pwd, 'buzz.pid')\n mode = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IROTH | stat.S_IWOTH\n\n buzz = BuzzControllerMapper(os.path.join(pwd, 'buzz.json'))\n\n with open(pid_file, 'w') as f:\n f.write(str(os.getpid()))\n os.chmod(pid_file, mode)\n\n buzz.start()\n\n while buzz.is_alive() and os.path.exists(pid_file):\n time.sleep(1)\n\n buzz.stop()\n if os.path.exists(pid_file):\n os.remove(pid_file)\n buzz.join(10)\n"
}
] | 3 |
GeraldWu23/CytonemeSignaling | https://github.com/GeraldWu23/CytonemeSignaling | 0fa505ed7e3e4eef890176c7e102d16189b71434 | fa61e60c733799286757176ff4d2e9f87aeb6214 | 49b9fd7d3a47eebb6fe3b3b94ba1353a4c48c4a8 | refs/heads/master | 2020-06-15T06:50:23.526918 | 2020-02-12T06:16:19 | 2020-02-12T06:16:19 | 195,230,828 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5163094401359558,
"alphanum_fraction": 0.5330848097801208,
"avg_line_length": 28.013513565063477,
"blob_id": "96e548537989ad083bed49ee4aad91f1284e039b",
"content_id": "9e7d91585ec15e4c8982b6bfbf1d35cef126dd48",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2146,
"license_type": "no_license",
"max_line_length": 156,
"num_lines": 74,
"path": "/fit_curve_yolk.py",
"repo_name": "GeraldWu23/CytonemeSignaling",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Nov 18 16:38:59 2019\n\n@author: wukak\n\"\"\"\n\nimport numpy as np\nfrom numpy import exp\nfrom scipy.optimize import curve_fit\nimport matplotlib.pyplot as plt\n\n\n\n\n\nclass fit_model_yolk:\n ''' model fitting with given points \n \n self.Boltzmann(): give Boltzmann output with specified x, sigma, b\n self.fit(): \n '''\n def __init__(self, xlist, ylist):\n self.xlist = xlist\n self.ylist = ylist\n \n def Boltzmann(self, x, sigma, b):\n '''\n sigma stands for threshold\n b stands for sharpness\n '''\n return 1 / (1 + exp( -(x-sigma) / b))\n \n \n def fit(self,func):\n ''' Optimal values for the parameters so that the sum of the squared residuals of f(xdata, *popt) - ydata is minimized '''\n xlist, ylist = self.xlist, self.ylist\n try:\n # The estimated covariance of popt\n popt, pcov = curve_fit(func, np.array(xlist), np.array(ylist), p0=(132, 68))\n except:\n print(\"Can't fit\")\n return False\n return popt\n \n \n def plot_model(self, func, start, end, num = 50, title = None, graph = True, inputc='C0', predictc='C1', inputname='input', predictname='predict'): \n ''' train the model and plot the model with input points '''\n xlist, ylist = self.xlist, self.ylist\n popt = self.fit(func)\n \n try: # False will be not subscriptable\n popt[0]\n except:\n return False\n\n \n if graph:\n pred_y = func(np.array(np.linspace(start,end,num)), popt[0], popt[1])\n plt.plot(xlist, ylist, marker = 'o',c=inputc, label = inputname + \n ' threshold : '+str(round(popt[0],3)))\n plt.plot(np.linspace(start, end, num), pred_y,c = predictc, marker = '.',\n label = predictname+' sharpness: '+str(round(popt[1],3)))\n plt.legend(fontsize = 12)\n if title:\n plt.title(title)\n \n return popt[0], popt[1]\n \n \n \nif __name__ == '__main__':\n \n pass"
},
{
"alpha_fraction": 0.562447726726532,
"alphanum_fraction": 0.5900351405143738,
"avg_line_length": 28.709999084472656,
"blob_id": "6a875eaf06d1965b94ca12742125bde2e28b6103",
"content_id": "646bcd333457f0450fbdb544ddefa174bef1cdb4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5981,
"license_type": "no_license",
"max_line_length": 141,
"num_lines": 200,
"path": "/graphread.py",
"repo_name": "GeraldWu23/CytonemeSignaling",
"src_encoding": "UTF-8",
"text": "py# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Nov 14 20:09:55 2019\n\n@author: wukak\n\"\"\"\n\nimport cv2\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.linear_model import LinearRegression\nMAXINT = 10000\nBGintensity = 190\nfrom fit_curve_yolk import fit_model_yolk\n\n\n\n\ndef showimg(imgBGR):\n plt.figure(figsize=(12,8))\n imgRGB = cv2.cvtColor(imgBGR, cv2.COLOR_BGR2RGB)\n plt.imshow(imgRGB, origin='lower')\n return imgRGB\n \ndef showimggray(imgBGR):\n plt.figure(figsize=(12,8))\n imgGRAY = cv2.cvtColor(imgBGR, cv2.COLOR_BGR2GRAY)\n plt.imshow(imgGRAY, cmap=plt.cm.gray)\n return imgGRAY\n\n\n \n\n\n\n'''\n get the img of the yolk\n \n yolk_thr needs to be decided according to the original img\n \n NOTE: the x y in plot() is different from x y in imshow()\n'''\ndef getyolk(img):\n imgGRAY = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n yolk_thr = 170\n maxp = 170\n minp = min(map(min, imgGRAY))\n xtrain = []\n ytrain = []\n minx = MAXINT\n miny = MAXINT\n maxx = 0\n maxy = 0\n \n # label the pixels and get target points\n for row in range(len(imgGRAY)):\n for col in range(len(imgGRAY[row])):\n if imgGRAY[row][col] > yolk_thr:\n# imgGRAY[row][col] = 255\n pass\n elif imgGRAY[row][col] > (maxp+minp)/2:\n# imgGRAY[row][col] = 180 # the colour of the light part\n minx = min(row, minx)\n miny = min(col, miny)\n maxx = max(row, maxx)\n maxy = max(col, maxy)\n else:\n# imgGRAY[row][col] = 30 # the colour of the dark part\n xtrain.append(row)\n ytrain.append(col)\n minx = min(row, minx)\n miny = min(col, miny)\n maxx = max(row, maxx)\n maxy = max(col, maxy)\n \n \n #yolk_centre = ((maxx+minx)/2, (maxy+miny)/2)\n return xtrain, ytrain, imgGRAY, (minx, miny, maxx, maxy)\n\n\n'''\n linear regression\n'''\ndef train(xtrain, ytrain):\n linreg = LinearRegression()\n linreg.fit(np.array(xtrain).reshape(-1,1), ytrain)\n return linreg.intercept_, linreg.coef_\n \n \n'''\n rotate images\n'''\ndef rotate(img, centre, angle, show = False):\n rotm = cv2.getRotationMatrix2D(centre, angle, 1)\n rotimg = cv2.warpAffine(img, rotm, (len(img[0]),len(img)))\n \n # clean the corners\n for row in range(len(img)):\n for col in range(len(img[0])):\n if rotimg[row][col] == 0:\n rotimg[row][col] = BGintensity\n \n if show:\n showimg(rotimg)\n \n return rotimg\n\n'''\n get intensities of a range\n'''\ndef intensity(img, minx, miny, maxx, maxy):\n # col wise\n intensity_list = []\n for y in range(miny, maxy):\n intensity_list.append(sum(img[minx : maxx, y]) / (maxx - minx))\n \n return intensity_list \n \n'''\n fit the intensity list with two boltzmann models\n'''\ndef bound_sharpness(intensity_list):\n \n minvalue = max(intensity_list)\n minpoint = 0\n \n # get lowest point\n for i in range(len(intensity_list)):\n if intensity_list[i] < minvalue:\n minvalue = intensity_list[i]\n minpoint = i\n intensity_list = [(i - min(intensity_list)) / (max(intensity_list) - min(intensity_list)) for i in intensity_list]\n \n firsthalf = fit_model_yolk([i for i in range(minpoint)][::5], intensity_list[:minpoint][::-5])\n sigma0, b0 = firsthalf.fit(firsthalf.Boltzmann)\n \n \n secondhalf = fit_model_yolk([i for i in range(minpoint,len(intensity_list))][::5], intensity_list[minpoint:][::5])\n sigma1, b1 = secondhalf.fit(secondhalf.Boltzmann)\n# plt.plot(intensity_list[minpoint:][::5])\n\n return b0, b1, sigma0, sigma1, minpoint\n\n\ndef analyse(image_path):\n img = cv2.imread(image_path) # BGR image\n# showimg(img)\n \n # yolk info\n xtrain, ytrain, imgGRAY, scale = getyolk(img)\n b, a = train(xtrain, ytrain) 
# intercept, coef\n angle = -np.arctan(a)/np.pi*180\n minx, miny, maxx, maxy = scale # scale to locate the centre of the original image\n centre = ((minx+maxx)/2, (miny+maxy)/2)\n imgGRAY_rot = rotate(imgGRAY, centre, angle, show=False)\n #showimg(imgGRAY_rot)\n \n plt.figure()\n \n # intensity list\n print(maxx, minx)\n inten_li = intensity(imgGRAY_rot, minx, miny, maxx, maxy)\n inten_li_norm = [(i - min(inten_li)) / (max(inten_li) - min(inten_li)) for i in inten_li] \n \n # fit boltzmann\n b0, b1, sigma0, sigma1, minpoint = bound_sharpness(inten_li_norm)\n threshold0 = (round(minpoint - sigma0 +miny,1), round(max(inten_li[:minpoint])/2 + min(inten_li[:minpoint])/2,1))\n threshold1 = (round(sigma1 + miny,1), round(max(inten_li[minpoint:])/2 + min(inten_li[minpoint:])/2),1)\n minpoint += miny # adjust to practical scale\n print('threshold0: ' + str(threshold0))\n print('threshold1: ' + str(threshold1))\n \n \n # visualisation\n plt.plot(inten_li_norm)\n \n \n plt.xticks(np.arange(0, len(inten_li_norm), 200), np.arange(miny, maxy, 200))\n plt.yticks(np.arange(0,1.2,0.2), [round(i*(max(inten_li) - min(inten_li))+min(inten_li),2) for i in np.arange(0,1.2,0.2)])\n \n plt.text(0,0,'sharpness0 is '+str(round(b0,3))+'\\nand sharpness1 is '+str(round(b1,3)))\n plt.title('normalise gray level')\n plt.plot(threshold0[0]-miny, inten_li_norm[int(threshold0[0]-miny)], 'ro')\n plt.plot(threshold1[0]-miny, inten_li_norm[int(threshold1[0]-miny)], 'go')\n plt.annotate(s=str(threshold0), xy = (threshold0[0]-miny, inten_li_norm[int(threshold0[0]-miny)]), xytext=(threshold0[0]-miny-200, 0.45))\n plt.annotate(s=str(threshold1), xy = (threshold1[0]-miny, inten_li_norm[int(threshold1[0]-miny)]), xytext=(threshold1[0]-miny-50, 0.45))\n \n \n return True\n \n\n\n'''\n __main__\n'''\n\nif __name__ == '__main__':\n\n\n analyse('D://CytonemeSignaling//1b.tif')\n \n \n \n \n \n \n \n "
},
{
"alpha_fraction": 0.5572766661643982,
"alphanum_fraction": 0.5851345062255859,
"avg_line_length": 28.47857093811035,
"blob_id": "eb1915a7c0019a65f5999627642bd1312278f42a",
"content_id": "1cdfa59812ef56352ffd9c4030babaac33da337e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8328,
"license_type": "no_license",
"max_line_length": 142,
"num_lines": 280,
"path": "/nonlinear_graphread.py",
"repo_name": "GeraldWu23/CytonemeSignaling",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Nov 21 20:28:31 2019\n\n@author: wukak\n\"\"\"\n\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Nov 14 20:09:55 2019\n\n@author: wukak\n\"\"\"\n\nimport cv2\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.linear_model import LinearRegression\nMAXINT = 10000\nBGintensity = 190\nfrom fit_curve_yolk import fit_model_yolk\n\n\n\n\ndef showimg(imgBGR):\n plt.figure(figsize=(12,8))\n imgRGB = cv2.cvtColor(imgBGR, cv2.COLOR_BGR2RGB)\n plt.imshow(imgRGB, origin='lower')\n return imgRGB\n \ndef showimggray(imgBGR):\n plt.figure(figsize=(12,8))\n imgGRAY = cv2.cvtColor(imgBGR, cv2.COLOR_BGR2GRAY)\n plt.imshow(imgGRAY, cmap=plt.cm.gray)\n return imgGRAY\n\n\n \n\n\n\n'''\n get the img of the yolk\n \n yolk_thr needs to be decided according to the original img\n \n NOTE: the x y in plot() is different from x y in imshow()\n'''\ndef getyolk(img):\n imgGRAY = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n yolk_thr = 170\n maxp = 170\n minp = min(map(min, imgGRAY))\n xtrain = []\n ytrain = []\n minx = MAXINT\n miny = MAXINT\n maxx = 0\n maxy = 0\n \n # label the pixels and get target points\n for row in range(len(imgGRAY)):\n for col in range(len(imgGRAY[row])):\n if imgGRAY[row][col] > yolk_thr:\n# imgGRAY[row][col] = 255\n pass\n elif imgGRAY[row][col] > (maxp+minp)/2:\n# imgGRAY[row][col] = 180 # the colour of the light part\n minx = min(row, minx)\n miny = min(col, miny)\n maxx = max(row, maxx)\n maxy = max(col, maxy)\n else:\n# imgGRAY[row][col] = 30 # the colour of the dark part\n xtrain.append(row)\n ytrain.append(col)\n minx = min(row, minx)\n miny = min(col, miny)\n maxx = max(row, maxx)\n maxy = max(col, maxy)\n \n \n #yolk_centre = ((maxx+minx)/2, (maxy+miny)/2)\n return xtrain, ytrain, imgGRAY, (minx, miny, maxx, maxy)\n\n\n'''\n linear regression\n'''\ndef train(xtrain, ytrain):\n linreg = LinearRegression()\n linreg.fit(np.array(xtrain).reshape(-1,1), ytrain)\n return linreg.intercept_, linreg.coef_\n \n \n'''\n rotate images\n'''\ndef rotate(img, centre, angle, show = False):\n rotm = cv2.getRotationMatrix2D(centre, angle, 1)\n rotimg = cv2.warpAffine(img, rotm, (len(img[0]),len(img)))\n \n # clean the corners\n for row in range(len(img)):\n for col in range(len(img[0])):\n if rotimg[row][col] == 0:\n rotimg[row][col] = BGintensity\n \n if show:\n showimg(rotimg)\n \n return rotimg\n\n\n\n'''\n get intensities of a range\n'''\ndef intensity(imgGRAY, meetpointx, meetpointy, ori_coef, length=100):\n line_x, line_y = orthogonalline(meetpointx, meetpointy, ori_coef, length, graph = True)\n inten_li = []\n\n for i in range(len(line_x)):\n inten_li.append(imgGRAY[int(line_y[i])][int(line_x[i])]) # x, y of opencv\n \n return inten_li\n\n'''\n get an orthogonal line\n'''\ndef orthogonalline(meetpointx, meetpointy, ori_coef, length=100, graph = True):\n\n # plot the meeting point\n plt.plot(meetpointy, meetpointx, 'bo') # x, y of opencv\n ortho_coef = -1/(1/ori_coef) # this ori_coef is for opencv, while ortho_coef is for plt\n ortho_intercept = meetpointx - meetpointy * ortho_coef\n \n \n line_y = np.linspace(int(meetpointy - length/2), int(meetpointy + length/2), num = length) # x, y of opencv\n line_x = [i * ortho_coef + ortho_intercept for i in line_y]\n\n \n if graph:\n\n plt.plot(line_y, line_x, 'r')\n \n return np.array(line_x).reshape((-1,)), line_y\n\n\n\n'''\n get average intensities\n'''\n\ndef average_intensity(imgGRAY, ytop, ybottom, ori_coef, ori_intercept, num=6, length=400):\n xtop = (ytop - 
ori_intercept)/ori_coef\n xbottom = (ybottom - ori_intercept)/ori_coef\n \n # lines\n# print(ytop,xtop)\n# print(ybottom, xbottom)\n xlist = np.linspace(xbottom, xtop, num = num + 2)\n ylist = np.linspace(ybottom, ytop, num = num + 2)\n \n # intensity\n inten_stat = np.array([0.0]*length)\n \n for i in range(len(xlist)):\n # ignore the first and the last line\n if i > 0 and i < (len(xlist) - 1):\n \n inten_stat += np.array(intensity(imgGRAY, xlist[i], ylist[i], ori_coef, length = length)) / num\n return inten_stat[::-1] # order from left to right in img\n \n \n\n'''\n fit the intensity list with two boltzmann models\n'''\ndef bound_sharpness(intensity_list):\n \n minvalue = max(intensity_list)\n minpoint = 0\n \n # get lowest point\n for i in range(len(intensity_list)):\n if intensity_list[i] < minvalue:\n minvalue = intensity_list[i]\n minpoint = i\n intensity_list = [(i - min(intensity_list)) / (max(intensity_list) - min(intensity_list)) for i in intensity_list]\n \n try:\n \n firsthalf = fit_model_yolk([i for i in range(minpoint)][::5], intensity_list[:minpoint][::-5])\n sigma0, b0 = firsthalf.fit(firsthalf.Boltzmann)\n secondhalf = fit_model_yolk([i for i in range(minpoint,len(intensity_list))][::5], intensity_list[minpoint:][::5])\n sigma1, b1 = secondhalf.fit(secondhalf.Boltzmann)\n return b0, b1, sigma0, sigma1, minpoint\n except:\n return None\n \n\n\n#def analyse(image_path):\n# img = cv2.imread(image_path) # BGR image\n## showimg(img)\n# \n# # yolk info\n# xtrain, ytrain, imgGRAY, scale = getyolk(img)\n# b, a = train(xtrain, ytrain) # intercept, coef\n# angle = -np.arctan(a)/np.pi*180\n# minx, miny, maxx, maxy = scale\n# centre = ((minx+maxx)/2, (miny+maxy)/2)\n# imgGRAY_rot = rotate(imgGRAY, centre, angle, show=False)\n# #showimg(imgGRAY_rot)\n# \n# plt.figure()\n# \n# # intensity list\n# print(maxx, minx)\n# inten_li = average_intensity(imgGRAY, maxx, minx, a, b, num=10, length=400)\n# inten_li_norm = [(i - min(inten_li)) / (max(inten_li) - min(inten_li)) for i in inten_li] \n# \n# # fit boltzmann\n# b0, b1, sigma0, sigma1, minpoint = bound_sharpness(inten_li_norm)\n# threshold0 = (round(minpoint - sigma0 +miny,1), round(max(inten_li[:minpoint])/2 + min(inten_li[:minpoint])/2,1))\n# threshold1 = (round(sigma1 + miny,1), round(max(inten_li[minpoint:])/2 + min(inten_li[minpoint:])/2),1)\n# minpoint += miny # adjust to practical scale\n# print('threshold0: ' + str(threshold0))\n# print('threshold1: ' + str(threshold1))\n# \n# \n# # visualisation\n# plt.plot(inten_li_norm)\n# \n# \n# plt.xticks(np.arange(0, len(inten_li_norm), 50), np.arange(minpoint-200, minpoint+200, 50))\n# plt.yticks(np.arange(0,1.2,0.2), [round(i*(max(inten_li) - min(inten_li))+min(inten_li),2) for i in np.arange(0,1.2,0.2)])\n# \n# plt.text(0,0,'sharpness0 is '+str(round(b0,3))+'\\nand sharpness1 is '+str(round(b1,3)))\n# plt.title('normalise gray level')\n# plt.plot(threshold0[0]-miny, inten_li_norm[int(threshold0[0]-miny)], 'ro')\n# plt.plot(threshold1[0]-miny, inten_li_norm[int(threshold1[0]-miny)], 'go')\n# plt.annotate(s=str(threshold0), xy = (threshold0[0]-miny, inten_li_norm[int(threshold0[0]-miny)]), xytext=(threshold0[0]-miny-200, 0.45))\n# plt.annotate(s=str(threshold1), xy = (threshold1[0]-miny, inten_li_norm[int(threshold1[0]-miny)]), xytext=(threshold1[0]-miny-50, 0.45))\n# \n# \n# return True\n \n\n\n'''\n __main__\n'''\n\nif __name__ == '__main__':\n\n img = cv2.imread('D://CytonemeSignaling//1b.tif')\n xtrain, ytrain, imgGRAY, (minx, miny, maxx, maxy) = getyolk(img)\n intercept,coef = 
train(xtrain, ytrain)\n \n \n \n \n\n# plt.figure(figsize=(10,6)) \n# showimg(imgGRAY)\n ytop = max(ytrain)\n ybottom = min(ytrain)\n \n plt.figure()\n showimg(imgGRAY)\n inten_li = average_intensity(imgGRAY, ytop, ybottom, coef[0], intercept, num=20, length=100)\n inten_li_norm = [(i - min(inten_li)) / (max(inten_li) - min(inten_li)) for i in inten_li] \n print(bound_sharpness(inten_li_norm))\n# analyse('D://CytonemeSignaling//1b.tif')\n\n plt.plot(1064, 907)\n plt.plot(854, 377)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "
},
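The file above ends by normalising an intensity profile and fitting each half with a Boltzmann curve through the repo's `fit_model_yolk` helper (defined in `fit_curve_yolk.py`, which is not part of this dump). Below is a minimal sketch of that fitting step; since the helper is missing, the two-parameter Boltzmann form and the use of `scipy.optimize.curve_fit` are assumptions.

```python
# Sketch only: fit_model_yolk is not in this dump, so the exact parameterisation
# is an assumption; this uses the standard two-parameter Boltzmann sigmoid.
import numpy as np
from scipy.optimize import curve_fit

def boltzmann(x, sigma, b):
    # sigma: position of the half-maximum; b: width of the transition
    return 1.0 / (1.0 + np.exp((sigma - x) / b))

# synthetic edge profile standing in for the normalised intensity list
xs = np.linspace(0, 100, 200)
ys = boltzmann(xs, 50.0, 4.0) + np.random.normal(0, 0.02, xs.size)

(sigma, b), _ = curve_fit(boltzmann, xs, ys, p0=[np.median(xs), 1.0])
print(sigma, b)  # under this form, smaller b = steeper (sharper) boundary
```

Under this parameterisation a smaller `b` means a steeper transition, which matches the script's use of `b0`/`b1` as boundary-sharpness measures.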
{
"alpha_fraction": 0.5217170119285583,
"alphanum_fraction": 0.5463330149650574,
"avg_line_length": 33.236934661865234,
"blob_id": "52f30df797dace8fcfda5c3b04355cdf381091a4",
"content_id": "8e7051d050a933a230484105d0fe9a66db9ecec1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9831,
"license_type": "no_license",
"max_line_length": 179,
"num_lines": 287,
"path": "/classifier.py",
"repo_name": "GeraldWu23/CytonemeSignaling",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Jun 27 15:38:34 2019\n\n@author: wukak\n\"\"\"\n\nfrom sklearn.svm import SVC\nfrom readfile import *\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom cvxopt import matrix as matrix\nfrom cvxopt import solvers\nimport subprocess as sp\n\n\n\n''' TwoClassifier \n \n choose two labels from [0,1,2] and get the boundary of these two classes \n\n'''\n\nclass TwoClassifier:\n def __init__(self, path, lab0, lab1, kernel = 'linear', gamma='auto', C = 1, degree = 3): # lab0 and lab1 are labels of classes\n self.reader = Readdat(path)\n self.lab0 = lab0\n self.lab1 = lab1\n self.kernel = kernel\n self.gamma = gamma\n self.C = C\n self.degree = degree\n \n # collect data by range and labels\n data0, label0 = self.reader.classify(self.lab0, data=self.reader.Xtr, label=self.reader.ttr) # train on training data\n data1, label1 = self.reader.classify(self.lab1, data=self.reader.Xtr, label=self.reader.ttr) # train on training data\n \n # stack two classes\n self.data = np.vstack([data0, data1])\n self.label = np.hstack([label0, label1])\n \n # shuffle the stack\n rand = np.random.permutation(len(self.label))\n self.data = self.data[rand]\n self.label = self.label[rand]\n self.label = [1 if i==lab0 else -1 for i in self.label] # map the original lab0 to 1 and lab1 to -1 for svm\n \n \n def train(self):\n # for linear, gamma doesn't affect the classifier\n svm = SVC(kernel = self.kernel, gamma = self.gamma, C = self.C, degree = self.degree) \n# print(self.degree)\n svm.fit(self.data, self.label) \n return svm\n \n\n''' ThreeClassifier\n\n get 3 classes classifier\n \n def map_back():\n pred_mapped(): the prediction with label 1 and -1 which are mapped from 0/1/2\n predict(): return the prediction table with the three classifier\n''' \n\nclass ThreeClassifier:\n def __init__(self, path, kernel = 'linear', gamma='auto', C = 1, degree = 3):\n self.kernel = kernel\n self.gamma = gamma\n self.C = C\n self.degree = degree\n self.cla01 = TwoClassifier(path, 0, 1, self.kernel,gamma=self.gamma, C=self.C, degree=self.degree).train()\n self.cla02 = TwoClassifier(path, 0, 2, self.kernel,gamma=self.gamma, C=self.C, degree=self.degree).train()\n self.cla12 = TwoClassifier(path, 1, 2, self.kernel,gamma=self.gamma, C=self.C, degree=self.degree).train()\n self.reader = Readdat(path)\n self.Xte = self.reader.Xte\n self.tte = self.reader.tte\n \n \n def map_back(self, pred_mapped, lab0, lab1): # map 1,-1 to original label 0,1,2\n return [lab0 if i==1 else lab1 for i in pred_mapped]\n \n \n def predict(self):\n # build classifiers\n pred_mapped01 = self.cla01.predict(self.reader.Xte)\n pred_mapped12 = self.cla12.predict(self.reader.Xte)\n prediction = []\n \n result = np.vstack([self.map_back(pred_mapped01,0,1.5), self.map_back(pred_mapped12,0.5,2)]) # for cla01, bigger than 0 can be 1 or 2, for cla12, less than 2 can be 0 or 1\n \n # overall prediction\n correct = 0\n size = len(result[0]) # the length of test data\n \n for i in range(size):\n\n cell_pred = round(np.average(result[:,i])) # return the most approximate int from the result\n prediction.append(cell_pred) \n if cell_pred == self.reader.tte[i]:\n correct += 1\n \n return np.vstack([result, prediction, self.reader.tte]) # return the prediction and the real labels \n \n \n def view_predict(self):\n prediction = np.column_stack((self.Xte, self.predict()[2,:])) # get the data and their prediction\n \n prediction_0 = prediction[np.where(prediction[:,2] == 0)]\n prediction_1 = 
prediction[np.where(prediction[:,2] == 1)]\n prediction_2 = prediction[np.where(prediction[:,2] == 2)]\n \n plt.figure(figsize=(7,7))\n plt.scatter(prediction_0[:,0], prediction_0[:,1], c='C0', marker='.', label = '0')\n plt.scatter(prediction_1[:,0], prediction_1[:,1], c='C1', marker='.', label = '1')\n plt.scatter(prediction_2[:,0], prediction_2[:,1], c='C2', marker='.', label = '2')\n plt.legend()\n plt.title('PREDICTION')\n# plt.xlim(-1,2)\n# plt.ylim(-1,2)\n# plt.clf()\n \n return \n \n \n''' plot hyperplane of linear svm \n \n svm = svm\n min_x = minimum x\n max_x = maximum x\n linestyle = linestyle(default='dashed')\n\n svm has two coefficient w[0] and w[1] with which we can get the k and the b of the margin(a straight line)\n'''\ndef plot_hyperplane(svm, min_x, max_x, linestyle='dashed'): # plot the boundary with svm, only valid when kernel is linear\n w = svm.coef_[0]\n intercept = svm.intercept_[0]\n k = -w[0]/w[1]\n \n xx = np.linspace(min_x, max_x) # make sure the line is long enough\n yy = k * xx - intercept/w[1]\n plt.plot(xx, yy, linestyle=linestyle) # add 'k' for black line\n \n\n\n''' svm script '''\n\ndef linearKrl(a, b):\n return np.dot(a, b)\n\ndef rbfKrl(x, y, gamma=1):\n \"\"\"RBF kernel with precision gamma.\"\"\"\n d = x-y\n return np.exp(-gamma*np.dot(d, d))\n\n\nclass svm_sc:\n def __init__(self, Xtr, ttr, Xte, kernel = 'linearKrl', C = 1, graph = False):\n self.Xtr = Xtr\n self.ttr = ttr\n self.Xte = Xte\n self.kernel = kernel\n self.C = C\n self.threshold = 1e-4\n self.slabel = min(self.ttr) # smaller label before mapped\n self.blabel = max(self.ttr) # bigger label before mapped\n \n \n # labels have to be 1 or -1 in svm\n if self.blabel != 1 or self.slabel != -1:\n self.ttr = np.array([1 if i == self.blabel else -1 for i in self.ttr])\n \n \n lenX = len(self.Xtr) # length of training set\n \n # set the parametres in cvxopt\n K = np.zeros((lenX,lenX)) # kernel<xn, xm>\n for i in range(lenX):\n for j in range(i,lenX):\n K[i,j] = self.kernel(self.Xtr[i], self.Xtr[j])\n K[j,i] = self.kernel(self.Xtr[j], self.Xtr[i])\n P = matrix(np.outer(self.ttr, self.ttr) * K) # tntm<xn, xm>\n q = matrix((-1) * np.ones(lenX)) # -1s\n G = matrix(np.vstack((((-1) * np.eye(lenX)),np.eye(lenX)))) # -1, 1 (C >= an >= 0)\n h = matrix((np.hstack((np.zeros(lenX),(self.C*np.ones(lenX)))))) # 0s, Cs\n A = matrix(self.ttr, (1,lenX), 'd') # Zigma at = 0\n b = matrix(np.zeros(1)) # 0\n \n \n # solve and get alphas(the weight of punishment of each point)\n try:\n sol = solvers.qp(P,q,G,h,A,b)\n except:\n print(A,lenX,A.typecode)\n return\n alphas = np.array(sol['x'])\n \n \n # get support vectors\n # a point with punishment weight larger than a \n edge_ind = (alphas>self.threshold).reshape(-1,) # indicator of sv\n ind = np.arange(len(alphas))[edge_ind] # index of sv\n\n try:\n t_sv = self.ttr[edge_ind] # label of sv\n except:\n return edge_ind\n \n print('\\n\\nnumber of support vectors is: ' + str(np.sum(edge_ind)))\n print('number of points is: ' + str(len(self.ttr)) + '\\n')\n \n alp_sv = alphas[edge_ind] # weight of sv(punishment)\n X_sv = self.Xtr[edge_ind] # sv\n\n # get the bias\n b_lst = []\n for i in ind:\n bi = self.ttr[i] - alp_sv * self.ttr[i] * K[i,edge_ind] # sigma(an) * tn * K(x) + b = 1\n b_lst.append(bi)\n b = np.mean(b_lst)\n norm = np.linalg.norm(b)\n b /= norm\n \n \n # calculate weight if linear \n if self.kernel == linearKrl:\n w = (alp_sv * t_sv).dot(X_sv).sum(axis=0)\n else:\n w = None\n \n \n # for test data nonlinear hyperplane is not a line\n pred_val = []\n if 
self.kernel == linearKrl:\n pred_val = (self.Xte * w).sum(axis=1) + b\n else: \n for i_te in range(len(self.Xte)):\n pred = 0\n for alp, t, X in zip(alp_sv, t_sv, X_sv):\n pred += (alp * t)* self.kernel(self.Xte[i_te], X)\n pred += b\n pred_val.append(pred)\n \n \n # get sig\n sig = np.sign(pred_val).reshape(-1,)\n \n # visualise the result\n if graph == True:\n plt.figure(figsize=(7,7))\n plt.plot(self.Xte[sig==-1,0], self.Xte[sig==-1,1], '.', color ='#591D67', label = self.slabel)\n plt.plot(self.Xte[sig== 1,0], self.Xte[sig== 1,1], '.', color ='#FDD225', label = self.blabel)\n plt.legend()\n \n \n return \n \n \n \n\n\nif __name__ == '__main__':\n path4 = 'D:/CytonemeSignaling/testDataStudySharpness_linear/SameSharpness/coords_00100.dat'\n# test = Readdat(path4)\n# \n# Xtr = test.Xtr\n# ttr = test.ttr\n# Xte = test.Xte\n# tte = test.tte\n# data1, label1 = test.classify(1, Xtr, ttr)\n# data2, label2 = test.classify(2, Xtr, ttr)\n# Xtr12 = np.vstack([data1, data2])\n# ttr12 = np.hstack([label1,label2])\n# \n# \n#\n# rand = np.random.permutation(len(ttr12))\n# Xtr12 = Xtr12[rand]\n# ttr12 = ttr12[rand]\n# ttr12_mapped = np.array([1 if i==1 else -1 for i in ttr12]) # map the original lab0 to 1 and lab1 to -1 for svm\n#\n# a = svm_sc(Xtr12, ttr12, Xte, linearKrl, C=0.1, graph=True)\n# \n# test.scatter(Xtr12, ttr12)\n# \n# # clear memory\n# tmp = sp.call('cls',shell=True)\n\n\n\n\n\n"
},
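`ThreeClassifier` above trains three pairwise SVMs but its `predict()` composes the decision from two of them: each binary prediction is mapped back to a "soft" label (0/1.5 for the 0-vs-1 machine, 0.5/2 for the 1-vs-2 machine) and the rounded average becomes the final class. A self-contained sketch of that voting scheme; the data is synthetic, only the sklearn usage matches the file.

```python
# Toy reproduction of the voting scheme in ThreeClassifier.predict(); the data
# below is synthetic, only the sklearn API matches the file.
import numpy as np
from sklearn.svm import SVC

rng = np.random.default_rng(0)
X = rng.normal(size=(300, 2)) + np.repeat([[0, 0], [2, 2], [4, 4]], 100, axis=0)
t = np.repeat([0, 1, 2], 100)

def fit_pair(a, b):
    mask = np.isin(t, [a, b])
    y = np.where(t[mask] == a, 1, -1)            # SVC is trained on +1/-1
    return SVC(kernel="linear").fit(X[mask], y)

cla01, cla12 = fit_pair(0, 1), fit_pair(1, 2)
soft01 = np.where(cla01.predict(X) == 1, 0.0, 1.5)   # "class 0" vs "at least 1"
soft12 = np.where(cla12.predict(X) == 1, 0.5, 2.0)   # "at most 1" vs "class 2"
pred = np.round((soft01 + soft12) / 2)               # 0.25->0, 1.0->1, 1.75->2
print("accuracy:", np.mean(pred == t))
```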
{
"alpha_fraction": 0.4899712800979614,
"alphanum_fraction": 0.5247288942337036,
"avg_line_length": 36.08942413330078,
"blob_id": "3280007aa61e7d5f8217b42a7077e2fead9f9ed0",
"content_id": "b48cffc72d09f24bd1cc87e263dca1fbc346eed1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 38639,
"license_type": "no_license",
"max_line_length": 156,
"num_lines": 1040,
"path": "/script.py",
"repo_name": "GeraldWu23/CytonemeSignaling",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Jun 26 16:56:15 2019\n\n@author: wukak\n\"\"\"\n\nimport sys\nsys.path.append('D:/CytonemeSignaling/')\nfrom readfile import Readdat\nfrom classifier import ThreeClassifier, plot_hyperplane\nfrom sklearn.metrics import auc\nfrom math import tan, pi, atan\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport time\nfrom fit_curve import fit_model\nimport matplotlib\nimport subprocess as sp\n\n\nclass Analyse:\n '''\n analyse a file, input its path \n \n self.visualise_data() : visualise the data with two boundaries represented by a dashed line\n self.view_predict : view data with their predicted label\n self.view_ROC_AUC() : view the ROC curves and AUC values of this dataset \n self.view_correctness() : view the graph with the misclassified points coloured\n self.view_TPR_theta() : view TPR against theta\n '''\n \n def __init__(self, path, kernel = 'linear', gamma = 'auto', C = 1, degree =3):\n self.path = path\n \n self.kernel = kernel\n # the kernel the classifier uses(rbf, default = linear, polynomial)\n \n self.gamma = gamma\n # kernel coefficient for poly and rbf \n \n self.C = C\n # Penalty parameter C of the error term of classifier\n \n self.degree = degree\n # degree for polynomial kernel, ignored by the others\n \n self.reader = Readdat(path) \n # reader of the dataset\n \n self.classifier = ThreeClassifier(path, self.kernel, C = self.C, gamma = self.gamma, degree=self.degree) \n # train a classifier for this dataset\n \n self.X = self.reader.data\n \n self.t = self.reader.label\n \n self.Xte = self.reader.Xte \n # test data\n \n self.tte = self.reader.tte \n # label of test data\n \n self.prediction = self.classifier.predict() \n # the 0 row is the result of cla01\n # the 1 row is the result of cla12\n # the 2 row is the overall prediction\n # the 3 row is the original label\n \n self.svm01 = self.classifier.cla01\n self.svm12 = self.classifier.cla12\n # two svms used to identify the boundaries\n \n if kernel == 'linear':\n w01 = self.svm01.coef_[0]\n intercept01 = self.svm01.intercept_[0]\n self.k01 = -w01[0]/w01[1]\n self.b01 = - intercept01/w01[1]\n w12 = self.svm12.coef_[0]\n intercept12 = self.svm12.intercept_[0]\n self.k12 = -w12[0]/w12[1]\n self.b12 = - intercept12/w12[1]\n # the infomation of two boundaries\n \n def accuracy(self):\n ''' the accuracy of the prediction '''\n return np.mean(self.prediction[2,:] == self.prediction[3,:])\n \n def visualise_data(self):\n '''\n visualise the data with two boundaries represented by a dashed line \n '''\n \n plt.figure(figsize=(7,7))\n data0,label0 = self.reader.classify(0)\n data1,label1 = self.reader.classify(1)\n data2,label2 = self.reader.classify(2)\n \n # scatter them with labels\n plt.scatter(data0[:,0], data0[:,1], c=['C0']*len(data0), marker = '.', label = 0)\n plt.scatter(data1[:,0], data1[:,1], c=['C1']*len(data1), marker = '.', label = 1)\n plt.scatter(data2[:,0], data2[:,1], c=['C2']*len(data2), marker = '.', label = 2)\n plt.legend()\n# plt.xlim(-1,2)\n# plt.ylim(-1,2)\n \n if self.kernel == 'linear':\n plot_hyperplane(self.svm01, 0, 2, 'dashed')\n plot_hyperplane(self.svm12, 0, 2, 'dashed')\n plt.title('VISUALISE DATA')\n plt.show()\n # visualise the result of the dataset and the prediction \n\n \n def view_predict(self):\n '''\n view data with their predicted label\n \n '''\n return self.classifier.view_predict()\n \n \n def view_data_predict(self):\n '''\n do self.visualise_data() and self.view_predict()\n \n ''' \n \n data0,label0 = 
self.reader.classify(0)\n data1,label1 = self.reader.classify(1)\n data2,label2 = self.reader.classify(2)\n \n # visualise data\n \n fig, axs = plt.subplots(nrows = 1, ncols = 2)\n fig.set_figheight(6)\n fig.set_figwidth(13)\n \n axs[0].scatter(data0[:,0], data0[:,1], c=['C0']*len(data0), marker = '.', label = 0)\n axs[0].scatter(data1[:,0], data1[:,1], c=['C1']*len(data1), marker = '.', label = 1)\n axs[0].scatter(data2[:,0], data2[:,1], c=['C2']*len(data2), marker = '.', label = 2)\n axs[0].legend(fontsize = 12, loc=2)\n# axs[0].set_xlim(-1,2)\n# axs[0].set_ylim(-1,2)\n axs[0].set_title('All data'+' kernel: '+ self.kernel , fontsize = 15)\n \n \n # view_predict\n \n prediction = np.column_stack((ana.Xte, ana.classifier.predict()[2,:]))\n prediction_0 = prediction[np.where(prediction[:,2] == 0)]\n prediction_1 = prediction[np.where(prediction[:,2] == 1)]\n prediction_2 = prediction[np.where(prediction[:,2] == 2)]\n \n axs[1].scatter(prediction_0[:,0], prediction_0[:,1], c='C0', marker='.', label = '0')\n axs[1].scatter(prediction_1[:,0], prediction_1[:,1], c='C1', marker='.', label = '1')\n axs[1].scatter(prediction_2[:,0], prediction_2[:,1], c='C2', marker='.', label = '2')\n axs[1].legend(fontsize = 12, loc=2)\n axs[1].set_title('PREDICTION accuracy: '+str(self.accuracy())+ ' C: '+str(self.C)+' gamma: '+str(self.gamma), fontsize = 12)\n# axs[1].set_xlim(-1,2)\n# axs[1].set_ylim(-1,2)\n \n return True\n \n \n def get_boundary_ROC(self, k, b, tar_lab, data, label):\n '''\n get TPR and FPR for a specific boundary, the boundary depends on the input data\n \n must be linear kernel\n \n k = slope of the boundary\n b = bias of the boundary\n tar_lab = the label of the boundary, i.e. the points above the boundary is supposed to <= tar_lab\n data = test data\n label = actual label of test data\n \n '''\n \n if self.kernel != 'linear':\n print('Not Linear Kernel')\n return False\n \n TP = 0\n FP = 0\n TN = 0\n FN = 0\n for i in range(len(data)): # compare the actual position and the label\n x = data[i][0]\n y = data[i][1]\n \n # Positive: label smaller than target, Negative: label larger than target\n if y >= (k * x + b): # points above the line or on the line means that the prediction is smaller than target\n if label[i] <= tar_lab: # the actual label is less than or equal to target\n TP += 1\n else:\n FP += 1\n else: # points under the line means that the prediction is larger than target\n if label[i] > tar_lab: # actual label is larger than target\n TN += 1\n else:\n FN += 1\n \n TPR = TP / (TP + FN)\n FPR = FP / (FP + TN)\n \n return TPR, FPR\n # get tpr and fpr for a specific boundary\n \n \n\n def getTPR_FPR(self, b, tar_lab, data, label, angle=(-pi/2, pi/2)):\n '''\n return list of TPR and list FPR for a bias, the boundary will sweep through the space specified by b and angle(range)\n \n b = bias of boundary\n tar_lab = the label of the boundary, i.e. 
the points above the boundary is supposed to <= tar_lab\n data = test data\n label = actual label of test data\n angle = the range of changing k(default to be (-pi/2, pi/2))\n \n '''\n \n if self.kernel != 'linear':\n print('Not Linear Kernel')\n return False\n \n TPR = []\n FPR = []\n for ang in np.linspace(angle[0], angle[1], num = 100): # for each boundary, should be fixed if compare the sharpness\n \n k = tan(ang) # transfer angle to slope\n \n k_tpr, k_fpr = self.get_boundary_ROC(k, b, tar_lab, data, label) # get tpr and fpr for a boundary\n TPR.append(k_tpr) # TPR\n FPR.append(k_fpr) # FPR\n \n return TPR, FPR\n # return TPR and FPR list for plot and auc()\n \n \n def view_ROC_AUC(self):\n '''\n view the ROC curves and AUC values of this dataset \n \n '''\n # TPR and FPR\n tpr01, fpr01 = self.getTPR_FPR(self.b01, 0, self.Xte, self.tte) # get TPR, FPR list of boundary 01\n tpr12, fpr12 = self.getTPR_FPR(self.b12, 1, self.Xte, self.tte) # get TPR, FPR list of boundary 12\n \n \n # ROC curve\n plt.figure(figsize=(10,7))\n plt.plot(fpr01, tpr01)\n plt.plot(fpr12, tpr12)\n plt.title('ROC CURVE')\n # AUC\n auc01 = auc(fpr01, tpr01)\n auc12 = auc(fpr12, tpr12)\n print('AUC01: ' + str(auc01) + ' AUC12: ' + str(auc12))\n \n return True\n # plot ROC curves and AUCs of both boundaries\n \n \n \n def getMiscDis(self, k01, k12, data, label, b01 = 0, b12 = 0, origin = (0,0), angular = True):\n '''\n return the average distance from misclassified points to the boundary specified by k and b\n for linear classifier\n \n k = slope of the boundary\n tar_lab = the label of the boundary, i.e. the points above the boundary is supposed to <= tar_lab\n data = test data\n label = actual label of test data\n bias = bias of the boundary\n origin = angle against what origin, default to be (0,0)\n \n '''\n \n if self.kernel != 'linear':\n print('Not Linear Kernel')\n return False\n \n distance0 = []\n distance1 = []\n \n for i in range(len(data)): # compare the actual position and the label\n x = data[i][0]\n y = data[i][1]\n lab = label[i]\n rho = np.sqrt((x - origin[0])**2 + (y - origin[1])**2)\n line_y01 = k01 * x + b01 # estimated y on bound01\n line_y12 = k12 * x + b12 # estimated y on bound12\n \n # bound 01\n # if prediction and actual label are on different sides\n if (line_y01 <= y and lab > 0) or (line_y01 > y and lab <= 0) :\n \n # kx - y + b = 0\n point_dis = abs(k01 * x + (-1) * y + b01) / np.sqrt(k01**2 + (-1)**2)\n if point_dis/rho > 1 or point_dis/rho < -1:\n print(x,y,origin,rho,point_dis,k01,b01)\n continue\n \n # choose angular distance or raw distance\n if angular:\n distance0.append(np.arcsin(point_dis / rho)) # get the angle distance\n else:\n distance0.append(point_dis) # get the raw distance\n \n \n # bound 12\n # if prediction and actual label are on different sides\n if (line_y12 <= y and lab > 1) or (line_y12 > y and lab <= 1) :\n \n # kx - y + b = 0\n point_dis = abs(k12 * x + (-1) * y + b12) / np.sqrt(k12**2 + (-1)**2)\n if point_dis/rho > 1 or point_dis/rho < -1:\n print(x,y,origin,rho,point_dis,k12,b12)\n continue\n \n # choose angular distance or raw distance\n if angular:\n distance1.append(np.arcsin(point_dis / rho)) # get the angle distance\n else:\n distance1.append(point_dis) # get the raw distance\n \n \n return np.mean(distance0), np.mean(distance1) \n # return the average distance from misclassified points to the boundary\n \n \n \n def show_correctness(self, data):\n '''\n get correctness label of data(label compared to prediction)\n \n k = slope of the boundary\n b = 
bias of the boundary\n tar_lab = the label of the boundary, i.e. the points above the boundary is supposed to <= tar_lab\n data = test data\n label = actual label of test data\n '''\n \n \n cor_classified = []\n \n for i in range(len(data)): # compare the actual position and the label\n x = data[i][0]\n y = data[i][1]\n prediction = self.prediction[2,i]\n label = self.prediction[3,i]\n \n cor_classified.append([[x,y], prediction == label])\n \n return cor_classified \n # return a list to tell which cells are correctly classified and which are not\n \n \n def view_correctness(self, title='CORRECT CLASSIFIED CELLS: '):\n '''\n view the graph with the misclassified points coloured\n \n titile = title of the graph, default='CORRECT CLASSIFIED CELLS'\n \n '''\n \n \n corr = self.show_correctness(self.Xte) # correctness overall \n plt.figure(figsize = (7,7))\n data_True = np.array([dat[0] for dat in corr if dat[1]==True])\n data_False = np.array([dat[0] for dat in corr if dat[1]==False]) \n plt.scatter(data_True[:, 0],data_True[:, 1],c='gray', marker='.', label = 'True')\n plt.scatter(data_False[:,0],data_False[:,1],c='#fa5fa0', marker='.', label = 'False')\n plt.legend()\n# plt.xlim(-1,2)\n# plt.ylim(-1,2)\n if title:\n plt.title(title+self.kernel+' degree: '+str(self.degree))\n \n if self.kernel == 'linear':\n plot_hyperplane(self.svm01, 0, 2, 'dashed')\n plot_hyperplane(self.svm12, 0, 2, 'dashed')\n\n return True \n # plot correct classified cells \n \n \n def getTPR_theta(self, theta_list, data, label, tar_lab):\n '''\n return list of [theta, TPR], the boundary sweeps through the space based on theta_list, each of them crosses the origin\n \n best for linear kernel\n \n theta_list = thetas of the boundaries\n data = test data\n label = actual label of test data\n tar_lab = the label of the boundary, i.e. 
the points above the boundary is supposed to <= tar_lab\n \n '''\n \n TPR = []\n k_list = np.array([tan(theta/180 * pi) for theta in theta_list])\n \n for k in k_list: # for each boundary, should be fixed if compare the sharpness\n TP = 0\n FN = 0\n for i in range(len(data)): # compare the actual position and the label\n x = data[i][0]\n y = data[i][1]\n \n # Positive: label smaller than target, Negative: label larger than target\n # y = k * x cross the origin\n if y >= k * x: # points above the line or on the line means that the prediction is smaller than target\n if label[i] <= tar_lab: # the actual label is less than or equal to target\n TP += 1\n else: # points under the line means that the prediction is larger than target\n if label[i] <= tar_lab: # actual label is smaller than target\n FN += 1\n \n TPR.append(TP / (TP + FN)) # TPR\n \n \n return np.array([[theta,tpr] for theta,tpr in zip(theta_list, TPR)])\n # get theta tpr pair to view the tpr against theta\n \n \n def view_TPR_theta(self, theta_range=np.linspace(-90, 90, num = 100)):\n '''\n view TPR against Theta\n best for linear kernel\n \n theta_range = chosen angle of boundaries\n '''\n \n plt.figure(figsize=(10,7))\n \n # for boundary 01\n tpr_theta01 = self.getTPR_theta(theta_range, self.Xte, self.tte, 0)\n plt.scatter(tpr_theta01.T[0], tpr_theta01.T[1])\n plt.axvline(x=atan(self.k01),c='#ffaa33',linestyle='--', label = 'slope_01 = '+str(round(self.k01/pi*180,3))) # draw verticle line at arctan(k01) \n plt.legend(prop={'size': 15})\n plt.xlabel('Theta')\n plt.ylabel('TPR')\n# plt.xticks(np.linspace(-90, 90, 7),[theta for theta in np.linspace(-90, 90, 7 )])\n \n # for boundary 12\n tpr_theta12 = self.getTPR_theta(theta_range, self.Xte, self.tte, 1)\n plt.scatter(tpr_theta12.T[0], tpr_theta12.T[1])\n plt.axvline(x=atan(self.k12),c='#ffaa33',linestyle='--', label = 'slope_12 = '+str(round(self.k12/pi*180,3))) # draw verticle line at arctan(k01) \n plt.legend(prop={'size': 15})\n plt.xlabel('Theta')\n plt.ylabel('TPR')\n# plt.xticks(np.linspace(-pi/2, pi/2, 7),[theta for theta in np.linspace(-90, 90, 7 )])\n \n plt.title('TPR_theta')\n \n \n \n def TPR_delta_theta(self, theta_list, cell_label = True):\n '''\n return values for probability distribution(the portion of a label in an area)\n best for linear kernel\n \n theta_list = thetas of the boundaries (-90,90)\n cell_label = if True, calculate the increasement of True points(actual label) sweep by last move, if False, calculate the False points(actual label)\n '''\n\n # copy to a new data set\n# data = self.Xte.copy()\n# label = self.tte.copy()\n data = self.X.copy()\n label = self.t.copy()\n \n prob_01 = [] # probability of target point (True or False) for bound01\n prob_12 = [] # probability of target point (True or False) for bound12\n \n for t in [ang for ang in range(len(theta_list) - 1)][::-1]: # for each starting angle of a boundary\n \n # upper bound: y = tan(theta_list[t] / 180 * pi) * x\n # lower bound: y = tan(theta_list[t+1] / 180 * pi) * x\n\n count = 0 # number of point between those boundary\n tar_count_01 = 0 # number of target point between those boundary for 01\n tar_count_12 = 0 # number of target point between those boundary for 12\n remove_list = [] # the list of points(t) to remove\n \n for d in range(len(data)): # for each data, data should have the same size to label\n x = data[d][0]\n y = data[d][1]\n lab = label[d]\n upper_bound = tan(theta_list[t+1] / 180 * pi) * x\n lower_bound = tan(theta_list[t] / 180 * pi) * x\n \n \n \n if upper_bound >= y >= 
lower_bound: # if this point is in the space\n \n # point in the space\n count += 1 \n remove_list.append(d)\n \n # if target_label for boundary 01\n if (lab >= 1) == cell_label:\n tar_count_01 += 1 \n # if target_label for boundary 12\n if (lab >= 2) == cell_label:\n tar_count_12 += 1\n \n \n # collect the result\n try:\n prob_01.append(tar_count_01/count)\n except ZeroDivisionError:\n prob_01.append(1-cell_label)\n try:\n prob_12.append(tar_count_12/count) \n except ZeroDivisionError:\n prob_12.append(1-cell_label)\n \n # remove points in older spaces\n data = np.delete(data, remove_list, axis = 0)\n label = np.delete(label, remove_list)\n \n return prob_01, prob_12\n\n\n\n def view_TPR_delta_theta(self, cell_label = True, sample = 50, title = None):\n '''\n return values for probability distribution\n best for linear kernel\n \n theta_list = thetas of the boundaries (-90,90)\n cell_label = if True, calculate the increasement of True points(actual label) sweep by last move, if False, calculate the False points(actual label)\n sample = how many samples are chosen to form the curve\n title = title of graph\n '''\n \n plt.figure(figsize=(12,5))\n c01,c12 = self.TPR_delta_theta(np.linspace(0, 90, num = sample+1), cell_label=cell_label)\n plt.plot(c01, label = 'boundary 01')\n plt.plot(c12, label = 'boundary 12')\n if title:\n plt.title(title, fontsize=15)\n plt.legend(fontsize=12)\n plt.xticks(np.linspace(1, sample, num=10), [round(tick) for tick in np.linspace(0, 90, num=10)])\n plt.show()\n \n \n def view_boltzmann_model(self, graph = True, sample = 50, title = None):\n ''' \n get the TPR against the angle\n \n '''\n \n xticks = np.linspace(0, 90, num = sample) # windows no\n xticks_b = np.linspace(0, 90, num = sample+1) # boundary no\n \n c01, c12 = self.TPR_delta_theta(xticks_b)\n fit_01 = fit_model(xticks, c01)\n sigma01, b01 = fit_01.plot_model(xticks, c01, fit_01.Boltzmann, graph = False, inputc='C0', predictc='C2',\n inputname='Boundary 01', predictname='Model 01')\n fit_12 = fit_model(xticks, c12)\n sigma12, b12 = fit_12.plot_model(xticks, c12, fit_12.Boltzmann, graph = False, inputc='C1', predictc='C3',\n inputname='Boundary 12', predictname='Model 12')\n \n \n # show sigma and b\n print\n print(sigma01, b01)\n print(sigma12, b12)\n print\n print\n \n pred_y01 = fit_01.Boltzmann(xticks, sigma01, b01)\n pred_y12 = fit_12.Boltzmann(xticks, sigma12, b12)\n \n plt.figure(figsize=(12,5))\n plt.plot(xticks, c01, 'o', label = 'boundary 01', c = 'C0')\n plt.plot(xticks, c12, 'o', label = 'boundary 12', c = 'C3')\n plt.plot(xticks, pred_y01, label = 'model 01', c = 'C2')\n plt.plot(xticks, pred_y12, label = 'model 12', c = 'C1')\n plt.xticks(np.linspace(0, 90, num = 10)[::-1])\n plt.xlabel('angle')\n plt.ylabel('TPR')\n plt.text(0, 0.4, 'sigma01: '+str(round(sigma01,3)))\n plt.text(0, 0.3, 'b01: '+str(round(b01,3)))\n plt.text(0, 0.2, 'sigma12: '+str(round(sigma12,3)))\n plt.text(0, 0.1, 'b12: '+str(round(b12,3)))\n plt.legend()\n if title:\n plt.title(title, fontsize = 15)\n \n return True\n \n \n\ndef Paths(prefix): \n ''' choose one of the path set and return a generator generates one path each time '''\n \n for i in range(1, 101):\n number_code = str(i)\n while(len(number_code) < 5): number_code = '0'+number_code\n number_code = (prefix + '/coords_' + number_code + '.dat')\n yield number_code\n # yield a path in a group\n \n \n\n\nclass SharpnessMeasurement:\n ''' sharpness measured by different metrics through a dataset \n \n self.b_of_Boltzmann(): visualise sharpness by the b of 
Boltzmann distribution \n self.AUC(): visualise sharpness by the AUC(area under the (ROC)curve)\n self.distance(): visualise sharpness by the angular distance or the raw distance. \n angular distance/raw distance are controlled by angular=True/False\n self.distance_against_Boltzmann(): visualise the comparison of distance measuremance and Boltzmann measuremance\n (show both angular distance and raw distance)\n '''\n def __init__(self, path):\n self.path = path\n \n \n def b_of_Boltzmann(self, graph = True, title = 'no title', xlist = [round(0.5 - 0.05 * i, 2) for i in range(10)]):\n '''\n show b of Boltzmann\n \n graph = show the graph or not\n title = title\n xlist = default to be linear x\n \n '''\n gen = Paths(prefix = self.path)\n blist = [] # use b in Boltzmann as sharpness\n \n while(1):\n \n # generate path\n try:\n path = next(gen)\n except StopIteration:\n break\n try:\n \n ana = Analyse(path)\n except:\n continue\n \n print(path[-16:])\n \n # c01, c12 are TPR data for boundary_01 and boundary_12\n # fit the model\n c01,c12 = ana.TPR_delta_theta(np.linspace(0,90, num = 51), cell_label=True)\n \n xlist = np.linspace(-90,90,num=50)\n \n #plt.figure(figsize = (10,7))\n fit_01 = fit_model(xlist, c01)\n sigma01, b01 = fit_01.plot_model(xlist, c01, fit_01.Boltzmann, graph = False, inputc='C0', predictc='C2',\n inputname='Boundary 01', predictname='Model 01')\n fit_12 = fit_model(xlist, c12)\n sigma12, b12 = fit_12.plot_model(xlist, c12, fit_12.Boltzmann, graph = False, inputc='C1', predictc='C3',\n inputname='Boundary 12', predictname='Model 12')\n # plt.title('TPR against Theta', fontsize=15)\n \n blist.append([b01, b12])\n \n # if graph is not needed, return list of b including both bounds \n if not graph:\n return blist\n \n del path\n del gen\n print('\\n\\nFinish')\n \n # devide b01 and b12\n plt.figure(figsize=(12,5))\n plt.xlabel('sharp', fontsize=12)\n plt.ylabel('b in Boltzmann') \n blistT = np.array(blist).T\n plt.plot(blistT[0], label='b01')\n plt.plot(blistT[1], label='b12')\n plt.legend()\n plt.xticks([i*10 for i in range(10)], xlist)\n plt.title(title,fontsize=15) \n \n \n return blist\n \n \n def AUC(self, graph = True, title = 'no title', xlist = [round(0.5 - 0.05 * i, 2) for i in range(10)]):\n ''' describe the sharpness of the document with AUC \n \n graph = show the graph or not\n title = title of the graph\n xlist = the xlist specified for the graph\n '''\n \n gen = Paths(prefix=self.path)\n auclist = [] # use auc as sharpness\n \n while(1):\n try:\n path = next(gen)\n except:\n break\n \n ana = Analyse(path)\n TPR01,FPR01 = ana.getTPR_FPR(0, 0, ana.Xte, ana.tte, angle = (-pi/6, pi/2))\n TPR12,FPR12 = ana.getTPR_FPR(0, 1, ana.Xte, ana.tte, angle = (-pi/6, pi/2))\n auc01 = auc(FPR01, TPR01)\n auc12 = auc(FPR12, TPR12)\n \n auclist.append([auc01, auc12])\n print(path[-16:])\n \n print('\\n\\nFinish')\n \n # if graph is not needed, return auclist\n if not graph:\n return auclist\n \n # devide b01 and b12\n plt.figure(figsize=(12,5))\n plt.xlabel('sharp', fontsize=12)\n plt.ylabel('AUC') \n auclistT = np.array(auclist).T\n plt.plot(auclistT[0], label='b01')\n plt.plot(auclistT[1], label='b12')\n plt.legend()\n plt.xticks([i*10 for i in range(10)], xlist)\n plt.title(title,fontsize=15) \n \n \n return auclist\n \n \n def distance(self, graph = True, title = 'no title', xlist = [round(0.5 - 0.05 * i, 2) for i in range(10)], angular = True):\n ''' describe the sharpness of the document with angular distance or raw distance \n \n graph = show the graph or not\n title = title 
of the graph\n xlist = the xlist specified for the graph\n augular = use augualr distance or raw distance\n '''\n \n gen = Paths(prefix = self.path)\n dislist = [] # angular(or raw) distance as sharpness\n \n while(1):\n \n # generate path\n try:\n path = next(gen)\n except StopIteration:\n break\n try:\n \n ana = Analyse(path)\n except:\n continue\n \n print(path[-16:])\n \n # -------------------------- angular distance --------------------------------------\n \n # calculate the meeting point of y = k01 * x + b01 and y = k12 * x + b12 as the new origin\n ori_x = (ana.b01 - ana.b12) / (ana.k12 - ana.k01)\n ori_y = ori_x * ana.k01 + ana.b01\n \n # collect distances\n dis01, dis12= ana.getMiscDis(ana.k01, ana.k12, ana.Xte, ana.tte, ana.b01, ana.b12, origin = (ori_x, ori_y), angular=angular) \n dislist.append([dis01, dis12])\n \n print('\\n\\nFinish')\n \n # visualise\n # if graph not needed, return dislist \n if not graph:\n return dislist\n \n dislist = np.array(dislist).T\n \n plt.figure(figsize=(12,5))\n plt.xlabel('sharp', fontsize=12)\n plt.ylabel('distance') \n# dislistT = np.array(dislist).T\n plt.plot(dislist[0], label='b01')\n plt.plot(dislist[1], label='b12')\n plt.legend()\n plt.xticks([i*10 for i in range(10)], xlist)\n# plt.ylim((0,1))\n plt.title(title,fontsize=15) \n \n return dislist\n \n \n \n def distance_against_Boltzmann(self, title = 'no title'):\n ''' show the sharpness described by distance against that by Boltzmann \n \n graph = show the graph or not\n title = title of the graph\n xlist = the xlist specified for the graph\n '''\n \n # path generator\n gen = Paths(prefix=self.path)\n blist = [] # use b in Boltzmann as sharpness\n dislist_a = [] # angular distance as sharpness\n dislist_r = [] # raw distance as sharpness\n \n while(1):\n \n # generate path\n try:\n path = next(gen)\n except StopIteration:\n break\n try:\n \n ana = Analyse(path)\n except:\n continue\n \n print(path[-16:])\n \n \n # ---------------------------- b of Boltzmann -------------------------------------------\n \n # c01, c12 are TPR data for boundary_01 and boundary_12\n # fit the model\n c01,c12 = ana.TPR_delta_theta(np.linspace(-90,90, num = 51), cell_label=True)\n \n xlist = np.linspace(-90,90,num=50)\n \n #plt.figure(figsize = (10,7))\n fit_01 = fit_model(xlist, c01)\n sigma01, b01 = fit_01.plot_model(xlist, c01, fit_01.Boltzmann, graph = False, inputc='C0', predictc='C2',\n inputname='Boundary 01', predictname='Model 01')\n fit_12 = fit_model(xlist, c12)\n sigma12, b12 = fit_12.plot_model(xlist, c12, fit_12.Boltzmann, graph = False, inputc='C1', predictc='C3',\n inputname='Boundary 12', predictname='Model 12')\n # plt.title('TPR against Theta', fontsize=15)\n \n blist.append([b01, b12])\n \n \n # -------------------------- angular distance --------------------------------------\n \n # calculate the meeting point of y = k01 * x + b01 and y = k12 * x + b12 as the new origin\n ori_x = (ana.b01 - ana.b12) / (ana.k12 - ana.k01)\n ori_y = ori_x * ana.k01 + ana.b01\n # print(ori_x, ori_y, ana.b01, ana.b12, ana.k01, ana.k12)\n \n # collect distances\n dis01_a, dis12_a = ana.getMiscDis(ana.k01, ana.k12, ana.Xte, ana.tte, ana.b01, ana.b12, origin = (ori_x, ori_y), angular=True)\n dislist_a.append([dis01_a, dis12_a])\n \n dis01_r, dis12_r = ana.getMiscDis(ana.k01, ana.k12, ana.Xte, ana.tte, ana.b01, ana.b12, origin = (ori_x, ori_y), angular=False)\n dislist_r.append([dis01_r, dis12_r])\n \n print('\\n\\nFinish')\n \n \n # visualise\n \n blist = np.array(blist).T\n dislist_a = np.array(dislist_a).T\n 
dislist_r = np.array(dislist_r).T\n \n sharp01 = np.array(sorted(np.vstack([blist[0],dislist_a[0],dislist_r[0]]).T, key=lambda x:x[0])).T\n sharp12 = np.array(sorted(np.vstack([blist[1],dislist_a[1],dislist_r[1]]).T, key=lambda x:x[0])).T\n \n \n fig, [ax0,ax1] = plt.subplots(ncols = 2, figsize=(12,5))\n fig.suptitle(title,fontsize=18)\n \n \n color = 'tab:red'\n ax0.set_xlabel('b of Boltzmann')\n ax0.set_ylabel('angular distance', color=color)\n ax0.plot(sharp01[0], sharp01[1], color=color)\n ax0.tick_params(axis='y', labelcolor=color)\n \n ax01 = ax0.twinx() # instantiate a second axes that shares the same x-axis\n \n color = 'tab:blue'\n ax01.set_ylabel('raw distance', color=color) # we already handled the x-label with ax1\n ax01.plot(sharp01[0], sharp01[2], color=color)\n ax01.tick_params(axis='y', labelcolor=color)\n \n ax0.set_title('Boundary 01', fontsize=12)\n \n \n color = 'tab:red'\n ax1.set_xlabel('b of Boltzmann')\n ax1.set_ylabel('angular distance', color=color)\n ax1.plot(sharp12[0], sharp12[1], color=color)\n ax1.tick_params(axis='y', labelcolor=color)\n \n \n ax11 = ax1.twinx() # instantiate a second axes that shares the same x-axis\n \n color = 'tab:blue'\n ax11.set_ylabel('raw distance', color=color) # we already handled the x-label with ax1\n ax11.plot(sharp12[0], sharp12[2], color=color)\n ax11.tick_params(axis='y', labelcolor=color)\n \n ax1.set_title('Boundary 12', fontsize=12)\n \n \n fig.tight_layout() # otherwise the right y-label is slightly clipped\n \n plt.show()\n \n \nif __name__ == '__main__':\n \n path0 = 'D:/CytonemeSignaling/testDataSameSharpness/testDataSameSharpness/coords_00061.dat'\n path1 = 'D:/CytonemeSignaling/testDataSameLocation/testDataSameLocation/coords_00041.dat'\n path2 = 'D:/CytonemeSignaling/testDataNonlinearMonotonic/testDataNonlinearMonotonic/coords_00019.dat'\n path3 = 'D:/CytonemeSignaling/testDataNonlinearNonMonotonic/testDataNonlinearNonMonotonic/coords_00013.dat'\n path4 = 'D:/CytonemeSignaling/testDataStudySharpness_linear/SameSharpness/coords_00066.dat'\n testpath = 'D:/CytonemeSignaling/testText.txt'\n \n ana = Analyse(testpath)\n# xticks = np.linspace(0, 90, num = 50)\n# xticks_b = np.linspace(0, 90, num = 51)\n# c01, c12 = ana.TPR_delta_theta(xticks_b)\n# \n# cur = fit_model(xticks, c01)\n# cur.plot_model(xticks, c01, cur.Boltzmann)\n \n ana.view_TPR_delta_theta(title = 'TPR and Boltzmann in one dataset')\n ana.view_boltzmann_model(title = 'TPR and Boltzmann in one dataset')\n \n sp.call('cls', shell=True)\n \n \n \n ''' view data and prediction '''\n \n# ana = Analyse(path2, kernel='linear', C=0.1)\n## ana.view_data_predict()\n# ana.visualise_data()\n# ori_x = (ana.b01 - ana.b12) / (ana.k12 - ana.k01)\n# ori_y = ori_x * ana.k01 + ana.b01\n# print(ori_x, ori_y, ana.b01, ana.b12, ana.k01, ana.k12)\n \n \n ''' Show correctness ''' \n# ana0 = Analyse(path3, kernel='linear', C = 0.1)\n# ana0.visualise_data()\n# ana0.view_correctness()\n# ana0.view_predict()\n \n# ana1 = Analyse(path3, kernel='poly', C = 0.1)\n# ana1.view_correctness()\n \n# ana2 = Analyse(path3, kernel='linear', C = 1)\n# ana2.view_correctness()\n# ana2.view_predict()\n\n \n ''' Compare Kernels '''\n \n# def compare_kernels(path, C = 1, gamma = 'auto'):\n# ana_lin = Analyse(path, kernel = 'linear', C=C, gamma = gamma)\n# print(ana_lin.C)\n# ana_poly= Analyse(path, kernel = 'poly', C=C, gamma = gamma)\n# ana_rbf = Analyse(path, kernel = 'rbf', C=C, gamma = gamma)\n# \n# lin_acc = ana_lin.accuracy()\n# pol_acc = ana_poly.accuracy()\n# rbf_acc = ana_rbf.accuracy()\n# 
\n# print('lin: ' + str(lin_acc))\n# print('poly ' + str(pol_acc))\n# print('rbf: ' + str(rbf_acc))\n# print('\\n')\n# \n# return [lin_acc, pol_acc, rbf_acc]\n#\n# # get stats\n# com_ker_nonlin_mon = []\n# com_ker_nonlin_nonmon = []\n# \n# gen_mon = Paths('D:/CytonemeSignaling/testDataNonlinearMonotonic/testDataNonlinearMonotonic')\n# gen_nonmon = Paths(prefix='D:/CytonemeSignaling/testDataNonlinearNonMonotonic/testDataNonlinearNonMonotonic')\n# \n# # nonlinear_nonmonotonic\n# while(1):\n# try:\n# path = next(gen_mon)\n# except:\n# break\n# \n# com_ker_nonlin_mon.append(compare_kernels(path,C=0.01))\n# del gen_mon\n# \n# # nonlinear_monotonic\n# while(1):\n# try:\n# path = next(gen_nonmon)\n# except:\n# break\n# \n# com_ker_nonlin_nonmon.append(compare_kernels(path, C=0.01))\n# del gen_nonmon\n# \n# \n#\n# fig, axs = plt.subplots(2)\n# fig.set_figheight(15)\n# fig.set_figwidth(20)\n# axs[0].plot(np.array(com_ker_nonlin_mon).T[0], label = 'Linear')\n# axs[0].plot(np.array(com_ker_nonlin_mon).T[1], label = 'Polynomial')\n# axs[0].plot(np.array(com_ker_nonlin_mon).T[2], label = 'RBF')\n# axs[0].legend(fontsize = 12, loc=2)\n# axs[0].set_ylabel('accuracy')\n# axs[0].set_title('Nonlinear Monotonic C=0.01')\n# axs[1].plot(np.array(com_ker_nonlin_nonmon).T[0], label = 'Linear')\n# axs[1].plot(np.array(com_ker_nonlin_nonmon).T[1], label = 'Polynomial')\n# axs[1].plot(np.array(com_ker_nonlin_nonmon).T[2], label = 'RBF')\n# axs[1].set_ylabel('accuracy')\n# axs[1].set_title('Nonlinear NonMonotonic C=0.01')\n# axs[1].legend(fontsize = 12, loc=2)\n\n\n\n \n ''' Sharpness measurement '''\n# test = SharpnessMeasurement('D:/CytonemeSignaling/testDataNonlinearMonotonic/testDataNonlinearMonotonic')\n# test.distance_against_Boltzmann(title='SameSharpness linear')\n# del test\n# gc.collect()\n \n \n \n \n \n \n \n \n \n \n \n \n\n\n\n\n\n\n\n\n\n"
},
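Much of `script.py` measures boundary sharpness by sweeping a straight line through a range of angles and recording TPR/FPR at each slope (`get_boundary_ROC`, `getTPR_FPR`, `view_ROC_AUC`). A condensed, self-contained version of that sweep; the uniform square and the y = x ground truth are illustrative assumptions, while the TPR/FPR bookkeeping mirrors `get_boundary_ROC`.

```python
# Condensed angle-sweep ROC on synthetic data.
import numpy as np
from math import tan, pi
from sklearn.metrics import auc

def boundary_rates(k, b, X, t, tar_lab):
    above = X[:, 1] >= k * X[:, 0] + b   # predicted "label <= tar_lab"
    pos = t <= tar_lab                   # actually "label <= tar_lab"
    tp, fp = np.sum(above & pos), np.sum(above & ~pos)
    fn, tn = np.sum(~above & pos), np.sum(~above & ~pos)
    return tp / (tp + fn), fp / (fp + tn)

rng = np.random.default_rng(1)
X = rng.uniform(0, 2, size=(400, 2))
t = (X[:, 1] < X[:, 0]).astype(int)      # true boundary: y = x

rates = [boundary_rates(tan(a), 0, X, t, 0)
         for a in np.linspace(-pi / 2.2, pi / 2.2, 100)]
pairs = sorted((fpr, tpr) for tpr, fpr in rates)
print("AUC:", auc([p[0] for p in pairs], [p[1] for p in pairs]))
```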
{
"alpha_fraction": 0.50715172290802,
"alphanum_fraction": 0.5233209133148193,
"avg_line_length": 33.39130401611328,
"blob_id": "206612428c02fc73b72fbde9a848347c6a58adc7",
"content_id": "9faace66351b49682c388fbe1bf60659a75953c9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3218,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 92,
"path": "/readfile.py",
"repo_name": "GeraldWu23/CytonemeSignaling",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Jun 26 15:09:19 2019\n\n@author: wukak\n\"\"\"\n\nimport numpy as np\nimport matplotlib.pyplot as plt\n\n\n\nclass Readdat:\n ''' Read data from the raw file \n \n self.data = x and y of the data, in list\n self.label = labels of the data, in list\n self.Xtr = x and y of trainning data\n self.ttr = labels of training data\n self.Xte = x and y of testing data\n self.tte = labels of testing data\n self.scatte() : scatter the data in different colour according to their labels\n self.classify() : return data with spacific label\n '''\n def __init__(self, path): \n with open(path) as file:\n linelist = file.readlines()\n lst = [line.split() for line in linelist]\n self.data = []\n self.label = []\n for line in lst:\n self.data.append([float(line[0]), float(line[1])])\n self.label.append(int(float(line[2])))\n self.data = np.array(self.data)\n self.label = np.array(self.label)\n \n # cut dataset to training data and testing data\n size = int(len(self.data)/2)\n self.Xtr = self.data[:size]\n self.ttr = self.label[:size]\n self.Xte = self.data[size:]\n self.tte = self.label[size:]\n\n\n \n def scatter(self, data=None, label=None): \n ''' plot the points with different colours '''\n plt.figure(figsize = (7,7))\n if data is not None and label is not None: # if a spacific class is specialised\n plt.scatter(data[:,0], data[:,1], c = label, marker = '.')\n else:\n # classify with the label from the dataset\n data0,label0 = self.classify(0)\n data1,label1 = self.classify(1)\n data2,label2 = self.classify(2)\n \n # scatter them with labels\n plt.scatter(data0[:,0], data0[:,1], c=['C0']*len(data0), marker = '.', label = 0)\n plt.scatter(data1[:,0], data1[:,1], c=['C1']*len(data1), marker = '.', label = 1)\n plt.scatter(data2[:,0], data2[:,1], c=['C2']*len(data2), marker = '.', label = 2)\n plt.legend()\n plt.show()\n \n \n \n def classify(self, lab, data = None, label = None): \n ''' return classified data and label using the actual labels ''' \n if (data is None) and (label is None): # if data or label is not specialised, the whole data set is used\n data = self.data\n label = self.label\n \n data_classified = []\n label_classified = []\n for i in range(len(data)):\n if (label[i] == lab):\n data_classified.append(data[i])\n label_classified.append(label[i])\n return np.array(data_classified), np.array(label_classified) # return classified data and classified label\n \n\n \n \n \nif __name__ == '__main__':\n path = 'D:/CytonemeSignaling/testFateCoords.dat'\n \n \n \n# test = Readdat(path)\n# print(test.classify(0))\n# print('\\n-------------------\\n')\n# print(pred) \n \n \n \n \n \n \n \n \n \n "
}
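`Readdat` expects each line of a `coords_*.dat` file to hold whitespace-separated x, y and an integer label, and splits the rows in half into train/test. A sketch that writes and reads a file in that layout without the class; `coords_demo.dat` is an illustrative file name.

```python
# The coords_*.dat layout Readdat parses: "x y label" per line, whitespace
# separated; first half of the rows = training, second half = testing.
import numpy as np

rng = np.random.default_rng(2)
rows = np.column_stack([rng.uniform(0, 2, 60),
                        rng.uniform(0, 2, 60),
                        rng.integers(0, 3, 60)])
np.savetxt("coords_demo.dat", rows, fmt="%.4f %.4f %d")

data = np.loadtxt("coords_demo.dat")     # same parsing result as Readdat
X, t = data[:, :2], data[:, 2].astype(int)
Xtr, Xte = X[:len(X) // 2], X[len(X) // 2:]
print(Xtr.shape, Xte.shape)
```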
],
"num_files": 6

"repo_name": "andersonssh/Simulador-de-Sistema-Bancario",
"repo_url": "https://github.com/andersonssh/Simulador-de-Sistema-Bancario",
"snapshot_id": "43cef46a0763ebf39cd7b95e4b7f975dca272772",
"revision_id": "74301113eb46be806be3522f313a23fecb9a4f2a",
"directory_id": "c7bd1188cbfb72acae9bb82464832834418bc333",
"branch_name": "refs/heads/main",
"visit_date": "2023-08-14T10:34:44.744501",
"revision_date": "2021-12-09T12:20:44",
"committer_date": "2021-12-09T12:20:44",
"github_id": 410984747,
"star_events_count": 0,
"fork_events_count": 0,
"gha_license_id": null,
"gha_created_at": null,
"gha_updated_at": null,
"gha_pushed_at": null,
"gha_language": null,
"files":
[
{
"alpha_fraction": 0.6964285969734192,
"alphanum_fraction": 0.6964285969734192,
"avg_line_length": 18,
"blob_id": "6481318368c8c2b5d4f641f7af29d22ef0a4d993",
"content_id": "6316e0c794c5b67371eb85d2944b4c4c53d29bc7",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 56,
"license_type": "permissive",
"max_line_length": 22,
"num_lines": 3,
"path": "/servidor/__init__.py",
"repo_name": "andersonssh/Simulador-de-Sistema-Bancario",
"src_encoding": "UTF-8",
"text": "from . import conexoes\ndef start():\n conexoes.start()"
},
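The package's `__init__` only re-exports `conexoes.start`, so the `mainServer.py` entrypoint mentioned in the README — which is not included in this dump — is presumably nothing more than:

```python
# Presumed contents of mainServer.py (not part of this dump).
import servidor

servidor.start()
```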
{
"alpha_fraction": 0.4512116014957428,
"alphanum_fraction": 0.45889225602149963,
"avg_line_length": 33.755638122558594,
"blob_id": "d28f9e0b21b8404675484ba5cc20d64053cae16a",
"content_id": "01dad009accd35b711d5a72f603a0b33451dcc91",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9272,
"license_type": "permissive",
"max_line_length": 186,
"num_lines": 266,
"path": "/cliente.py",
"repo_name": "andersonssh/Simulador-de-Sistema-Bancario",
"src_encoding": "UTF-8",
"text": "import interfaceConsole\nimport socket\nimport ipaddress\nfrom time import sleep\ndebug = False\nsoc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\nt_con = 5 #segundos para aguardar a conexao\nsoc.settimeout(t_con)\nporta = 8888\n\ndef conectar_server():\n while True:\n #coletar IP\n while True:\n try:\n ip = ipaddress.ip_address(input('Digite o endereço ip do servidor: '))\n break\n except ValueError:\n interfaceConsole.limpar_console()\n print('O ip inserido não é valido! Digite o ip no formato x.x.x.x')\n\n #conectar\n try:\n ip = str(ip)\n soc.connect((str(ip), porta))\n #sai do loop principal\n break\n except Exception as e:\n sleep(t_con)\n print(e)\n\ndef aguardar_resposta():\n #retorna true se a resposta do servidor for positiva\n for i in range(2):\n if debug:\n print('Aguadando resposta do servidor!')\n resposta = soc.recv(2).decode()\n if resposta:\n if resposta == 'NO':\n return False\n elif resposta == 'SI':\n return True\n\n print('Tempo limite excedido')\n return False\n\ndef painel():\n def logar():\n \"\"\"\n retorna login_sucess quando usuario for autenticado!\n cadastro_sucess para cadastro realizado!\n login_fail|cadastro_fail caso falhar\n \"\"\"\n interfaceConsole.limpar_console()\n op = interfaceConsole.mostrar_menu('SIMULADOR BANCARIO', '[ 1 ] LOGIN\\n[ 2 ] CADASTRO\\n[ 0 ] SAIR', 0, 2)\n #SAIR\n if op == 0:\n soc.close()\n exit()\n #LOGIN\n elif op == 1:\n if debug:\n email = '[email protected]'\n senha = 'seila'\n else:\n email = input('email: ').strip()\n senha = input('senha: ')\n requisicao = '5|' + email + '|' + senha\n soc.send(requisicao.encode())\n\n if aguardar_resposta():\n if debug:\n print('Login Sucesso!')\n return 'login_sucess'\n else:\n if debug:\n print('Login Fracasso')\n return 'login_fail'\n #CADASTRO\n elif op == 2:\n nome = input('nome: ')\n email = input('email: ')\n senha = input('senha: ')\n requisicao = '4|' + email + '|' + nome + '|' + senha\n soc.send(requisicao.encode())\n if aguardar_resposta():\n if debug:\n print('Cadastro Sucesso!')\n return 'cadastro_sucess'\n else:\n if debug:\n print('Cadastro Fracasso')\n return 'cadastro_fail'\n def depositar(valor):\n req = '1|' + str(valor)\n soc.send(req.encode())\n return aguardar_resposta()\n\n\n def sacar(valor):\n req = '2|' + str(valor)\n soc.send(req.encode())\n return aguardar_resposta()\n\n\n def transferir(valor, email_destino):\n req = '3|' + str(valor) + '|' + email_destino\n print(req)\n soc.send(req.encode())\n return aguardar_resposta()\n\n\n def alterar_senha(senha_antiga, senha_nova):\n req = '6|' + senha_antiga + '|' + senha_nova\n soc.send(req.encode())\n return aguardar_resposta()\n\n\n def mostrar_transacoes():\n for i in acc['transacoes']:\n print(i)\n\n while True:\n resultado = logar()\n if resultado == 'login_sucess':\n #vai para a conta do usuario\n break\n elif resultado == 'cadastro_sucess':\n print('\\nCADASTRO REALIZADO! (pressione ENTER)')\n input('')\n elif resultado == 'cadastro_fail':\n ##################### PRECISA MELHORAR O TRATAMENTO DE ERROS PARA MELHOR ANALISE #########\n print('\\nUma conta com esse email já existe! (pressione ENTER)')\n input('')\n elif resultado == 'login_fail':\n print('\\nEmail ou senha incorretos! 
(pressione ENTER)')\n input('')\n\n\n ########################################################################################\n ############################# sessao pós login ##############################\n\n\n while True:\n #recebendo dados da conta e adicionando ao dicionario\n soc.send(b'7')\n while True:\n ######## INICIO DE REQUISICAO DE DADOS #####\n dados = soc.recv(10000).decode()\n acc = {}\n if dados:\n if debug:\n print('DADOS RECEBIDOS!')\n print(dados)\n dados = dados.split('|')\n acc['email'] = dados[0]\n acc['nome'] = dados[1]\n acc['saldo'] = float(dados[2])\n # _ é o separador usado para distinguir transacoes diferentes\n acc['transacoes'] = [i for i in dados[3].split('_')]\n print(acc)\n break\n elif dados == 'NO':\n #########\n print('Não foi possível se conectar ao servidor!')\n input('Pressione enter para relogar')\n start()\n exit()\n\n ############### FIM LOOP DE ATUALIZACAO DOS DADOS ##############\n op = interfaceConsole.mostrar_menu(f'Logado como {acc[\"nome\"].split(\" \")[0]} SALDO: R$ {acc[\"saldo\"]}',\n f'email:{acc[\"email\"]}\\n\\n[ 1 ] DEPOSITAR\\n[ 2 ] SACAR\\n[ 3 ] TRANSFERIR\\n[ 4 ] ALTERAR SENHA\\n[ 5 ] VER HISTÓRICO DE TRANSAÇÕES\\n[ 0 ] SAIR', 0, 5)\n interfaceConsole.limpar_console()\n if op == 1:\n print('\\t\\tDEPOSITAR')\n try:\n valor = float(input('\\n\\nDigite o valor (ex: 120.50): '))\n except ValueError:\n print('Valor Inválido! (pressione ENTER)')\n input('')\n else:\n if valor <= 0:\n print('Você não pode depositar este valor! (pressione ENTER)')\n input('')\n else:\n if depositar(valor):\n print('DEPOSITO REALIZADO! (pressione ENTER)')\n input('')\n else:\n print('FALHA NO DEPOSITO! (pressione ENTER)')\n input('')\n\n elif op == 2:\n try:\n valor = float(input('\\n\\nDigite o valor (ex: 120.50): '))\n except ValueError:\n print('Valor Inválido! (pressione ENTER)')\n input('')\n else:\n if valor > acc['saldo']:\n print('Você não tem saldo suficiente para sacar essa quantia (pressione ENTER)')\n input('')\n elif valor <= 0:\n print('Você não pode sacar este valor! (pressione ENTER)')\n print(input(''))\n else:\n if sacar(valor):\n print('SAQUE REALIZADO! (pressione ENTER)')\n input('')\n else:\n print('FALHA NO SAQUE! (pressione ENTER)')\n input('')\n elif op == 3:\n print('\\t\\tTRANSFERIR')\n try:\n valor = float(input('\\n\\nDigite o valor (ex: 120.50): '))\n except ValueError:\n print('Valor Inválido! (pressione ENTER)')\n input('')\n else:\n if valor > acc['saldo']:\n print('Você não tem saldo suficiente para transferir essa quantia (pressione ENTER)')\n input('')\n elif valor <= 0:\n print('Você não pode transferir este valor! (pressione ENTER)')\n print(input(''))\n else:\n email_destinatario = input('Digite o email do destinatário: ')\n if transferir(valor, email_destinatario):\n print('TRANSFERENCIA REALIZADA! (pressione ENTER)')\n input('')\n else:\n print('FALHA NA TRANSFERENCIA! O VERIFIQUE O EMAIL INFORMADO! (pressione ENTER)')\n input('')\n elif op == 4:\n print('\\t\\tALTERAR SENHA')\n senha_antiga = input('\\n\\nDigite a senha atual: ')\n senha_nova = input('Digite a nova senha: ')\n if alterar_senha(senha_antiga, senha_nova):\n print('Senha alterada com sucesso! (pressione ENTER)')\n input('')\n painel()\n exit()\n else:\n print('A senha não foi alterada! 
(pressione ENTER)')\n input('')\n elif op == 5:\n print('\\t\\tTRANSAÇÕES')\n print('\\n')\n mostrar_transacoes()\n print('')\n input('pressione ENTER')\n elif op == 0:\n soc.close()\n exit()\n\n\n\ndef start():\n if debug:\n soc.connect(('10.0.0.117', porta))\n else:\n conectar_server()\n painel()\n\nstart()"
},
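Every operation in `cliente.py` is framed the same way: a pipe-delimited request whose first field is an opcode, answered by a two-byte 'SI'/'NO' acknowledgement. A sketch of that framing as a reusable helper; the host, port and credentials are placeholders, and a real run needs the server from `servidor/` listening.

```python
# Reusable framing helper for the "opcode|field|field..." protocol.
import socket

def send_request(sock: socket.socket, *fields) -> bool:
    sock.send("|".join(str(f) for f in fields).encode())
    return sock.recv(2).decode() == "SI"   # server answers 'SI' or 'NO'

# usage (assuming a running server):
# soc = socket.create_connection(("127.0.0.1", 8888), timeout=5)
# ok = send_request(soc, 5, "user@example.com", "secret")  # opcode 5 = login
```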
{
"alpha_fraction": 0.5468245148658752,
"alphanum_fraction": 0.5500538349151611,
"avg_line_length": 24.77777862548828,
"blob_id": "a9ccb5164dd720e97b6f265715464e9ebe1ac990",
"content_id": "145299bda97edec2d9660634f14b4f4db012a955",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 933,
"license_type": "permissive",
"max_line_length": 69,
"num_lines": 36,
"path": "/interfaceConsole.py",
"repo_name": "andersonssh/Simulador-de-Sistema-Bancario",
"src_encoding": "UTF-8",
"text": "from platform import system\nfrom os import system as command\n\nso = system()\ntempo_espera = 2 #em segundos\n\ndef limpar_console():\n #limpa a tela do console de acordo com o sistema\n if so == 'Linux':\n command('clear')\n elif so == 'Windows':\n command('cls')\n else:\n print('\\n' * 50)\n\n return None\n\ndef mostrar_menu(titulo:str, opcoes:str, min_:int, max_:int):\n # retorna a escolha do usuario\n while True:\n limpar_console()\n print('\\t\\t\\t{}\\n'.format(titulo))\n print(opcoes, end='\\n\\n')\n escolha = input('Opção: ').strip()\n try:\n escolha = int(escolha)\n except:\n print('Dado inválido (pressione ENTER)')\n input('')\n continue\n\n if escolha < min_ or escolha > max_:\n print('O valor selecionado não existe (pressione ENTER)')\n input('')\n else:\n return escolha\n\n"
},
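For reference, `mostrar_menu` loops until the user enters an integer within `[min_, max_]` and returns it; a minimal usage sketch (interactive, so shown only as illustration):

```python
# Illustrative only (blocks on input): returns the validated integer choice.
import interfaceConsole

op = interfaceConsole.mostrar_menu("DEMO", "[ 1 ] ping\n[ 0 ] sair", 0, 1)
print("escolha:", op)
```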
{
"alpha_fraction": 0.772482693195343,
"alphanum_fraction": 0.7770945429801941,
"avg_line_length": 38.42424392700195,
"blob_id": "5fd4566373feb3ac4961b3e86ae1bbb0cb3d004c",
"content_id": "eb8c754ab92813133163324a4b49b1da792c4971",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1339,
"license_type": "permissive",
"max_line_length": 226,
"num_lines": 33,
"path": "/README.md",
"repo_name": "andersonssh/Simulador-de-Sistema-Bancario",
"src_encoding": "UTF-8",
"text": "# Simulador de Sistema Bancário\nSimula um sistema bancário em rede local através de sockets em um sistema do tipo cliente-servidor\n\nObjetivo: Aprender conceitos de back-end tais como, crud, uso de threads, pacotes em python, gerenciamento de conexões, etc... Tudo com o objetivo de melhorar minha noção de server-side no início dos meus estudos sobre a área!\n\n### Como funciona\n\nO arquivo mainServer.py irá iniciar um servidor de onde as aplicações clientes podem se conectar e realizar operações bancárias entre contas cadastradas no sistema.\n\n### Como usar\n\nCom python intalado na sua máquina, inicie a aplicação mainServer.py no terminal com o comando:\n\n```bash\n$ python3 mainServer.py\n```\n\nCom a aplicação em execução, irá ser mostrada um IP e PORTA que devem ser colocados na aplicação do cliente.\n\nPara executar o cliente use o comando:\n\n```bash\n$python3 cliente.py\n```\n\nO cliente pode ser usado em qualquer dispositivo da rede local que tenha python instalado.\n\n\nA fazer no futuro:\n\n- [ ] Mover esta aplicação para um módulo único chamado SSB_V1 e criar um novo módulo com a aplicação refatorada a cada ano. \n * Objetivo: Reescrever a aplicação de acordo com minhas novas habilidades e ver a diferença em relação a anterior.\n- [ ] Criar versao SSB_V3 1 ano após a SSB_V2 haver sido concluída.\n"
},
{
"alpha_fraction": 0.5833836197853088,
"alphanum_fraction": 0.5901688933372498,
"avg_line_length": 36.89714431762695,
"blob_id": "0f5e5ae56dd17b23bc87c64041461e7d16a93640",
"content_id": "09972346e35ebd76d827b226dc6dbb7bcdb2e763",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6636,
"license_type": "permissive",
"max_line_length": 155,
"num_lines": 175,
"path": "/servidor/crud.py",
"repo_name": "andersonssh/Simulador-de-Sistema-Bancario",
"src_encoding": "UTF-8",
"text": "from hashlib import sha256\nimport sqlite3\n#criando controladores para o bd\ncon = sqlite3.connect('banco.db')\ncur = con.cursor()\ndebug = False\n\ndef start_bd():\n #criar tabelas\n cur.execute('create table if not exists usuarios(email PRIMARY KEY, nome TEXT NOT NULL, senha TEXT NOT NULL, saldo DECIMAL(20,2));')\n cur.execute('create table if not exists transacoes(emailUsuario TEXT, transacao TEXT NOT NULL, FOREIGN KEY (emailUsuario) REFERENCES usuarios(email))')\n\n #adicionando usuarios\n try:\n cur.execute('INSERT INTO usuarios (email, nome, senha, saldo) VALUES (\"[email protected]\", \"user bot 1\", \"hashdasenha\", 0)')\n cur.execute('INSERT INTO usuarios (email, nome, senha, saldo) VALUES (\"[email protected]\", \"user bot 2\", \"hashdasenha\", 0)')\n cur.execute('INSERT INTO usuarios (email, nome, senha, saldo) VALUES (\"[email protected]\", \"user bot 3\", \"hashdasenha\", 0)')\n except sqlite3.IntegrityError:\n # if debug:\n # print('Criacao de Usuarios falhou, pois eles já existem!')\n pass\n\n con.commit()\n return None\n\ndef cadastrar(email, nome, senha, saldo=0):\n senha = sha256(senha.encode('utf-8')).hexdigest()\n try:\n cur.execute('INSERT INTO usuarios (email, nome, senha, saldo) '\n f'VALUES (\"{email}\", \"{nome}\", \"{senha}\", {saldo})')\n con.commit()\n except Exception as e:\n if debug:\n print('Erro em cadastro: ', end='')\n print(e)\n return False\n return True\n\ndef login(email, senha):\n #return True para sucesso no login e false caso contrario\n if debug:\n for i in cur.execute(f'SELECT * FROM usuarios WHERE email=\"{email}\" and senha=\"{senha}\"'):\n print(i)\n\n if tuple(cur.execute(f'SELECT * FROM usuarios WHERE email=\"{email}\" and senha=\"{senha}\"')):\n return True\n else:\n return False\n\ndef alterar_senha(email, senha_antiga, senha_nova):\n if debug:\n print('Senha antiga: ',senha_antiga)\n print('Senha nova: ', senha_nova)\n\n #se email True entao atualiza a senha\n if login(email, senha_antiga):\n cur.execute(f'UPDATE usuarios SET senha=\"{senha_nova}\" WHERE email=\"{email}\"')\n con.commit()\n if debug:\n print('Senha Alterada com sucesso!')\n return True\n else:\n if debug:\n print('A senha nao foi alterada!')\n return False\n\ndef sacar(email, senha, valor):\n if login(email, senha):\n valor_atual = tuple(cur.execute(f'SELECT saldo FROM usuarios WHERE email=\"{email}\"'))[0][0]\n if valor_atual < valor or valor <= 0:\n if debug:\n print('Valor inserido nao pode ser sacado!')\n return False\n else:\n cur.execute(f'UPDATE usuarios SET saldo={valor_atual - valor} where email=\"{email}\"')\n con.commit()\n _transacao_efetuada(email, f'Saque de R$ {valor}')\n return True\n else:\n if debug:\n print('Login invalido')\n return False\n\ndef depositar(email, senha, valor):\n if login(email, senha):\n valor_atual = tuple(cur.execute(f'SELECT saldo FROM usuarios WHERE email=\"{email}\"'))[0][0]\n if valor <= 0:\n if debug:\n print('Valor inserido nao pode ser depositado')\n return False\n else:\n if debug:\n print(f'Depositado R$ {valor}')\n cur.execute(f'UPDATE usuarios SET saldo={valor_atual + valor} where email=\"{email}\"')\n con.commit()\n _transacao_efetuada(email, f'Depósito de R$ {valor}')\n return True\n else:\n if debug:\n print('Login invalido')\n return False\n\ndef transferir(email, senha, valor, email_destino):\n if login(email, senha):\n valor_atual = tuple(cur.execute(f'SELECT saldo FROM usuarios WHERE email=\"{email}\"'))[0][0]\n if valor_atual < valor or valor <= 0:\n if debug:\n print('Valor inserido nao pode ser 
transferido!')\n return False\n else:\n #depositando para o destino\n valor_atual_destino = tuple(cur.execute(f'SELECT saldo FROM usuarios WHERE email=\"{email_destino}\"'))[0][0]\n cur.execute(f'UPDATE usuarios SET saldo={valor_atual_destino + valor} where email=\"{email_destino}\"')\n #sacando do usuario atual\n cur.execute(f'UPDATE usuarios SET saldo={valor_atual - valor} where email=\"{email}\"')\n con.commit()\n if debug:\n print('Valor transferido!')\n #registrando email do usuario\n _transacao_efetuada(email, f'Transferencia de R$ {valor} realizada para {email_destino}')\n _transacao_efetuada(email_destino, f'Transferencia de R$ {valor} recebida de {email}')\n return True\n else:\n if debug:\n print('Login invalido')\n\ndef _transacao_efetuada(email, ocorrencia):\n #adiciona a tabela transacoes um registro com a ocorrencia da transacao\n try:\n cur.execute(f'INSERT INTO transacoes (emailUsuario, transacao) VALUES (\"{email}\", \"{ocorrencia}\")')\n con.commit()\n return True\n except Exception as e:\n if debug:\n print('Falha no registro de transacao efetuada: ', e)\n return False\n\ndef get_conta(email, senha):\n #retorna dicionario com a dados de conta -> email:str, nome:str, saldo:float, transacoes:lista\n if login(email, senha):\n #pegando dados da conta\n dados = tuple(cur.execute(f'SELECT * FROM usuarios WHERE email=\"{email}\"'))[0]\n acc = {}\n acc['email'] = dados[0]\n acc['nome'] = dados[1]\n acc['saldo'] = dados[3]\n\n if debug:\n print(f'email: {acc[\"email\"]}\\nnome: {acc[\"nome\"]}\\nSaldo: R$ {acc[\"saldo\"]}')\n\n #pegando transacoes\n transacoes = cur.execute(f'SELECT transacao FROM transacoes WHERE emailUsuario=\"{email}\"')\n acc['transacoes'] = [transacao[0] for transacao in transacoes]\n\n if debug:\n print('TRANSAÇÕES:')\n for i in acc['transacoes']:\n print('\\t', i)\n\n return acc\n else:\n if debug:\n print('Login nao realizado!')\n return None\n\n\nif __name__ == '__main__':\n start_bd()\n #cadastrar('[email protected]', 'josezin souza', 'hojesei')\n #alterar_senha('[email protected]', 'hojesei', 'hojenaosei')\n #alterar_senha('[email protected]', 'hojenaosei', 'hojesei')\n #print(\"LOGANDO! \", login('[email protected]', 'hojesei'))\n #depositar('[email protected]', 'hojesei', 100000)\n #transferir('[email protected]', 'hojesei', 200, '[email protected]')\n #print(get_conta('[email protected]', 'hojesei'))\n"
},
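The crud.py file above builds every SQL statement by interpolating user input into f-strings, which leaves each query open to SQL injection. A minimal sketch of the same `cadastrar` insert rewritten with sqlite3 parameter binding (an illustrative hardening under the same schema, not code from the repo):

```python
from hashlib import sha256
import sqlite3

con = sqlite3.connect('banco.db')
cur = con.cursor()

def cadastrar(email, nome, senha, saldo=0):
    # hash the password, then let sqlite3 bind the values instead of formatting SQL
    senha = sha256(senha.encode('utf-8')).hexdigest()
    try:
        cur.execute('INSERT INTO usuarios (email, nome, senha, saldo) VALUES (?, ?, ?, ?)',
                    (email, nome, senha, saldo))
        con.commit()
    except sqlite3.IntegrityError:
        return False
    return True
```

The `?` placeholders keep the values out of the SQL text entirely, so a malicious email such as `"; DROP TABLE usuarios; --` is stored as plain data.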
{
"alpha_fraction": 0.49228131771087646,
"alphanum_fraction": 0.5018582344055176,
"avg_line_length": 35.628273010253906,
"blob_id": "a5c80ab07d2f76c07553db0a0dcdacf1d0e12440",
"content_id": "ce746be1924eabce1f280490243cb3d656905e35",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7000,
"license_type": "permissive",
"max_line_length": 144,
"num_lines": 191,
"path": "/servidor/conexoes.py",
"repo_name": "andersonssh/Simulador-de-Sistema-Bancario",
"src_encoding": "UTF-8",
"text": "import socket\nfrom threading import Thread\nfrom hashlib import sha256\nfrom . import crud\n\ndef start():\n crud.start_bd()\n debug = False\n soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n\n def get_ip():\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n try:\n # doesn't even have to be reachable\n s.connect(('1.1.1.1', 1))\n IP = s.getsockname()[0]\n except Exception:\n IP = 'O endereço de ip não pôde ser definido! Descubra o endereço ip local manualmente: ifconfig para Linux e ipconfig para Windows'\n finally:\n s.close()\n return IP\n\n ip_ = get_ip()\n porta = 8888\n print(f'IP: {ip_}\\nPORTA: {porta}')\n\n #adicionando host para toda a rede\n soc.bind(('0.0.0.0', porta))\n\n #formato {IP: {con: objCon, email: str, senha: str-hash}}\n\n sessoes_ativas = {}\n\n def gerenciar_conexoes():\n while True:\n soc.listen()\n con, addr = soc.accept()\n con.settimeout(0.2)\n sessoes_ativas[addr[0]] = {'con': con , 'email': '', 'senha': ''}\n if debug:\n print(f'Conexao com {addr[0]} estabelecida!')\n print('Mostrando sessoes ativas')\n print(sessoes_ativas)\n\n def requisicao(dados, ip):\n \"\"\"\n TABELA DE REQUISICOES\n 1|valor - DEPOSITAR - email|senha|valor via dict acc\n 2|valor - SACAR - email|senha|valor via dict acc\n 3|valor|email - TRANSFERIR - email|senha|valor|email via dict acc\n 4|email|nome|senha - CADASTRAR\n 5|email|senha - LOGIN\n 6|senha_antiga|senha_nova - MUDAR SENHA email|senha_antiga|senha_nova via dict acc\n 7 retorna conta atual\n\n \"\"\"\n #respostas do servidor para o cliente\n POSITIVO = 'SI'.encode()\n NEGATIVO = 'NO'.encode()\n\n\n\n dados = dados.split('|')\n if debug:\n print('Mostrando dados!')\n print(dados)\n\n #depositar\n if dados[0] == '1':\n valor = float(dados[1])\n if crud.depositar(sessoes_ativas[ip]['email'], sessoes_ativas[ip]['senha'], valor):\n if debug:\n print('DEPOSITO EFETUADO COM SUCESSO!')\n sessoes_ativas[ip]['con'].send(POSITIVO)\n else:\n if debug:\n print('FRACASSO NO DEPOSITO!')\n sessoes_ativas[ip]['con'].send(NEGATIVO)\n #sacar\n elif dados[0] == '2':\n valor = float(dados[1])\n if crud.sacar(sessoes_ativas[ip]['email'], sessoes_ativas[ip]['senha'], valor):\n if debug:\n print('SAQUE EFETUADO COM SUCESSO!')\n sessoes_ativas[ip]['con'].send(POSITIVO)\n else:\n if debug:\n print('FRACASSO NO SAQUE!')\n sessoes_ativas[ip]['con'].send(NEGATIVO)\n #transferir\n elif dados[0] == '3':\n print('CONEXOES TRANSFERIR!!!')\n print(dados)\n valor = float(dados[1])\n email_dest = dados[2]\n if crud.transferir(sessoes_ativas[ip]['email'], sessoes_ativas[ip]['senha'],valor, email_dest):\n if debug:\n print('TRANSFERENCIA EFETUADA COM SUCESSO!')\n sessoes_ativas[ip]['con'].send(POSITIVO)\n else:\n if debug:\n print('FRACASSO NA TRANSFERENCIA!')\n sessoes_ativas[ip]['con'].send(NEGATIVO)\n #cadastro\n elif dados[0] == '4':\n email = dados[1]\n nome = dados[2]\n senha = dados[3]\n if crud.cadastrar(email, nome, senha):\n if debug:\n print('CADASTRO REALIZADO COM SUCESSO!')\n sessoes_ativas[ip]['con'].send(POSITIVO)\n else:\n if debug:\n print('FRACASSO NO CADASTRO!')\n sessoes_ativas[ip]['con'].send(NEGATIVO)\n #login\n elif dados[0] == '5':\n email = dados[1]\n #gera o hash da senha para comparacao no bd\n senha = sha256(dados[2].encode()).hexdigest()\n if crud.login(email, senha):\n #registra os dados na sessao!\n sessoes_ativas[ip]['email'] = email\n sessoes_ativas[ip]['senha'] = senha\n if debug:\n print('LOGIN REALIZADO COM SUCESSO!')\n sessoes_ativas[ip]['con'].send(POSITIVO)\n else:\n if debug:\n print('FRACASSO NO 
LOGIN!')\n sessoes_ativas[ip]['con'].send(NEGATIVO)\n elif dados[0] == '6':\n senha_antiga = sha256(dados[1].encode()).hexdigest()\n senha_nova = sha256(dados[2].encode()).hexdigest()\n if crud.alterar_senha(sessoes_ativas[ip]['email'], senha_antiga, senha_nova):\n if debug:\n print('SENHA ALTERADA COM SUCESSO!')\n sessoes_ativas[ip]['con'].send(POSITIVO)\n else:\n if debug:\n print('FALHA AO ALTERAR A SENHA!')\n sessoes_ativas[ip]['con'].send(NEGATIVO)\n elif dados[0] == '7':\n conta = crud.get_conta(sessoes_ativas[ip]['email'], sessoes_ativas[ip]['senha'])\n if conta:\n gtc = conta['email'] + '|' + conta['nome'] + '|' + str(conta['saldo']) + '|' + '_'.join([i for i in conta['transacoes']])\n if debug:\n print(gtc)\n sessoes_ativas[ip]['con'].send(gtc.encode('utf-8'))\n else:\n sessoes_ativas[ip]['con'].send(NEGATIVO)\n else:\n if debug:\n print('o primeiro elemento da requisicao nao existe!')\n sessoes_ativas[ip]['con'].send(NEGATIVO)\n\n\n Thread(target=gerenciar_conexoes).start()\n if debug:\n print('Entrada de loop aguardando requisicoes')\n\n #fica em loop aguardando requisicoes\n while True:\n #varre todas as conexoes a procura de requisicoes\n chaves_sessoes_tivas = list(sessoes_ativas.keys())\n for ip in chaves_sessoes_tivas:\n obj_conexao = sessoes_ativas[ip]['con']\n try:\n try:\n req = obj_conexao.recv(1024).decode()\n except socket.timeout:\n continue\n except ConnectionResetError:\n del sessoes_ativas[ip]\n continue\n\n if req:\n if debug:\n print('requisicao recebida, iniciando gerenciador de requisicoes!')\n try:\n requisicao(req, ip)\n except Exception as e:\n if debug:\n print(e)\n print('Cliente tentou violar regras')\n sessoes_ativas[ip]['con'].send(b'NO')\n\n\nif __name__ == '__main__':\n start()\n"
},
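The request table in conexoes.py implies a matching client: pipe-delimited commands sent over TCP, each answered with `SI` or `NO`. A minimal client sketch under that protocol (the address and credentials below are placeholders, not values from the repo):

```python
import socket

HOST, PORT = '127.0.0.1', 8888  # placeholder address; use the IP/PORT the server prints

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((HOST, PORT))

# request '5' is a login: 5|email|senha (the server hashes the password itself)
s.send('5|[email protected]|hashdasenha'.encode())
print(s.recv(1024).decode())  # 'SI' on success, 'NO' otherwise

# request '1' is a deposit: 1|valor, using the credentials stored in the session
s.send('1|150.0'.encode())
print(s.recv(1024).decode())

s.close()
```

Reading the reply between sends matters here: the server pulls at most 1024 bytes per poll, so back-to-back sends could coalesce into a single malformed request.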
{
"alpha_fraction": 0.84375,
"alphanum_fraction": 0.84375,
"avg_line_length": 15.5,
"blob_id": "4c45d6056d7d0d8c2038e86707eb64cb4cf5df1b",
"content_id": "9db6ef0614bef8d61cf924ab62ff8f6012f475ac",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 32,
"license_type": "permissive",
"max_line_length": 16,
"num_lines": 2,
"path": "/mainServer.py",
"repo_name": "andersonssh/Simulador-de-Sistema-Bancario",
"src_encoding": "UTF-8",
"text": "import servidor\nservidor.start()"
}
] | 7 |
vinodsharma/ecs-deploy
|
https://github.com/vinodsharma/ecs-deploy
|
4d236f3cfeaff3dc88e6dd59e90b7db09a4d89ee
|
35616cdff5664315ae3ac4f65b7056cd8c260133
|
f2b98a07c8e8700aa08c748a10de56c6bfa6be8b
|
refs/heads/master
| 2021-01-19T18:00:09.672609 | 2017-09-22T21:53:31 | 2017-09-22T21:53:31 | 101,105,116 | 0 | 0 | null | 2017-08-22T20:35:45 | 2017-08-25T23:18:22 | 2017-09-22T21:53:32 |
Python
|
[
{
"alpha_fraction": 0.5891596078872681,
"alphanum_fraction": 0.5946295261383057,
"avg_line_length": 27.165266036987305,
"blob_id": "35ae51a28308028c384aeee8d2c3ad4983234820",
"content_id": "6a51dc59a9ab7963cbfe4a60e09d3b7c5ac69715",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 10055,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 357,
"path": "/deploy.py",
"repo_name": "vinodsharma/ecs-deploy",
"src_encoding": "UTF-8",
"text": "import boto3\nfrom botocore.exceptions import ClientError as BotoClientError\nimport subprocess\nimport logging\nimport sys\nimport os\nfrom os.path import join, dirname\nfrom dotenv import load_dotenv\nfrom time import sleep\n\nlogger = logging.getLogger('thestral_deployment')\nlogger.setLevel(logging.DEBUG)\n\nch = logging.StreamHandler(sys.stdout)\nch.setLevel(logging.DEBUG)\nformatter = logging.Formatter(\n '%(asctime)s - %(name)s - %(levelname)s - %(message)s')\nch.setFormatter(formatter)\nlogger.addHandler(ch)\n\nlambda_client = boto3.client('lambda')\nevents_client = boto3.client('events')\nbatch_client = boto3.client('batch')\nec2_client = boto3.client('ec2')\n\n\nclass Deploy_Exception(Exception):\n pass\n\n\ndef read_deploy_config():\n dotenv_path = join(dirname('.'), '.env')\n load_dotenv(dotenv_path)\n\n configuration = {\n 'AWS_ACCOUNT_ID': os.getenv('AWS_ACCOUNT_ID'),\n 'DOCKER_IMAGE': os.getenv('DOCKER_IMAGE'),\n }\n return configuration\n\n\ndef get_function_arn(fn_name):\n response = lambda_client.get_function_configuration(\n FunctionName=fn_name\n )\n return response['FunctionArn']\n\n\ndef get_function(fn_name):\n response = lambda_client.get_function(\n FunctionName=fn_name\n )\n return response\n\n\ndef is_function_exists(fn_name):\n try:\n get_function(fn_name)\n return True\n except BotoClientError as bce:\n if bce.response['Error']['Code'] == 'ResourceNotFoundException':\n return False\n raise\n\n\ndef update_function(fn_name):\n zip_file_name = fn_name + \".zip\"\n code_file_name = fn_name + \".py\"\n create_zip_file_for_code(zip_file_name, code_file_name)\n lambda_client.update_function_code(\n FunctionName=fn_name,\n Publish=True,\n ZipFile=open(\"{0}.zip\".format(fn_name), 'rb').read()\n )\n\n\ndef get_rule_arn(rule_name):\n response = events_client.describe_rule(\n Name=rule_name\n )\n return response['Arn']\n\n\ndef create_zip_file_for_code(zip_file_name, code_file_name):\n subprocess.check_output([\"zip\", zip_file_name, code_file_name])\n\n\ndef create_function(fn_role, fn_name):\n zip_file_name = fn_name + \".zip\"\n code_file_name = fn_name + \".py\"\n create_zip_file_for_code(zip_file_name, code_file_name)\n lambda_client.create_function(\n FunctionName=fn_name,\n Runtime='python2.7',\n Role=fn_role,\n Handler=\"{0}.lambda_handler\".format(fn_name),\n Code={'ZipFile': open(\"{0}.zip\".format(fn_name), 'rb').read(), },\n )\n\n\ndef put_rule(rule_name):\n # frequency = \"rate(1 hour)\"\n frequency = \"cron(0 8 ? 
* mon *)\"\n events_client.put_rule(\n Name=rule_name,\n ScheduleExpression=frequency,\n State='ENABLED',\n )\n\n\ndef add_permissions(fn_name, rule_name):\n try:\n lambda_client.add_permission(\n FunctionName=fn_name,\n StatementId=\"{0}-Event\".format(rule_name),\n Action='lambda:InvokeFunction',\n Principal='events.amazonaws.com',\n SourceArn=get_rule_arn(rule_name),\n )\n except BotoClientError as bce:\n if not bce.response['Error']['Code'] == 'ResourceConflictException':\n raise\n\n\ndef put_targets(fn_arn, rule_name):\n events_client.put_targets(\n Rule=rule_name,\n Targets=[\n {\n 'Id': \"1\",\n 'Arn': fn_arn,\n },\n ]\n )\n\n\ndef is_compute_env_exists(compute_env_name):\n response = batch_client.describe_compute_environments(\n computeEnvironments=[\n compute_env_name,\n ],\n )\n if len(response['computeEnvironments']) == 1:\n return True\n else:\n return False\n\n\ndef is_job_queue_exists(job_queue_name):\n response = batch_client.describe_job_queues(\n jobQueues=[\n job_queue_name,\n ],\n )\n\n if len(response['jobQueues']) == 1:\n return True\n else:\n return False\n\n\ndef is_job_definition_exists(job_definition_name):\n response = batch_client.describe_job_definitions(\n jobDefinitionName=job_definition_name,\n )\n if len(response['jobDefinitions']) >= 1:\n return True\n else:\n return False\n\n\ndef get_default_vpc_id():\n vpcs_info = ec2_client.describe_vpcs(\n Filters=[\n {\n 'Name': 'isDefault',\n 'Values': [\n 'true',\n ]\n },\n ],\n )\n if len(vpcs_info['Vpcs']) < 1:\n raise Deploy_Exception(\"No Default VPC Exists\")\n vpc_id = vpcs_info['Vpcs'][0]['VpcId']\n return vpc_id\n\n\ndef get_security_group_ids(vpc_id):\n security_groups_info = ec2_client.describe_security_groups(\n Filters=[\n {\n 'Name': 'vpc-id',\n 'Values': [\n vpc_id,\n ]\n },\n ],\n )\n if len(security_groups_info['SecurityGroups']) < 1:\n raise Deploy_Exception(\n \"No SecurityGroup exits for the vpc-id %s\" % vpc_id)\n\n security_group_ids = []\n for security_group in security_groups_info['SecurityGroups']:\n security_group_ids.append(security_group['GroupId'])\n return security_group_ids\n\n\ndef get_subnet_ids(vpc_id):\n subnets_info = ec2_client.describe_subnets(\n Filters=[\n {\n 'Name': 'vpc-id',\n 'Values': [\n vpc_id,\n ]\n },\n ],\n )\n if len(subnets_info['Subnets']) < 1:\n raise Deploy_Exception(\"No Subnet exits for the vpc-id %s\" % vpc_id)\n\n subnet_ids = []\n for subnet in subnets_info['Subnets']:\n subnet_ids.append(subnet['SubnetId'])\n return subnet_ids\n\n\ndef create_compute_env(compute_env_name, aws_account_id):\n vpc_id = get_default_vpc_id()\n instance_types = ['optimal', 'm4']\n batch_client.create_compute_environment(\n type='MANAGED',\n computeEnvironmentName=compute_env_name,\n computeResources={\n 'type': 'EC2',\n 'instanceRole': 'arn:aws:iam::' + aws_account_id +\n ':instance-profile/ecsInstanceRole',\n 'instanceTypes': instance_types,\n 'maxvCpus': 256,\n 'minvCpus': 0,\n 'securityGroupIds': get_security_group_ids(vpc_id),\n 'subnets': get_subnet_ids(vpc_id),\n 'tags': {\n 'Name': 'Batch Instance - '+compute_env_name,\n },\n },\n serviceRole='arn:aws:iam::' + aws_account_id +\n ':role/service-role/AWSBatchServiceRole',\n state='ENABLED',\n )\n\n\ndef wait_until_compute_env_is_ready(compute_env_name):\n for i in range(30):\n sleep(10)\n response = batch_client.describe_compute_environments(\n computeEnvironments=[compute_env_name])\n comp_env = response['computeEnvironments'][0]\n if comp_env['status'] == 'VALID':\n return\n raise Deploy_Exception(\n \"TimeOut: Compute 
Environemnt %s is not ready\" % compute_env_name)\n\n\ndef wait_until_job_queue_is_ready(job_queue_name):\n for i in range(30):\n sleep(10)\n response = batch_client.describe_job_queues(jobQueues=[job_queue_name])\n job_queue = response['jobQueues'][0]\n if job_queue['status'] == 'VALID':\n return\n raise Deploy_Exception(\n \"TimeOut: Job Queue %s is not ready\" % job_queue_name)\n\n\ndef create_job_queue(job_queue_name, compute_env_name):\n batch_client.create_job_queue(\n computeEnvironmentOrder=[\n {\n 'computeEnvironment': compute_env_name,\n 'order': 1,\n },\n ],\n jobQueueName=job_queue_name,\n priority=1,\n state='ENABLED',\n )\n\n\ndef register_job_definition(job_definition_name, docker_image):\n response = batch_client.register_job_definition(\n type='container',\n containerProperties={\n 'command': [\n 'python',\n 'thestral_app.py',\n ],\n 'image': docker_image,\n 'memory': 1024*6,\n 'vcpus': 2,\n },\n jobDefinitionName=job_definition_name,\n )\n return response\n\n\ndef submit_job(job_definition_name, job_name, job_queue_name):\n response = batch_client.submit_job(\n jobDefinition=job_definition_name,\n jobName=job_name,\n jobQueue=job_queue_name,\n )\n return response\n\n\ndef main():\n deploy_conf = read_deploy_config()\n docker_image = deploy_conf[\"DOCKER_IMAGE\"]\n\n aws_account_id = boto3.client('sts').get_caller_identity().get('Account')\n fn_name = \"thestral_aws_lambda_function\"\n fn_role = 'arn:aws:iam::' + aws_account_id + ':role/LambdaBatch'\n if is_function_exists(fn_name):\n update_function(fn_name)\n else:\n create_function(fn_role, fn_name)\n logger.info(\"Lambda function %s created\" % fn_name)\n fn_arn = get_function_arn(fn_name)\n rule_name = \"{0}-trigger\".format(fn_name)\n put_rule(rule_name)\n add_permissions(fn_name, rule_name)\n put_targets(fn_arn, rule_name)\n logger.info(\"Cloudwatch trigger %s added/updated\" % rule_name)\n\n compute_env_name = 'thestral_comp_env_v1'\n job_queue_name = 'thestral_job_queue_v1'\n job_definition_name = 'thestral_job_definition'\n\n if not is_compute_env_exists(compute_env_name):\n create_compute_env(compute_env_name, aws_account_id)\n logger.info(\"Compute environment %s is created\" % compute_env_name)\n logger.info(\"Waiting for compute environment to be ready\")\n wait_until_compute_env_is_ready(compute_env_name)\n if not is_job_queue_exists(job_queue_name):\n create_job_queue(job_queue_name, compute_env_name)\n logger.info(\"Job queue %s is created\" % job_queue_name)\n logger.info(\"Waiting for job queue to be ready\")\n wait_until_job_queue_is_ready(job_queue_name)\n register_job_definition(job_definition_name, docker_image)\n logger.info(\"Job definition is registered\")\n logger.info(\"Deployed Successfully\")\n\n\nif __name__ == \"__main__\":\n main()\n"
},
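read_deploy_config() in deploy.py loads its two settings from a `.env` file next to the script via python-dotenv. A sketch of what that file is expected to contain (both values here are placeholders, not values shipped with the repo):

```
AWS_ACCOUNT_ID=123456789012
DOCKER_IMAGE=vinodsharma/circleci-demo-docker:latest
```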
{
"alpha_fraction": 0.7931034564971924,
"alphanum_fraction": 0.7931034564971924,
"avg_line_length": 28,
"blob_id": "e637fb463c9a8a45277edac503fa855df9a38c8a",
"content_id": "0f6e7acb7f4160150eb1d9adff3d5584832679db",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 29,
"license_type": "permissive",
"max_line_length": 28,
"num_lines": 1,
"path": "/thestral_app.py",
"repo_name": "vinodsharma/ecs-deploy",
"src_encoding": "UTF-8",
"text": "print \"Running Main Program\"\n"
},
{
"alpha_fraction": 0.7719298005104065,
"alphanum_fraction": 0.780701756477356,
"avg_line_length": 27.5,
"blob_id": "789c9bea85932f236a28949957a3906dbe6e07a1",
"content_id": "37c17c4c9831a62779d0d056bb289ddd027e5acf",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 228,
"license_type": "permissive",
"max_line_length": 60,
"num_lines": 8,
"path": "/Dockerfile",
"repo_name": "vinodsharma/ecs-deploy",
"src_encoding": "UTF-8",
"text": "FROM vinodsharma/python:2.7-devel\nENV APPDIR=/opt/thestral\nRUN /usr/bin/virtualenv venv\nRUN source venv/bin/activate\nRUN mkdir $APPDIR\nCOPY . $APPDIR/\nWORKDIR $APPDIR\nRUN pip install --upgrade --no-cache-dir -r requirements.txt\n"
},
{
"alpha_fraction": 0.7063081860542297,
"alphanum_fraction": 0.709410548210144,
"avg_line_length": 29.21875,
"blob_id": "4de378933830dd184729877508ce54afda93500a",
"content_id": "44fd5a3978ee343b5649fd164ecbef7985e4efc3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 967,
"license_type": "permissive",
"max_line_length": 76,
"num_lines": 32,
"path": "/thestral_aws_lambda_function.py",
"repo_name": "vinodsharma/ecs-deploy",
"src_encoding": "UTF-8",
"text": "import boto3\nimport logging\nimport sys\n\nlogger = logging.getLogger('thestral_aws_lambda')\nlogger.setLevel(logging.DEBUG)\n\nch = logging.StreamHandler(sys.stdout)\nch.setLevel(logging.DEBUG)\nformatter = logging.Formatter(\n '%(asctime)s - %(name)s - %(levelname)s - %(message)s')\nch.setFormatter(formatter)\nlogger.addHandler(ch)\n\n\ndef submit_job(batch_client, job_definition_name, job_name, job_queue_name):\n response = batch_client.submit_job(\n jobDefinition=job_definition_name,\n jobName=job_name,\n jobQueue=job_queue_name,\n )\n logger.info(\"Submit job response %s\", response)\n\n\ndef lambda_handler(event, context):\n logger.info(\"submit_job Started\")\n batch_client = boto3.client('batch')\n job_queue_name = 'thestral_job_queue_v1'\n job_definition_name = 'thestral_job_definition'\n job_name = 'thestral_job'\n submit_job(batch_client, job_definition_name, job_name, job_queue_name)\n logger.info(\"submit_job Completed\")\n"
},
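Because lambda_handler never reads its event or context arguments, it can be smoke-tested locally before packaging; a minimal sketch (this assumes locally configured AWS credentials, and note that it really will submit a Batch job):

```python
from thestral_aws_lambda_function import lambda_handler

# the handler ignores both arguments, so empty stand-ins are enough
lambda_handler(event={}, context=None)
```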
{
"alpha_fraction": 0.5536859035491943,
"alphanum_fraction": 0.5774572491645813,
"avg_line_length": 27.58015251159668,
"blob_id": "3f86383d5a91944468e4981521e280d14f855570",
"content_id": "174cc19791cb2541401a5b2263a1a3eb096329ac",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3744,
"license_type": "permissive",
"max_line_length": 77,
"num_lines": 131,
"path": "/batch_deploy.py",
"repo_name": "vinodsharma/ecs-deploy",
"src_encoding": "UTF-8",
"text": "import boto3\n\n\ndef is_compute_env_exists(batch_client, compute_env_name):\n response = batch_client.describe_compute_environments(\n computeEnvironments=[\n compute_env_name,\n ],\n )\n if len(response['computeEnvironments']) == 1:\n return True\n else:\n return False\n\n\ndef is_job_queue_exists(batch_client, job_queue_name):\n response = batch_client.describe_job_queues(\n jobQueues=[\n job_queue_name,\n ],\n )\n\n if len(response['jobQueues']) == 1:\n return True\n else:\n return False\n\n\ndef is_job_definition_exists(batch_client, job_definition_name):\n response = batch_client.describe_job_definitions(\n jobDefinitionName=job_definition_name,\n )\n if len(response['jobDefinitions']) >= 1:\n return True\n else:\n return False\n\n\ndef create_compute_env(batch_client, compute_env_name,\n instance_types, aws_account_id):\n batch_client.create_compute_environment(\n type='MANAGED',\n computeEnvironmentName=compute_env_name,\n computeResources={\n 'type': 'EC2',\n 'desiredvCpus': 2,\n 'instanceRole': 'ecsInstanceRole',\n 'instanceTypes': instance_types,\n 'maxvCpus': 256,\n 'minvCpus': 2,\n 'securityGroupIds': [\n 'sg-6b10f90e',\n ],\n 'subnets': [\n 'subnet-e77c9d82',\n 'subnet-350d0d41',\n 'subnet-e0a380a6',\n ],\n 'tags': {\n 'Name': 'Batch Instance - C4OnDemand',\n },\n },\n serviceRole='arn:aws:iam::' + aws_account_id +\n ':role/service-role/AWSBatchServiceRole',\n state='ENABLED',\n )\n\n\ndef create_job_queue(batch_client, job_queue_name):\n batch_client.create_job_queue(\n computeEnvironmentOrder=[\n {\n 'computeEnvironment': compute_env_name,\n 'order': 1,\n },\n ],\n jobQueueName=job_queue_name,\n priority=1,\n state='ENABLED',\n )\n\n\ndef register_job_definition(batch_client, job_definition_name, docker_image):\n response = batch_client.register_job_definition(\n type='container',\n containerProperties={\n 'command': [\n 'sleep',\n '10',\n ],\n 'image': docker_image,\n 'memory': 1024*6,\n 'vcpus': 2,\n },\n jobDefinitionName=job_definition_name,\n )\n\n print(response)\n\n\ndef submit_job(batch_client, job_definition_name, job_name, job_queue_name):\n response = batch_client.submit_job(\n jobDefinition=job_definition_name,\n jobName=job_name,\n jobQueue=job_queue_name,\n )\n\n print(response)\n\n\nif __name__ == \"__main__\":\n aws_account_id = '156083142943'\n batch_client = boto3.client('batch')\n compute_env_name = 'V3_M4OnDemand'\n job_queue_name = 'M4OnDemandQueue'\n job_definition_name = 'M4OnDemandJobDefinition'\n job_name = 'M4OnDemandJob'\n instance_types = ['m4.large']\n docker_repo = \"vinodsharma/circleci-demo-docker\"\n docker_image_tag = \"459049b9305ed6d5b74f62fe5c06c7620b5e7214\"\n docker_image = docker_repo + \":\" + docker_image_tag\n\n if not is_compute_env_exists(batch_client, compute_env_name):\n create_compute_env(\n batch_client, compute_env_name,\n instance_types, aws_account_id\n )\n if not is_job_queue_exists(batch_client, job_queue_name):\n create_job_queue(batch_client, job_queue_name)\n register_job_definition(batch_client, job_definition_name, docker_image)\n submit_job(batch_client, job_definition_name, job_name, job_queue_name)\n"
},
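submit_job in batch_deploy.py prints the raw submit response but never follows the job. A small polling sketch using describe_jobs to watch it reach a terminal state (the attempt budget and sleep interval are arbitrary choices):

```python
import time
import boto3

batch_client = boto3.client('batch')

def wait_for_job(job_id, attempts=60):
    # poll AWS Batch until the job reaches a terminal state
    for _ in range(attempts):
        job = batch_client.describe_jobs(jobs=[job_id])['jobs'][0]
        if job['status'] in ('SUCCEEDED', 'FAILED'):
            return job['status']
        time.sleep(10)
    return 'TIMEOUT'

# response = batch_client.submit_job(...)  # as in batch_deploy.py
# print(wait_for_job(response['jobId']))
```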
{
"alpha_fraction": 0.7457627058029175,
"alphanum_fraction": 0.7457627058029175,
"avg_line_length": 15.857142448425293,
"blob_id": "f50c69199c64226f581c44347d4a4e928080c465",
"content_id": "2d0e06e1a546988a24da3859590b18fd46880af7",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 118,
"license_type": "permissive",
"max_line_length": 30,
"num_lines": 7,
"path": "/tests/test_basis.py",
"repo_name": "vinodsharma/ecs-deploy",
"src_encoding": "UTF-8",
"text": "import os\n\n\nprint \"Testing Started\"\nprint os.environ['LOG_FILE']\nprint os.environ['LOGDNA_APP']\nprint \"Testing Ended\"\n"
},
{
"alpha_fraction": 0.7417218685150146,
"alphanum_fraction": 0.748344361782074,
"avg_line_length": 17.875,
"blob_id": "4d3cf6117972d74bc04ab0a1317b56a7c881955c",
"content_id": "ba5b7454945f07afe5f591d779b4f90450b973e0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 151,
"license_type": "permissive",
"max_line_length": 30,
"num_lines": 8,
"path": "/test_deploy.py",
"repo_name": "vinodsharma/ecs-deploy",
"src_encoding": "UTF-8",
"text": "import os\n\n\nprint \"Deployment Started\"\nprint os.environ['LOG_FILE']\nprint os.environ['LOGDNA_APP']\nprint os.environ['S3_DIR']\nprint \"Deployment Ended\"\n"
},
{
"alpha_fraction": 0.8120300769805908,
"alphanum_fraction": 0.8120300769805908,
"avg_line_length": 25.600000381469727,
"blob_id": "20b167abec389ff68de4ada4bb44ba9b016162ee",
"content_id": "78e2e09bec6d536aa654321e2b87f3ce51d0d75b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 133,
"license_type": "permissive",
"max_line_length": 44,
"num_lines": 5,
"path": "/test_main.py",
"repo_name": "vinodsharma/ecs-deploy",
"src_encoding": "UTF-8",
"text": "from sklearn import linear_model\nprint \"Test Started\"\nlogistic = linear_model.LogisticRegression()\nprint logistic\nprint \"Test Ended\"\n"
}
] | 8 |
WeiChengLiou/green_team
|
https://github.com/WeiChengLiou/green_team
|
15c16bebe10faba9a8e7dee8641783a639194b78
|
0612a6e328523a8962f22a0c30d44578127001c9
|
ca18666ee966ba5ad2e366cb973065c5a28f9483
|
refs/heads/master
| 2021-05-10T20:58:55.855155 | 2018-01-20T07:21:20 | 2018-01-20T07:21:20 | 118,213,371 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5470941662788391,
"alphanum_fraction": 0.5731462836265564,
"avg_line_length": 18.959999084472656,
"blob_id": "7eaced48dfc3473fd0679761bbf3981c611218b8",
"content_id": "6535288439eacdf87bf51d6693bd41846490802c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 998,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 50,
"path": "/social_network.py",
"repo_name": "WeiChengLiou/green_team",
"src_encoding": "UTF-8",
"text": "##\nfrom vis.output import exp_graph, write_d3\nimport pandas as pd\nimport json\nimport networkx as nx\n\nfi = 'G1101_2016_list.json'\nret = json.loads(open(fi).read())\n\n##\ndf = pd.DataFrame(ret)\ncoms = pd.concat([\n (df[['source', 'taxcode_source']]\n .drop_duplicates()\n .rename(columns={\n 'source': 'com',\n 'taxcode_source': 'taxcode',\n })\n ),\n (df[['target', 'taxcode_target']]\n .drop_duplicates()\n .rename(columns={\n 'target': 'com',\n 'taxcode_target': 'taxcode',\n })\n ),\n])\ncoms = coms.drop_duplicates()\n\n\n##\nG = nx.DiGraph()\nfor x in ret:\n G.add_edge(x['source'], x['target'])\n\nprint('Nodes: %d' % G.number_of_nodes())\nprint('Edges: %d' % G.number_of_edges())\n\n##\nfor x in coms.itertuples():\n G.node[x.com]['name'] = x.com\n G.node[x.com]['group'] = int(x.taxcode == 'NA')\n\n\n##\npush = False # True if want to push to gh-pages\nexp_graph(G, path='docs', fi='G1101_2016')\nwrite_d3('G1101_2016', path='docs', push=True)\n\n##\n"
},
{
"alpha_fraction": 0.5592930316925049,
"alphanum_fraction": 0.57012540102005,
"avg_line_length": 24.05714225769043,
"blob_id": "0015d34c4fee02fbaf9b9eed71bf4b7d94b69a77",
"content_id": "aa25df96c0e3acb6ba87ab10021f15da119ac473",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1754,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 70,
"path": "/vis/output.py",
"repo_name": "WeiChengLiou/green_team",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nimport jinja2\nfrom os.path import join, dirname\nimport json\nimport pdb\nfrom traceback import print_exc\ntemplate_dir = join(dirname(__file__), 'templates')\njinja_env = jinja2.Environment(\n loader=jinja2.FileSystemLoader(template_dir),\n autoescape=True)\n\n\ndef render_str(template, **params):\n t = jinja_env.get_template(template)\n return t.render(params)\n\n\ndef render_html(fi, **params):\n template = 'force_layout.html'\n try:\n with open(fi, 'w') as f:\n f.write(render_str(template, **params))\n except:\n print_exc()\n pdb.set_trace()\n\n\ndef write_d3(fi, **kwargs):\n # Export d3 file, return htmrul\n path = kwargs.get('path', '')\n jsonfi = fi + '.json'\n htmlfi = join(path, fi + '.html')\n if kwargs.get('push'):\n jsonfi = \"https://weichengliou.github.io/green_team/{}/{}\".format(\n path, jsonfi)\n\n render_html(htmlfi, FileName=jsonfi, **kwargs)\n return htmlfi\n\n\ndef exp_graph(G, **kwargs):\n for k, v in G.node.items():\n v.setdefault('size', 10)\n v.setdefault('group', 0)\n\n dicBig = {}\n dic0 = [dic for k, dic in G.node.items()]\n dicBig['nodes'] = dic0\n idx = list(G.node.keys())\n dic1 = []\n for x, y in G.edges():\n dic = {'source': idx.index(x), 'target': idx.index(y)}\n dic.update(G.get_edge_data(x, y))\n dic1.append(dic)\n if len(dic1) > 0:\n dic1.append(dic1[0])\n dicBig['links'] = dic1\n\n if 'fi' in kwargs:\n jsonfi = join(kwargs.get('path', ''), kwargs.get('fi') + '.json')\n json.dump(dicBig, open(jsonfi, 'w'),\n ensure_ascii=False)\n else:\n return dicBig\n\n\nif __name__ == '__main__':\n \"\"\"\"\"\"\n # test()\n"
}
] | 2 |
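For reference, exp_graph above serializes a graph into the node/link layout that the force_layout.html template consumes: 'nodes' holds the per-node attribute dicts and 'links' references nodes by list index. A tiny sketch of the returned dict for a one-edge graph (node names are illustrative; this assumes the networkx 1.x G.node API the repo uses):

```python
import networkx as nx
from vis.output import exp_graph

G = nx.DiGraph()
G.add_edge('A', 'B')
G.node['A'].update(name='A', group=0)
G.node['B'].update(name='B', group=1)

print(exp_graph(G))
# {'nodes': [{'name': 'A', 'group': 0, 'size': 10},
#            {'name': 'B', 'group': 1, 'size': 10}],
#  'links': [{'source': 0, 'target': 1}, {'source': 0, 'target': 1}]}
# (the first link appears twice because exp_graph re-appends dic1[0])
```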
cjbruin23/web-scraper
|
https://github.com/cjbruin23/web-scraper
|
5aa6d5e29711cd96e381ac7d0ef9027a3fc548bd
|
9f24db3e6e49cb7c44cc53820240d3c0a014fd1d
|
ed6b928f280642c6563e171089796d18df0210d4
|
refs/heads/master
| 2020-03-27T01:39:10.696040 | 2018-08-27T17:49:40 | 2018-08-27T17:49:40 | 145,730,526 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.3785776197910309,
"alphanum_fraction": 0.4310494363307953,
"avg_line_length": 26.452381134033203,
"blob_id": "e5492738894cea810ce7517148c765298f3e5a97",
"content_id": "581639ecff83a2adcb4216dd454e74e0514fcf2e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2306,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 84,
"path": "/scraper.py",
"repo_name": "cjbruin23/web-scraper",
"src_encoding": "UTF-8",
"text": "import argparse\nimport requests\nimport re\nimport sys\n\n\ndef get_emails(content):\n email_add = re.findall(r'''(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+'''\n r'''(?:\\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*'''\n r'|\"(?:[\\x01-\\x08\\x0b\\x0c\\x0e-'\n r'\\x1f\\x21\\x23-\\x5b\\x5d-\\x7f]'\n r'|\\\\[\\x01-\\x09\\x0b\\x0c\\x0e-\\x7f])*\")@'\n r'(?:(?:[a-z0-9]'\n r'(?:[a-z0-9-]*[a-z0-9])?\\.)'\n r'+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?'\n r'|\\[(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.)'\n r'{3}(?:25[0-5]|2[0-4][0-9]'\n r'|[01]?[0-9][0-9]?|[a-z0-9-]*[a-z0-9]:'\n r'(?:[\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x21-\\x5a\\x53-\\x7f]'\n r'|\\\\[\\x01-\\x09\\x0b\\x0c\\x0e-\\x7f])+)\\])', content)\n email_add = list(set(email_add))\n print '\\nEmails'\n for email in email_add:\n print email\n pass\n\n\ndef get_phone(content):\n phone_nums = re.findall(r'1?\\W*([2-9][0-8][0-9])'\n r'\\W*([2-9][0-9]{2})\\W*([0-9]{4})(\\se?x?t?(\\d*))?',\n content)\n phone_nums = list(set(phone_nums))\n print '\\nPhone Numbers'\n for phone in phone_nums:\n fin_phone = '-'.join(phone[0:3])\n print fin_phone\n\n pass\n\n\ndef get_urls(content):\n print 'Urls'\n link_matches = re.findall(r'http[s]?://(?:[a-zA-Z]'\n r'|[0-9]|[$-_@.&+]|[!*\\(\\),]'\n r'|(?:%[0-9a-fA-F][0-9a-fA-F]))+', content)\n link_matches = list(set(link_matches))\n for link in link_matches:\n print link\n\n pass\n\n\ndef extract_html(url):\n req = requests.get(url)\n content = req.content\n\n get_urls(content)\n\n get_phone(content)\n\n get_emails(content)\n return\n\n\ndef create_parser():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"url\", help=\"url destination to extract data from\")\n\n return parser\n\n\ndef main(args):\n parser = create_parser()\n\n if not args:\n parser.print_usage()\n sys.exit(1)\n\n parsed_args = parser.parse_args(args)\n extract_html(parsed_args.url)\n\n\nif __name__ == \"__main__\":\n main(sys.argv[1:])\n"
}
] | 1 |
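The extraction helpers in scraper.py can also be exercised directly on an in-memory string rather than a fetched page; a quick sketch (the sample contact details below are fabricated):

```python
from scraper import get_phone, get_emails

sample = "Call 303-555-0123 or write to [email protected]"  # fabricated sample data
get_phone(sample)   # prints 303-555-0123
get_emails(sample)  # prints [email protected]
```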
tejastank/rssi_positioning
|
https://github.com/tejastank/rssi_positioning
|
bc257493a0a6a118cd8cff2b40aae041d0347579
|
c2fd6cb12bbea55321a6c18306340efb087d27cf
|
fd494c7bae89d3938439358904e79112064458e5
|
refs/heads/master
| 2022-04-10T01:57:13.264580 | 2019-12-01T03:14:37 | 2019-12-01T03:14:37 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7697947025299072,
"alphanum_fraction": 0.7697947025299072,
"avg_line_length": 31.5238094329834,
"blob_id": "7f2c7eaab462ebb05a81ba8524e31bff1ceed30c",
"content_id": "9a992e35fa17b47e60a12d217583f406bec912d1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 682,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 21,
"path": "/install.sh",
"repo_name": "tejastank/rssi_positioning",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\nsudo apt-get -y update\nsudo apt-get -y upgrade\n\nsudo apt-get install -y openssh-server\nsudo apt-get install -y bluetooth libbluetooth-dev\nsudo apt-get install -y python-dev\n\nwget https://bootstrap.pypa.io/get-pip.py\n#python get-pip.py\n#pip install --upgrade pip\n#(note: do not use sudo pip, which can cause problems)\n#python -m pip install --user numpy scipy matplotlib pybluez\nsudo apt-get install -y python-scipy python-bluez\n\nsudo apt-get install --assume-yes git\ngit clone http://ci-git.int.osram-light.com/positioning_middleware/rssi_positioning.git\n\n#If need to have java as part of the system\n#sudo apt-get install -y default-jre\n#sudo apt-get install default-jdk"
},
{
"alpha_fraction": 0.7614588737487793,
"alphanum_fraction": 0.7727944850921631,
"avg_line_length": 44.088890075683594,
"blob_id": "6abe49cf50b632ec6c7db2e51e0ec9ca96abd794",
"content_id": "00c5cc55e847b79a3b6db2dbc778e80b862f39f6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2029,
"license_type": "no_license",
"max_line_length": 207,
"num_lines": 45,
"path": "/README.md",
"repo_name": "tejastank/rssi_positioning",
"src_encoding": "UTF-8",
"text": "## Description\nThis is the software for developing the indoor positioning using RF RSSI (Bluetooth LE beacons).\n\n\n## Installation\n1. Run install.sh to install the project dependencies. (not completed yet)\n\n## Usage\n\n1. visualization_server.py is supposed to be running on a server with known IP:Port address. This file is only in charge of displaying the results.\n2. main.py is supposed to be running on a remote target whose position is to be estimated. The position of the target is estimated by the target and then sent to the server with the known IP:Port address.\n3. Please make sure the IP:Port addresses are the same and meaningful for test setup.\n4. In order to run the real-time interactive visualization on the server, bokeh package is required. To install please install Anaconda first (https://www.anaconda.com/download/#linux), then install bokeh by\n```\n$ conda install bokeh\n```\n5. To run the visualization_server.py use command:\n```\n$ bokeh serve --show visualization_server.py\n```\n6. To run the main.py on a raspberry pi, use command:\n```\nsudo python main.py\n```\n\n7. Proper shutdown:\n - First, shutdown the main.py running on the target first by using \"Ctrl+c\"\n - Then, shutdown the bokeh server by using \"Ctrl+c\"\n - If sequence is wrong, need to restart the server machine since the port used by the bokeh server will remain occupied.\n\n\n## Usefull links and origional work\n\nhttp://inside.mines.edu/fs_home/whereman/papers/Hereman-Murphy-Trilateration-Manual-MCS-11-1991.pdf\n\nhttps://inside.mines.edu/~whereman/papers/Murphy-MS-Thesis-Complete-1992.pdf\n\n\n[Trilateration using Java] (https://github.com/lemmingapex/trilateration).\n\n[Levenberg-Marquardt algorithm](http://en.wikipedia.org/wiki/Levenberg%E2%80%93Marquardt_algorithm) from [Apache Commons Math](http://commons.apache.org/proper/commons-math/).\n\n[iBeacon-Scanner-](https://github.com/switchdoclabs/iBeacon-Scanner-).\n\n[Capture beacons using ubertooth with Wireshark](https://github.com/greatscottgadgets/ubertooth/wiki/Capturing-BLE-in-Wireshark).\n"
},
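The distance inputs to the trilateration come from the log-distance path-loss model used in main.py, d = 10^((ref − rssi)/(10·γ)), with γ = 2.5 and a reference power of −39 dBm at 1 m. A standalone sketch of that conversion (the RSSI readings below are made up):

```python
def rssi_to_distance(rssi, ref_dbm=-39.0, gamma=2.5):
    # log-distance path loss: d = 10 ** ((ref - rssi) / (10 * gamma))
    return 10.0 ** ((ref_dbm - rssi) / (10.0 * gamma))

for rssi in (-39.0, -64.0, -89.0):  # fabricated readings
    print(rssi, '->', round(rssi_to_distance(rssi), 2), 'm')
# -39.0 -> 1.0 m, -64.0 -> 10.0 m, -89.0 -> 100.0 m
```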
{
"alpha_fraction": 0.5637393593788147,
"alphanum_fraction": 0.5736544132232666,
"avg_line_length": 24.214284896850586,
"blob_id": "8cb5891f142ff1501487bb78fd51377763babbcc",
"content_id": "9c918619822956e8616a37da1899b946b77de71e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 706,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 28,
"path": "/testblescan.py",
"repo_name": "tejastank/rssi_positioning",
"src_encoding": "UTF-8",
"text": "import blescan\nimport sys\n\nimport bluetooth._bluetooth as bluez\n\ndev_id = 0\ntry:\n\tsock = bluez.hci_open_dev(dev_id)\n\tprint \"ble thread started\"\n\nexcept:\n\tprint \"error accessing bluetooth device...\"\n \tsys.exit(1)\n\nblescan.hci_le_set_scan_parameters(sock)\nblescan.hci_enable_le_scan(sock)\n\nwhile True:\n\treturnedList = blescan.parse_events(sock, 5)\n\tprint \"*******************************\"\n\tfor indx, beacon in enumerate(returnedList):\n\t\tprint \"-------------------------------\"\n\t\tsegments = [x.strip() for x in beacon.split(',')]\n\t\tprint \"Index: \" + str(indx) + \"\"\\\n\t\t+ \", \" + \"MAC: \" + segments[0]\\\n\t\t+ \", \" + \"UUID: \" + segments[1]\\\n\t\t+ \", \" + \"Major: \" + segments[2]\\\n\t\t+ \", \" + \"Minor: \" + segments[3]\n"
},
{
"alpha_fraction": 0.6673300266265869,
"alphanum_fraction": 0.6834399700164795,
"avg_line_length": 38.82075500488281,
"blob_id": "23132200296975ca1539600219dab8497b946b8a",
"content_id": "cde6fd94f5acae635136cd7510d22dfd02fee87c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 21105,
"license_type": "no_license",
"max_line_length": 290,
"num_lines": 530,
"path": "/main.py",
"repo_name": "tejastank/rssi_positioning",
"src_encoding": "UTF-8",
"text": "#indoor positioning using BLE beacons and nonlinear least squares optimization\n# run the following first to start the visualization server: \"bokeh serve --show visualization_server.py\"\nimport sys\nimport os\nimport time\nimport datetime\nimport socket\nimport blescan\nimport bluetooth._bluetooth as bluez\nimport numpy as np\nimport numpy.matlib\nfrom anchors import Anchors\nfrom scipy import optimize\n\n\ndef d_i_of_theta(theta, x_i, y_i, z_i ):\n\treturn ( (theta[0] - x_i)**2 + (theta[1] - y_i)**2 + (theta[2] - z_i)**2 ) ** 0.5\n\n# the callable function for a residual term\ndef f_i_of_theta(theta, x_i, y_i, z_i, r_i ):\n return d_i_of_theta(theta, x_i, y_i, z_i ) - r_i\n\n# the callable function for a residual term squared\ndef g_i_of_theta(theta, x_i, y_i, z_i, r_i ):\n\treturn f_i_of_theta(theta, x_i, y_i, z_i, r_i ) ** 2\n\n# partial direvatives of g_i_of_theta with respect to theta[0], theta[1], and theta[2].\ndef gradients(theta, x_i, y_i, z_i, r_i ):\n\tJ = np.empty((r_i.size, theta.size))\n\tJ[:, 0] = 2 * f_i_of_theta( theta, x_i, y_i, z_i, r_i ) * ( theta[0] - x_i ) / d_i_of_theta( theta, x_i, y_i, z_i )\n\tJ[:, 1] = 2 * f_i_of_theta( theta, x_i, y_i, z_i, r_i ) * ( theta[1] - y_i ) / d_i_of_theta( theta, x_i, y_i, z_i )\n\tJ[:, 2] = 2 * f_i_of_theta( theta, x_i, y_i, z_i, r_i ) * ( theta[2] - z_i ) / d_i_of_theta( theta, x_i, y_i, z_i )\n\treturn J\n\n\nclass Scanner(object):\n\n\tdef __init__(self, uuidList, dev_id=0, numberOfBeaconsToWait=10):\n\t\tself.uuidList = uuidList\n\t\tself.dev_id = dev_id\n\t\tself.__numberOfBeaconsToWait = numberOfBeaconsToWait\n\t\tself.sock = None\n\n\t\ttry:\n\t\t\tself.sock = bluez.hci_open_dev(self.dev_id)\n\t\t\tprint \"ble thread started\"\n\n\t\texcept:\n\t\t\tprint \"error accessing bluetooth device...\"\n\t\t\tsys.exit(1)\n\n\t\tblescan.hci_le_set_scan_parameters(self.sock)\n\t\tblescan.hci_enable_le_scan(self.sock)\n\n\tdef set_number_of_beacons_to_wait(self, numberOfBeaconsToWait):\n\t\tself.__numberOfBeaconsToWait = numberOfBeaconsToWait\n\n\tdef scan(self):\n\t\tprint(\"\\nScanning...\")\n\t\treturnedList = blescan.parse_events(self.sock, self.__numberOfBeaconsToWait)\n\t\ttimestamp = str(time.time())#all scanned beacons would have the same timestamp\n\t\treturn timestamp, returnedList\n\nclass Beacon(object):\n\n\tdef __init__(self, index, rawString, timestamp):\n\t\tself.index = index\n\t\tself.rawString = rawString\n\t\tself.timestamp = timestamp\n\t\tsegments = [x.strip() for x in self.rawString.split(',')]\n\t\tself.mac = segments[0]\n\t\tself.uuid = segments[1]\n\t\tself.major = segments[2]\n\t\tself.minor = segments[3]\n\t\tself.refdbm = float(segments[4])\n\t\tself.rssi = float(segments[5])\n\t\tself.fullID = self.uuid + self.major + self.minor #strings appending\n\n\t\tself.numberOfOccurence = 0\n\t\tself.rssiVariance = None\n\t\tself.onFile = False\n\t\tself.x = None\n\t\tself.y = None\n\t\tself.z = None\n\t\tself.measuredDistance = None\n\nclass DataPoint(object):\n\n\tdef __init__(self, uuidList, anchors, timestamp, returnedList):\n\t\tself.listOfValidUuids = uuidList\n\t\tself.anchors = anchors\n\t\tself.timestamp = timestamp\n\t\tself.returnedList = returnedList\n\t\tself.gamma = 2.5# path loss exponent\n\t\tself.conversionRatio = 0.6096\n\n\t\tself.listOfBeacons = list()\n\t\tself.listOfValidBeacons = list()\n\t\tself.listOfValidUniqueBeacons = list() #used for positioning\n\n\t\tself.listOfFullIDsScanned = list()\n\t\tself.listOfNumberOfOccurence = list()\n\n\t\tself.numberOfBeacons = 0 # total number of 
scanned packets (multiple packets from a single beacon is possible)\n\t\tself.numberOfValidBeacons = 0 #number of scanned packets from valid beacons (multiple packets from a single beacon is possible)\n\t\tself.numberOfValidUniqueBeacons = 0 #number of usable beacons to do positioning\n\n\t\tself.listOfRSSI = list()\n\t\tself.listOfRefdbm = list()\n\t\tself.listOfPathLoss = list()\n\t\tself.listOfMeasuredDistances = list() #distances from the target to the valid and unique beacons.\n\t\tself.listOfXPositions = list() #find the values from the commissioning file\n\t\tself.listOfYPositions = list() #find the values from the commissioning file\n\t\tself.listOfZPositions = list() #find the values from the commissioning file\n\n\t\tself.populate()\n\n\tdef populate(self):\n\t\tfor index, rawBeaconString in enumerate(self.returnedList):\n\t\t\tbeacon = Beacon( index, rawBeaconString, self.timestamp )\n\t\t\tself.add_beacon(beacon)\n\n\t\tself.filter_beacons_by_uuid()\n\t\tself.condense()\n\t\tself.sort_beacons()\n\t\tself.find_beacon_geo_parameters()\n\t\tself.show_debug()\n\n\tdef add_beacon(self, beacon):\n\t\tself.listOfBeacons.append(beacon)\n\t\tself.numberOfBeacons += 1\n\n\tdef filter_beacons_by_uuid(self):\n\t\tfor beacon in self.listOfBeacons:\n\t\t\tif beacon.uuid in self.listOfValidUuids:\n\t\t\t\tself.listOfValidBeacons.append(beacon)\n\n\t\tself.numberOfValidBeacons = len(self.listOfValidBeacons)\n\n\tdef condense(self):\n\t\t#if multiple identical beacons exist, combine them to a single one with their rssi value averaged\n\t\t#if two beacons with same UUID, Major, and Minor group number, then the two are identical\n\t\tfor beacon in self.listOfValidBeacons:\n\t\t\tif beacon.fullID not in self.listOfFullIDsScanned:\n\t\t\t\tself.listOfFullIDsScanned.append(beacon.fullID)\n\t\t\t\tbeacon.numberOfOccurence = 1\n\t\t\t\tself.listOfValidUniqueBeacons.append(beacon)\n\t\t\t\tself.listOfNumberOfOccurence.append(1)\n\t\t\t\tself.numberOfValidUniqueBeacons +=1\n\t\t\telse:\n\t\t\t\tindex = self.listOfFullIDsScanned.index(beacon.fullID)\n\t\t\t\tself.listOfValidUniqueBeacons[index].rssi = \\\n\t\t\t\t(self.listOfValidUniqueBeacons[index].rssi * self.listOfNumberOfOccurence[index] + \\\n\t\t\t\t beacon.rssi * 1)/(self.listOfNumberOfOccurence[index] + 1)\n\t\t\t\tself.listOfNumberOfOccurence[index] += 1\n\t\t\t\tself.listOfValidUniqueBeacons[index].numberOfOccurence += 1\n\n\tdef sort_beacons(self):\n\t\tself.listOfValidUniqueBeacons.sort(self.compare_beacons)\n\n\tdef compare_beacons(self, a, b):\n\t\taValue = int(a.major)*100000 + int(a.minor)\n\t\tbValue = int(b.major)*100000 + int(b.minor)\n\t\tif aValue > bValue:\n\t\t\treturn 1\n\t\telif aValue == bValue:\n\t\t\treturn 0\n\t\telse:\n\t\t\treturn -1\n\n\tdef find_beacon_geo_parameters(self):\n\t\tfor beacon in self.listOfValidUniqueBeacons:\n\t\t\tif beacon.fullID in self.anchors.listOfFullIDsFromeFile:\n\t\t\t\tbeacon.onFile = True\n\t\t\t\t# beacon.measuredDistance = 10.0 ** ( (beacon.refdbm - beacon.rssi)/(10*self.gamma) )\n\t\t\t\tbeacon.measuredDistance = 10.0 ** ( (-39 - beacon.rssi)/(10*self.gamma) )\n\t\t\t\t### d = 10^( ( ref - rssi )/( 10*gamma ) )\n\t\t\t\twhichBeaconOnFile = self.anchors.listOfFullIDsFromeFile.index(beacon.fullID)\n\t\t\t\tbeacon.x = self.conversionRatio * self.anchors.listOfX[whichBeaconOnFile]\n\t\t\t\tbeacon.y = self.conversionRatio * self.anchors.listOfY[whichBeaconOnFile]\n\t\t\t\tbeacon.z = self.conversionRatio * 
self.anchors.listOfZ[whichBeaconOnFile]\n\t\t\t\tself.listOfRSSI.append(beacon.rssi)\n\t\t\t\tself.listOfRefdbm.append(beacon.refdbm)\n\t\t\t\tself.listOfPathLoss.append(beacon.refdbm - beacon.rssi)\n\t\t\t\tself.listOfMeasuredDistances.append(beacon.measuredDistance)\n\t\t\t\tself.listOfXPositions.append(beacon.x)\n\t\t\t\tself.listOfYPositions.append(beacon.y)\n\t\t\t\tself.listOfZPositions.append(beacon.z)\n\n\tdef print_beacons(self):\n\t\tfor beacon in self.listOfBeacons:\n\t\t\tprint(beacon.timestamp, beacon.rawString)\n\n\tdef print_valid_beacons(self):\n\t\tfor beacon in self.listOfValidBeacons:\n\t\t\tprint(beacon.rawString)\n\n\tdef print_valid_unique_beacons(self):\n\t\tfor beacon in self.listOfValidUniqueBeacons:\n\t\t\tprint(\"MAC: %18s, Major: %5d, Minor: %5d, Ref: %6.2f, #occurence: %3d, rssi: %6.2f, d: %5.2f, x: %5.2f, y: %5.2f, z: %5.2f\" % (beacon.mac, int(beacon.major), int(beacon.minor), beacon.refdbm, beacon.numberOfOccurence, beacon.rssi, beacon.measuredDistance, beacon.x, beacon.y, beacon.z) )\n\t\t\t#print(beacon.mac, beacon.uuid, beacon.major, beacon.minor, beacon.refdbm, beacon.rssi, beacon.onFile, beacon.x, beacon.y, beacon.z, beacon.measuredDistance)\n\n\t# def log_packets(self):\n\n\tdef show_debug(self):\n\t\tprint(\"\\n\\n----------------------------------------\")\n\t\tprint(\"\\n%d beacon packets in total.\" % self.numberOfBeacons)\n\t\t# self.print_beacons()\n\t\tprint(\"\\n%d valid beacon packets.\" % self.numberOfValidBeacons)\n\t\t# self.print_valid_beacons()\n\t\tprint(\"\\n%d valid and unique beacon packets (rssi averaged).\" % self.numberOfValidUniqueBeacons)\n\t\tself.print_valid_unique_beacons()\n\nclass Solver(object):\n\n\tdef __init__(self, dataPoint):\n\n\t\t# Definitions of the non-linear minimization problem\n\t\t# -- theta is the target point to be estimated with theta = (x, y, z)\n\t\t# -- r_i is the measured distances from the target point to the beacon i\n\t\t# -- x_i is the 3D coordinates of the beacon i in the x axis\n\t\t# -- y_i is the 3D coordinates of the beacon i in the y axis\n\t\t# -- z_i is the 3D coordinates of the beacon i in the z axis\n\n\t\tself.x_i = np.asarray(dataPoint.listOfXPositions)\n\t\tself.y_i = np.asarray(dataPoint.listOfYPositions)\n\t\tself.z_i = np.asarray(dataPoint.listOfZPositions)\n\t\tself.r_i = np.asarray(dataPoint.listOfMeasuredDistances)\n\n\t\t# self.theta_initial_guess = np.array([-1.0, 1.8, -1.5])\n\t\t# self.theta_initial_guess = np.array([ np.mean(self.x_i), np.mean(self.y_i), -2.7 ])\n\n\t\tseeminglyClosestAnchorIndex = np.argmin(dataPoint.listOfMeasuredDistances)\n\t\tprint(\"Closest anchor index and value:\")\n\t\tprint(\"Anchor: \", seeminglyClosestAnchorIndex)\n\t\tprint(\"RSSI: \", dataPoint.listOfRSSI[seeminglyClosestAnchorIndex])\n\t\tself.theta_initial_guess = np.array([ self.x_i[seeminglyClosestAnchorIndex], self.y_i[seeminglyClosestAnchorIndex], -2.7 ])\n\n\t\tself.lowerFeasibleBoundTheta = [-10.0, -10.0, -3.0]\n\t\tself.upperFeasibleBoundTheta = [ 10.0, 10.0, 3.0]\n\t\t'''\n\t\tusage: https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.least_squares.html#scipy.optimize.least_squares\n\t\t(Search for Example of solving a fitting problem, in the follow webpage\n\t\thttps://docs.scipy.org/doc/scipy/reference/tutorial/optimize.html )\n\t\t'''\n\t\tself.result_dogbox_cauchy = optimize.least_squares(g_i_of_theta, self.theta_initial_guess, method='dogbox', loss='cauchy', bounds=(self.lowerFeasibleBoundTheta, self.upperFeasibleBoundTheta), jac=gradients, args=(self.x_i, 
self.y_i, self.z_i, self.r_i), verbose=1)\n\t\tself.result_dogbox_arctan = optimize.least_squares(g_i_of_theta, self.theta_initial_guess, method='dogbox', loss='arctan', bounds=(self.lowerFeasibleBoundTheta, self.upperFeasibleBoundTheta), jac=gradients, args=(self.x_i, self.y_i, self.z_i, self.r_i), verbose=1)\n\t\tself.result_dogbox_soft_l1 = optimize.least_squares(g_i_of_theta, self.theta_initial_guess, method='dogbox', loss='soft_l1', bounds=(self.lowerFeasibleBoundTheta, self.upperFeasibleBoundTheta), jac=gradients, args=(self.x_i, self.y_i, self.z_i, self.r_i), verbose=1)\n\t\tself.result_trf_cauchy = optimize.least_squares(g_i_of_theta, self.theta_initial_guess, method='trf', loss='cauchy', bounds=(self.lowerFeasibleBoundTheta, self.upperFeasibleBoundTheta), jac=gradients, args=(self.x_i, self.y_i, self.z_i, self.r_i), verbose=1)\n\t\tself.result_trf_arctan = optimize.least_squares(g_i_of_theta, self.theta_initial_guess, method='trf', loss='arctan', bounds=(self.lowerFeasibleBoundTheta, self.upperFeasibleBoundTheta), jac=gradients, args=(self.x_i, self.y_i, self.z_i, self.r_i), verbose=1)\n\t\tself.result_trf_soft_l1 = optimize.least_squares(g_i_of_theta, self.theta_initial_guess, method='trf', loss='soft_l1', bounds=(self.lowerFeasibleBoundTheta, self.upperFeasibleBoundTheta), jac=gradients, args=(self.x_i, self.y_i, self.z_i, self.r_i), verbose=1)\n\n\t\t# self.result = self.result_trf_soft_l1.x\n\t\ttrustInitialGuessRatio = 0.25\n\t\tself.result = (self.result_trf_soft_l1.x*(1-trustInitialGuessRatio) + self.theta_initial_guess*trustInitialGuessRatio)\n\n\t\tfloat_formatter = lambda x: \"%07.2f\" % x\n\t\tnp.set_printoptions(formatter={'float_kind':float_formatter})\n\t\tprint(\"r_i measurements: \", self.r_i)\n\t\tprint(\"initial guess: \", self.theta_initial_guess)\n\t\tprint(\"Estimated result_dogbox_cauchy: \", self.result_dogbox_cauchy.x)\n\t\tprint(\"Estimated result_dogbox_arctan: \", self.result_dogbox_arctan.x)\n\t\tprint(\"Estimated result_dogbox_soft_l1: \", self.result_dogbox_soft_l1.x)\n\t\tprint(\"Estimated result_trf_cauchy: \", self.result_trf_cauchy.x)\n\t\tprint(\"Estimated result_trf_arctan: \", self.result_trf_arctan.x)\n\t\tprint(\"Estimated result_trf_soft_l1: \", self.result_trf_soft_l1.x)\n
\nclass EDMSolver(object):\n\n\tdef __init__(self, dataPoint):\n\n\t\tself.x_i = np.asarray(dataPoint.listOfXPositions)\n\t\tself.y_i = np.asarray(dataPoint.listOfYPositions)\n\t\tself.z_i = np.asarray(dataPoint.listOfZPositions)\n\n\t\tself.r_i = np.asarray(dataPoint.listOfMeasuredDistances)\n\n\t\tself.numOfAnchors = len(self.x_i)\n\t\tself.listOfAnchorIndices = np.arange(self.numOfAnchors)\n\n\t\t#add the target as an extra point at the end of the system\n\t\tself.x_i = np.append(self.x_i, 0.0)\n\t\tself.y_i = np.append(self.y_i, 0.0)\n\t\tself.z_i = np.append(self.z_i, 0.0)\n\t\tself.r_i = np.append(self.r_i, 0.0)\n\n\t\t#construct the big X matrix\n\t\tself.numOfPoints = len(self.x_i)\n\t\tself.X = np.mat( np.reshape( np.concatenate( (self.x_i, self.y_i, self.z_i), axis=0 ), (3, self.numOfPoints ) ) )\n\t\tself.Yanchors = self.X[:, self.listOfAnchorIndices]\n\t\tself.edm = None\n\t\tself.Ghat = None\n\t\tself.Xhat = None\n\t\tself.XhatFinal = None\n\t\tself.result = None\n\n\tdef get_edm(self):\n\t\tX = self.X\n\t\tG = X.T*X\n\t\toneV = np.matlib.ones( (self.numOfPoints, 1) )\n\t\tdiagV = np.mat(np.diag(G)).T  #need the transpose to make it a column vector\n\t\tself.edm = diagV*oneV.T - 2*G + oneV*diagV.T\n\t\tsquaredDistancesToAnchors = np.square(self.r_i)\n\t\tself.edm[self.numOfPoints-1] = squaredDistancesToAnchors\n\t\tself.edm[:,self.numOfPoints-1] = np.reshape(squaredDistancesToAnchors, (self.numOfPoints, 1) )\n\t\t# print(\"self.edm:\")\n\t\t# print(self.edm)\n\t\t# print(\"self.r_i:\")\n\t\t# print(self.r_i)\n\n\tdef estimate_Ghat(self):\n\t\tnumOfPoints = self.numOfPoints\n\t\tidentityMat = np.matlib.identity(numOfPoints, dtype=float)\n\t\toneV = np.matlib.ones((numOfPoints,1))\n\t\tJ = identityMat - (1/float(numOfPoints))*oneV*oneV.T\n\t\tself.Ghat = -0.5*J*self.edm*J\n\n\tdef estimate_Xhat(self):\n\t\tdim = 3\n\t\tnumOfPoints = self.numOfPoints\n\t\tw1, v1 = np.linalg.eig(self.Ghat)\n\t\tw2 = np.mat(w1)\n\t\tv2 = np.mat(v1)\n\t\tsortingIndices = np.argsort(w2)\n\t\tindices = np.squeeze(np.array(np.flip(sortingIndices,1)))\n\t\tw3 = np.flip(np.sort(w2),1)\n\t\tw = w3\n\t\tv = v2\n\t\tv=v[:, indices]\n\t\tw = np.sqrt(w[:,0:dim])\n\t\tconstructMat = np.mat(np.diagflat(w))\n\t\tconstructMat = np.real(np.concatenate((constructMat, np.matlib.zeros((dim,numOfPoints-dim))), axis=1))\n\t\tXhat = constructMat*v.T\n\t\tXhat = Xhat.real\n\t\tself.Xhat = Xhat\n\n\tdef rectify_w_anchors(self):\n\t\tnumOfPoints = self.numOfPoints\n\t\tYanchors = self.Yanchors\n\t\tXa = self.Xhat[:,self.listOfAnchorIndices]\n\t\tYBar = Yanchors - Yanchors.mean(1)\n\t\tXaBar = Xa - Xa.mean(1)\n\t\tXYBarProd = XaBar*YBar.T\n\t\tU, s, VT = np.linalg.svd(XYBarProd, full_matrices=True) #VT is actually the V transpose\n\t\tV=VT.T\n\t\tR = V*U.T\n\t\toneV = np.matlib.ones((numOfPoints,1))\n\t\tself.XhatFinal = R*(self.Xhat - Xa.mean(1)*oneV.T) + Yanchors.mean(1)*oneV.T\n\n\n\tdef run(self):\n\t\tprint(\"===============================================================\")\n\t\tprint(\"EDM approach solver:\")\n\t\tself.get_edm()\n\t\tself.estimate_Ghat()\n\t\tself.estimate_Xhat()\n\t\tself.rectify_w_anchors()\n\t\tself.result = np.asarray(self.XhatFinal[:,-1]).reshape((1,3))\n\t\tprint(\"EDM approach result:\")\n\t\tprint(self.result)\n\t\t# print(\"X:\")\n\t\t# print(self.X)\n\t\t# print(\"XhatFinal - X:\")\n\t\t# print(self.XhatFinal - self.X)\n\t\tprint(\"===============================================================\")\n
\n\nclass Reporter(object):\n\n\tdef __init__(self, hostIPString, portNumber):\n\n\t\tself.hostIPString = hostIPString\n\t\tself.portNumber = portNumber\n\t\tself.serverAddress = (self.hostIPString, self.portNumber)\n\t\tself.dataSocket = socket.socket()\n\t\tself.dataSocket.settimeout(3.0)\n\t\tself.setup = False\n\t\ttry:\n\t\t\tself.dataSocket.connect(self.serverAddress)\n\t\t\tself.setup = True\n\t\t\tprint(\"\\nConnection to server established.\")\n\t\texcept:\n\t\t\tprint(\"\\nError when setting up a reporter.\")\n\t\t\tpass\n\n\tdef __del__(self):\n\t\tself.dataSocket.close()\n\t\tprint(\"\\nData socket closed.\")\n\n\tdef report(self, result):\n\t\tif self.setup == True:\n\t\t\tresult = result.flatten()\n\t\t\tprint(result)\n\t\t\ttry:\n\t\t\t\t# xStr = \"%.2f\" % result.x[0]\n\t\t\t\t# yStr = \"%.2f\" % result.x[1]\n\t\t\t\t# zStr = \"%.2f\" % result.x[2]\n\t\t\t\txStr = \"%.2f\" % result[0]\n\t\t\t\tyStr = \"%.2f\" % result[1]\n\t\t\t\tzStr = \"%.2f\" % result[2]\n\t\t\t\tdataStr = xStr + \", \" + yStr + \", \" + zStr\n\t\t\t\tself.dataSocket.send(dataStr)\n\t\t\t\tprint(\"-------------Data reported successfully.\\n\")\n\t\t\texcept:\n\t\t\t\tprint(\"-------------Error when reporting.\\n\")\n\t\t\t\tpass\n\t\telse:\n\t\t\tprint(\"-------------Data not reported due to unsuccessful setup.\\n\")\n
\nclass Logger(object):\n\n\tdef __init__(self):\n\t\tself.directoryName = \"/home/pi/rssi_positioning/logs\"\n\t\tif not os.path.exists(self.directoryName):\n\t\t\tos.mkdir(self.directoryName)\n\t\tself.humanTimestamp = time.strftime(\"%Y-%m-%d-%H-%M-%S\")\n\t\tself.logFileName = self.directoryName + \"/\" + self.humanTimestamp + \".log\"\n\t\tself.logFile = open(self.logFileName, 'w')\n\t\tprint(self.logFileName)\n\n\tdef __del__(self):\n\t\tprint(\"Closing log file \" + self.logFileName)\n\t\tself.logFile.close()\n\t\tprint(\"Log file \" + self.logFileName + \" is closed.\")\n\n\tdef log(self, dataString):\n\t\tself.logFile.write(dataString+\"\\n\")\n\n\tdef logDataPoint(self, dataPoint):\n\t\tfor beacon in dataPoint.listOfValidBeacons:\n\t\t\tself.log(dataPoint.timestamp + \",\" + beacon.rawString)\n\n\ndef main():\n\n\tuuidList = [\"e2c56db5dffb48d2b060d0f5a71096e0\"]\n\tdev_id = 0\n\t#please give a number below 40, since we assume the packets within one scan batch are received at roughly the same time.\n\t#if the number is high, motion becomes significant and the RSSI values will be less accurate.\n\tnumberOfBeaconsToWait = 2000\n\tcommissioningFileName = \"/home/pi/rssi_positioning/commissionning.dat\"\n\n\thost = '192.168.1.6'\n\tport = 5000\n\treporter = Reporter(host, port)\n\n\tlogger = Logger()\n\n\ttry:\n\t\tanchors = Anchors(commissioningFileName)#in anchors.py\n\t\tanchors.show_debug()\n\t\tscanner = Scanner( uuidList, dev_id, numberOfBeaconsToWait )\n\t\ti = 0\n\t\twhile True:\n\t\t\ttimestamp, returnedList = scanner.scan()\n\t\t\tdataPoint = DataPoint(uuidList, anchors, timestamp, returnedList)\n\t\t\tsolver = Solver(dataPoint)\n\t\t\tedmSolver = EDMSolver(dataPoint)\n\t\t\tedmSolver.run()\n\t\t\tlogger.logDataPoint(dataPoint)\n\t\t\tresult = (solver.result + edmSolver.result)/2\n\t\t\t# reporter.report(solver.result)\n\t\t\t# reporter.report(edmSolver.result)\n\t\t\treporter.report(result)\n\t\t\ti += 1\n\t\t\tprint(\"Running loop number: \", i)\n\n\texcept KeyboardInterrupt:\n\t\tprint(\"\\nWarning: keyboard interrupt detected, quitting...\")\n\n\tfinally:\n\t\t#clean up\n\t\tprint(\"Program done.\")\n\nif __name__ == \"__main__\":\n\tmain()\n
\n\n\n###########\n#Code below is one example of using the least_squares method provided by scipy library\n###########\n# def d_i_of_theta( theta, x_i, y_i, z_i ):\n# \treturn ( (theta[0] - x_i)**2 + (theta[1] - y_i)**2 + (theta[2] - z_i)**2 ) ** 0.5\n#\n# #the callable function for a residual term\n# def f_i_of_theta( theta, x_i, y_i, z_i, r_i ):\n# \treturn d_i_of_theta(theta, x_i, y_i, z_i ) - r_i\n#\n# #the callable function for a residual term squared\n# def g_i_of_theta( theta, x_i, y_i, z_i, r_i ):\n# \treturn f_i_of_theta(theta, x_i, y_i, z_i, r_i ) ** 2\n#\n# #the callable function for the gradients with respect to theta[0], theta[1], and theta[2].\n# # partial derivatives of g_i_of_theta\n# def gradients( theta, x_i, y_i, z_i, r_i ):\n# \tJ = np.empty((r_i.size, theta.size))\n# \tJ[:, 0] = 2 * f_i_of_theta( theta, x_i, y_i, z_i, r_i ) * ( theta[0] - x_i ) / d_i_of_theta( theta, x_i, y_i, z_i )\n# \tJ[:, 1] = 2 * f_i_of_theta( theta, x_i, y_i, z_i, r_i ) * ( theta[1] - y_i ) / d_i_of_theta( theta, x_i, y_i, z_i )\n# \tJ[:, 2] = 2 * f_i_of_theta( theta, x_i, y_i, z_i, r_i ) * ( theta[2] - z_i ) / d_i_of_theta( theta, x_i, y_i, z_i )\n# \treturn J\n#\n#\n#\n# #to get the print format back to default, use the next line\n# #np.set_printoptions(edgeitems=3,infstr='inf', linewidth=75, nanstr='nan', precision=8, suppress=False, threshold=1000, formatter=None)\n# x_i = np.array([ -5.0, 5.0, -5.0, 5.0])\n# y_i = np.array([ -5.0, -5.0, 5.0, 5.0])\n# z_i = np.array([ 3.0, 3.0, 3.0, 3.0])\n#\n# r_i = np.array([ 3 + np.random.normal(0, 0.5),\\\n# \t\t\t\t np.sqrt(109) + np.random.normal(0, 1),\\\n# \t\t\t\t np.sqrt(109) + np.random.normal(0, 1),\\\n# \t\t\t\t np.sqrt(209) + np.random.normal(0, 1)])\n#\n# theta_initial_guess = np.array([0.0, 0.0, 0.0])\n# lowerFeasibleBoundTheta = [-10, -10, 0]\n# upperFeasibleBoundTheta = [10, 10, 2.0]\n# '''\n# usage: https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.least_squares.html#scipy.optimize.least_squares\n# (Search for the example of solving a fitting problem in the following webpage\n# https://docs.scipy.org/doc/scipy/reference/tutorial/optimize.html )\n# '''\n# result = optimize.least_squares(g_i_of_theta, theta_initial_guess, bounds=(lowerFeasibleBoundTheta, upperFeasibleBoundTheta), jac=gradients, args=(x_i, y_i, z_i, r_i), verbose=1)\n#\n# float_formatter = lambda x: \"%07.2f\" % x\n# np.set_printoptions(formatter={'float_kind':float_formatter})\n# print(\"r_i measurements: \", r_i)\n# print(\"initial guess: \", theta_initial_guess)\n# print(\"Estimated: \", result.x)\n"
},
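The `EDMSolver` in the file above is classical multidimensional scaling: build a Euclidean distance matrix, double-center it into a Gram matrix, and factor that to recover coordinates. A minimal self-contained sketch of the same double-centering trick, with a made-up point set and plain `numpy` arrays instead of `np.mat` (illustration only, not part of the repo):

```python
import numpy as np

# Toy check of the recipe used by get_edm()/estimate_Ghat()/estimate_Xhat():
# from squared pairwise distances D, the centering matrix J = I - (1/n) 11^T
# yields a Gram matrix G = -0.5 * J D J whose top eigenpairs give coordinates
# up to a rigid transform.

def squared_edm(points):
    """points: (dim, n) array of column vectors."""
    g = points.T @ points
    d = np.diag(g)[:, None]
    return d - 2 * g + d.T

rng = np.random.default_rng(0)
X = rng.normal(size=(3, 6))              # six made-up points in 3-D
D = squared_edm(X)

n = D.shape[0]
J = np.eye(n) - np.ones((n, n)) / n
G = -0.5 * J @ D @ J                     # double centering

w, v = np.linalg.eigh(G)                 # G is symmetric, eigh suffices
top = np.argsort(w)[::-1][:3]            # three largest eigenvalues
X_hat = (v[:, top] * np.sqrt(w[top])).T  # reconstructed coordinates

# Pairwise distances survive the reconstruction exactly.
print(np.allclose(squared_edm(X_hat), D))   # True
```

The `rectify_w_anchors()` step in the class then removes the remaining rotation/translation ambiguity by Procrustes-aligning the reconstruction to the known anchor positions.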
{
"alpha_fraction": 0.656471848487854,
"alphanum_fraction": 0.6854178309440613,
"avg_line_length": 30.033897399902344,
"blob_id": "f402351801d9d63e177ccbca203ee38f5644d835",
"content_id": "13d4d6a0ccb31e6aeeac6364540d9865672af186",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1831,
"license_type": "no_license",
"max_line_length": 130,
"num_lines": 59,
"path": "/visualization_server.py",
"repo_name": "tejastank/rssi_positioning",
"src_encoding": "UTF-8",
"text": "# to run the server: $ bokeh serve --show visulization_server.py\nfrom bokeh.plotting import figure, curdoc\nfrom bokeh.driving import linear\nfrom bokeh.models import Range1d\nimport random\nimport socket\n\nfrom anchors import Anchors\n\n\n@linear()\ndef update(step):\n global connection\n dataString = \"\"\n dataList = []\n x = 0.0\n y = 0.0\n try:\n dataString = connection.recv(128)#maximum amount of data to be received at once.\n dataList = dataString.split(',')\n x = float(dataList[0])\n y = float(dataList[1])\n print(dataString)\n except:\n pass\n\n ds.data['x'].append(x)\n ds.data['y'].append(y)\n ds.trigger('data', ds.data, ds.data)\n\n\ntry:\n host = '192.168.1.6'\n port = 5000\n serverAddress = (host, port)\n print(\"Starting data socket server on %s:%s\" % serverAddress)\n dataSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n dataSocket.bind(serverAddress)\n dataSocket.listen(1)#listen to a maximum number of queued connections of 1\n print(\"Data socket server on %s:%s is listening now.\" % serverAddress )\n connection, clientAddress = dataSocket.accept()\n print(\"Connection established with client with IP: %s:%s\" % clientAddress)\nexcept:\n pass\n\ncommissioningFileName = \"commissionning.dat\"\nanchors = Anchors(commissioningFileName)\n\np = figure(plot_width=800, plot_height=800)\nr_anchors = p.scatter([x*0.6096 for x in anchors.listOfX], [y*0.6096 for y in anchors.listOfY], size=10, color=\"black\", alpha=0.6)\np.xaxis.axis_label = \"X(meter)\"\np.yaxis.axis_label = \"Y(meter)\"\nr = p.scatter([], [], size=6, color=\"firebrick\", alpha=0.6)\nds = r.data_source\n\ncurdoc().add_root(p)\ncurdoc().title = \"Visualization of positioning results\"\n# Add a periodic callback to be run every 500 milliseconds\ncurdoc().add_periodic_callback(update, 500)\n"
},
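`visualization_server.py` reads plain comma-separated "x, y" strings from a single TCP client (the `Reporter` class in the positioning script plays that role). For exercising the plot without the measurement hardware, a tiny stand-in client can feed it fake samples; the host and port below simply mirror the values hard-coded in the script:

```python
import random
import socket
import time

# Feed fake "x, y" samples to visualization_server.py; start the bokeh
# server first so it is already blocked in accept().
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(('192.168.1.6', 5000))
try:
    for _ in range(100):
        x, y = random.uniform(0.0, 5.0), random.uniform(0.0, 5.0)
        sock.send(('%.2f, %.2f' % (x, y)).encode())  # bytes on Python 3
        time.sleep(0.5)  # matches the server's 500 ms polling callback
finally:
    sock.close()
```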
{
"alpha_fraction": 0.6425501704216003,
"alphanum_fraction": 0.6461318135261536,
"avg_line_length": 32.238094329833984,
"blob_id": "5f66bc672b04c0bce78447d5a23923bc5c82f2c0",
"content_id": "d193fc5342aa15d198a766b99bc490ce41199079",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1396,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 42,
"path": "/anchors.py",
"repo_name": "tejastank/rssi_positioning",
"src_encoding": "UTF-8",
"text": "class Anchors(object):\n\n\tdef __init__(self, filename):\n\t\tself.listOfFullIDsFromeFile = list()\n\t\tself.listOfX = list()\n\t\tself.listOfY = list()\n\t\tself.listOfZ = list()\n\t\twith open(filename, 'r') as f:\n\t\t\tfor line in f:\n\t\t\t\tif line[0] != \"#\":\n\t\t\t\t\tsegments = [x.strip() for x in line.split(',')]\n\t\t\t\t\tself.listOfFullIDsFromeFile.append(segments[1])\n\t\t\t\t\tself.listOfX.append( float(segments[2]) )\n\t\t\t\t\tself.listOfY.append( float(segments[3]) )\n\t\t\t\t\tself.listOfZ.append( float(segments[4]) )\n\n\tdef print_list_of_beacon_ids(self):\n\t\tprint(\"List of anchors with fullID (UUID-major-minor): \")\n\t\tfor index, val in enumerate(self.listOfFullIDsFromeFile):\n\t\t\tprint(\"# \" + str(index) + \": \" + val)\n\n\tdef print_list_of_beacon_x(self):\n\t\tprint(\"List of anchors' x position: \")\n\t\tfor index, val in enumerate(self.listOfX):\n\t\t\tprint(\"# \" + str(index) + \": \" + str(val))\n\n\tdef print_list_of_beacon_y(self):\n\t\tprint(\"List of anchors' y position: \")\n\t\tfor index, val in enumerate(self.listOfY):\n\t\t\tprint(\"# \" + str(index) + \": \" + str(val))\n\n\tdef print_list_of_beacon_z(self):\n\t\tprint(\"List of anchors' z position: \")\n\t\tfor index, val in enumerate(self.listOfZ):\n\t\t\tprint(\"# \" + str(index) + \": \" + str(val))\n\n\tdef show_debug(self):\n\t\tprint(\"Debug info for the commissioned anchors:\")\n\t\tself.print_list_of_beacon_ids()\n\t\tself.print_list_of_beacon_x()\n\t\tself.print_list_of_beacon_y()\n\t\tself.print_list_of_beacon_z()\n"
}
] | 6 |
zxfskr/consul_test
|
https://github.com/zxfskr/consul_test
|
731a98e9c9ccdb3f111ad3999ea21f7d76321edb
|
71e28eb15354ddce2f91e5b3300c91711f6de5e3
|
5ce9003dac157f325536295922b98ab12bcaad76
|
refs/heads/master
| 2023-05-31T20:58:45.066660 | 2019-08-23T09:47:12 | 2019-08-23T09:47:12 | 203,754,422 | 0 | 0 | null | 2019-08-22T08:54:53 | 2019-08-23T09:47:15 | 2023-05-01T20:36:11 |
Python
|
[
{
"alpha_fraction": 0.4614611268043518,
"alphanum_fraction": 0.4731903374195099,
"avg_line_length": 25.175437927246094,
"blob_id": "0264c190daaee236059f64e02c73085cf650f37b",
"content_id": "01b8bcb116a5cb3ca416d9b32e250077cb616960",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3028,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 114,
"path": "/flask_test.py",
"repo_name": "zxfskr/consul_test",
"src_encoding": "UTF-8",
"text": "from flask import Flask, Response, request\nimport json\nimport consul\nimport socket\nimport multiprocessing\nfrom multiprocessing import Process, Queue, pool\nimport random\nimport time\n\napp = Flask(__name__)\nc = consul.Consul()\nq1 = Queue()\n\n\ndef long_time_task(ip, q):\n while True:\n # q.get()\n print(\"long time \", q.get())\n # print(ip, q.get())\n index, data = c.kv.get(ip+\"_status\")\n if data:\n if data[\"Value\"].decode() == \"idle\":\n c.kv.put(ip+\"_status\", \"busy\")\n print(\"do working...\")\n time.sleep(20)\n print(\"working finish.\")\n c.kv.put(ip+\"_status\", \"idle\")\n else:\n print(\"agent busy\")\n time.sleep(5)\n else:\n print(\"new start.\")\n c.kv.put(ip+\"_status\", \"idle\")\n\n\ndef get_host_ip():\n try:\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n s.connect(('8.8.8.8', 80))\n ip = s.getsockname()[0]\n finally:\n s.close()\n return ip\n\n\[email protected](\"/\", methods=[\"GET\"])\ndef flask_test():\n test = 0\n test = request.values.get('test')\n str = \"\"\n ret = 0\n if test:\n index, data = c.kv.get(ip+\"_status\")\n if data:\n if data[\"Value\"].decode() == \"idle\":\n # c.kv.put(ip+\"_status\", \"idle\")\n q1.put(\"task\")\n str = \"task start\"\n ret = 0\n else:\n print(\"agent busy\")\n str = \"agent busy\"\n ret = -1\n else:\n str = \"agent starting...\"\n c.kv.put(ip+\"_status\", \"idle\")\n q1.put(\"task\")\n ret = 0\n\n return Response(json.dumps({\"ret\": ret, \"msg\": str}), mimetype='text/json')\n else:\n index, data = c.kv.get(ip+\"_status\")\n if data:\n if data[\"Value\"].decode() == \"idle\":\n str = \"agent idle\"\n ret = 0\n else:\n print(\"agent busy\")\n str = \"agent busy\"\n ret = -1\n else:\n str = \"agent starting...\"\n c.kv.put(ip+\"_status\", \"idle\")\n ret = 0\n\n return Response(json.dumps({\"ret\": ret, \"msg\": str}), mimetype='text/json')\n\n\ndef main():\n ip = get_host_ip()\n index, data = c.kv.get(ip+\"_status\")\n if not data:\n print(\"agent starting...\")\n c.kv.put(ip+\"_status\", \"idle\")\n\n p1 = Process(target=long_time_task, args=(ip, q1,))\n p1.start() # 启动进程\n # print(q1.get()) # 从队列中取出一个项目,并打印\n # p1.join() # 阻塞进程\n # print(q.get())\n # logging.config.fileConfig(config.LOG_CONFIG_FILE)\n app.run(host='0.0.0.0', port=10000, debug=False, use_reloader=False)\n print(\"Sub-process(es) done.\")\n\n\nif __name__ == '__main__':\n try:\n ip = get_host_ip()\n main()\n except Exception as e:\n print(str(e))\n finally:\n print(\"del\")\n c.kv.delete(ip+\"_status\")\n"
},
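`flask_test.py` above coordinates agents through a per-host Consul KV flag (a key named `<ip>_status` holding `idle` or `busy`). The core pattern, isolated into a minimal sketch (assumes a reachable Consul agent; the key name here is made up):

```python
import consul

c = consul.Consul()  # defaults to localhost:8500

key = '192.168.1.10_status'        # hypothetical agent key
index, data = c.kv.get(key)        # data is None when the key is absent
if data is None or data['Value'].decode() == 'idle':
    c.kv.put(key, 'busy')          # claim the agent
    try:
        pass                       # ... long-running work goes here ...
    finally:
        c.kv.put(key, 'idle')      # always release the flag
else:
    print('agent busy')
```

Note that the get-then-put above (and in the service itself) is not atomic: two requests can both observe `idle`. Consul's check-and-set, `c.kv.put(key, 'busy', cas=index)`, would close that race by refusing the write if the key changed since it was read.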
{
"alpha_fraction": 0.5402635335922241,
"alphanum_fraction": 0.5529526472091675,
"avg_line_length": 27.068492889404297,
"blob_id": "36590c7ebf308751c416c34a907f1beb95172c60",
"content_id": "e5147d50a4bd6f78b0be30351b854f9b696e1360",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2049,
"license_type": "no_license",
"max_line_length": 67,
"num_lines": 73,
"path": "/consul_main.py",
"repo_name": "zxfskr/consul_test",
"src_encoding": "UTF-8",
"text": "import time\nimport requests\nimport random\nimport consul\n\nCheck = consul.Check\n\n\ndef check_node_status(node):\n ret = 0\n for check in node[\"Checks\"]:\n if check[\"Status\"] == \"passing\":\n print(check[\"CheckID\"], \" check passing.\")\n else:\n print(check[\"CheckID\"], \" check error.\")\n ret -= 1\n return ret\n\n\ndef check_node_resource(node):\n node_name = node[\"Node\"][\"Node\"]\n addr = node[\"Service\"][\"Address\"]\n port = node[\"Service\"][\"Port\"]\n url = \"http://\" + addr + \":\" + str(port)\n session = requests.Session()\n response = session.get(url, timeout=5)\n print(response.json()[\"ret\"])\n if response.json()[\"ret\"] == 0:\n return {\"ret\": 0, \"node_name\": node_name, \"url\": url}\n else:\n print(response.json()[\"msg\"])\n return {\"ret\": -1, \"node_name\": node_name, \"url\": url}\n\n\ndef get_idle_node(c, service_name):\n node_dict = {\"busy\": [], \"idle\": []}\n index, nodes = c.health.service(service_name)\n # node_list = [node for node in nodes]\n print(nodes)\n\n for node in nodes:\n # print(check_node_status(node))\n if (check_node_status(node) == 0):\n r = check_node_resource(node)\n if (r[\"ret\"] == 0):\n node_dict[\"idle\"].append(r[\"url\"])\n else:\n node_dict[\"busy\"].append(r['url'])\n leng = len(node_dict[\"idle\"])\n if leng > 0:\n return node_dict[\"idle\"][random.randint(0, leng-1)]\n else:\n print(\"resources are exhausted\")\n return False\n\n\nconsul_host = \"172.16.81.1\"\nconsul_port = 9000\nservice_name = \"foo\"\nc = consul.Consul(host=consul_host, port=consul_port)\n\nwhile True:\n node_url = get_idle_node(c, service_name)\n if (node_url):\n print(node_url)\n session = requests.Session()\n response = session.get(node_url + \"/?test=test\", timeout=5)\n print(response.json()[\"ret\"])\n if response.json()[\"ret\"] == 0:\n print(\"task start\")\n else:\n print(response.json()[\"msg\"])\n time.sleep(5)\n"
},
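`consul_main.py` fetches every registered node and then filters by check status by hand in `check_node_status`. python-consul can push that filtering to Consul itself via the `passing` flag of the health endpoint, which shortens the healthy-node lookup to roughly the sketch below (same `foo` service and endpoint as above; the per-node `/` probe for the busy flag would still be needed on top of this):

```python
import random
import consul

c = consul.Consul(host='172.16.81.1', port=9000)

# Ask Consul for only the instances whose health checks all pass.
index, nodes = c.health.service('foo', passing=True)
urls = ['http://%s:%s' % (n['Service']['Address'], n['Service']['Port'])
        for n in nodes]
print(random.choice(urls) if urls else 'resources are exhausted')
```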
{
"alpha_fraction": 0.6470476388931274,
"alphanum_fraction": 0.6884374618530273,
"avg_line_length": 15.256173133850098,
"blob_id": "50e94007472916915cc7d5b7905185a19557b270",
"content_id": "67bd7e96f83bf6ed4a3be284f6cf2ed4ff2fb273",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 6677,
"license_type": "no_license",
"max_line_length": 129,
"num_lines": 324,
"path": "/README.md",
"repo_name": "zxfskr/consul_test",
"src_encoding": "UTF-8",
"text": "# consul_test\n\n```\nconsul_test/\n├── consul.ctmpl # 修改consul-template配置\n├── consul_deregister_test.py # 删除服务foo\n├── consul_main.py # 测试服务是否正常\n├── consul_register_test.py # 注册服务foo\n├── flask_test.py # foo服务程序\n├── README.md\n└── requirements.txt\n```\n\n## 一、确认软件安装\n\n0. 更新软件库\n```\nsudo apt update\n```\n\n1. 安装docker\n```\nwget -qO- https://get.docker.com/ | sh\n```\n\n2. 安装consul\n```\nsudo docker pull consul:latest\n```\n\n3. python使用python3.5\n```\npython3 --version\nPython 3.5.3\n```\n\n4. 安装pip3\n\n```\nsudo apt-get install python3-pip\n```\n\n5. 更新pip3\n\n```\nsudo pip3 install --upgrade pip\n```\n\n## 二、安装和使用虚拟环境\n\n- 安装虚拟环境管理工具\n\n```\nsudo pip3 install virtualenv\n```\n\n- 安装虚拟环境管理扩展包\n\n```\nsudo pip3 install virtualenvwrapper \n```\n\n- 编辑家目录下面的.bashrc文件,添加下面几行。\n\n```\nif [ -f /usr/local/bin/virtualenvwrapper.sh ]; then\n export WORKON_HOME=$HOME/.virtualenvs\n export VIRTUALENVWRAPPER_PYTHON=/usr/bin/python3\n source /usr/local/bin/virtualenvwrapper.sh\nfi\n```\n\n- 使用以下命令使配置立即生效\n\n```\nsource ~/.bashrc\n```\n\n- 创建虚拟环境命令(**需要连网**):\n\n```\n# 创建python2虚拟环境:\nmkvirtualenv 虚拟环境名\n\n# 创建python3虚拟环境:\nmkvirtualenv --python=/usr/bin/python3 consul_test\n```\n\n- 进入虚拟环境工作:\n\n```\nworkon 虚拟环境名\n```\n\n- 查看机器上有哪些虚拟环境:\n\n```\nworkon\n```\n\n- 退出虚拟环境:\n\n```\n# 使失效\ndeactivate \n```\n\n- 删除虚拟环境:\n\n```\nrmvirtualenv 虚拟环境名\n```\n\n## 三、Git安装与配置\n\n1. 安装git\n\n```\nsudo apt-get install git\n```\n\n2. 设置git用户名和邮箱\n\n```\ngit config --global user.name \"Your Name\"\ngit config --global user.email \"[email protected]\"\n```\n\n3. 生成ssh公私钥对\n\n```\nssh-keygen -t rsa -C [email protected]\n```\n\n4. 将公钥id_rsa.pub内容拷贝一份到GitLab设置中的SSH Keys中\n\n## 四、部署步骤\n\n1. 在家目录下创建workspace文件夹\n\n```\nmkdir ~/workspace\ncd ~/workspace\n```\n\n2. 克隆项目\n\n```\ngit clone [email protected]/zxxxf/consul_test.git\n```\n\n3. 
进入虚拟环境,根据不同的模块,安装相应的依赖包\n\n```bash\ncd consul_test\nworkon consul_test\npip install -r requirements.txt\n```\n\n## 五、平台运行\n\n- 以3台机器为例,host分别为\n\n```\n172.16.81.1 node1\n172.16.81.130 node2\n172.16.81.129 node3\n```\n\n- 启动docker中的consul,推荐集群中有3到5个server,首先启动第一个server,设置为leader\n```\nsudo docker run -d --name=node1 --restart=always --net=host \\\n-e 'CONSUL_LOCAL_CONFIG={\"skip_leave_on_interrupt\": true}' \\\nconsul agent -server -bind=172.16.81.1 \\\n-bootstrap -node=node1 \\\n-data-dir=/tmp/data-dir -client 0.0.0.0 -ui\n```\ndocker run参数:\n```\n-d\n 后台运行\n--name=node1\n 设置容器名\n--restart=always\n 容器停止时,自动重启容器\n--net=host\n 容器使用实体机ip\n-e \n设置环境变量'CONSUL_LOCAL_CONFIG={\"skip_leave_on_interrupt\": true}'设置节点退出时不发送leave信号,保留raft peers list each.\n```\n\nconsul agent 参数\n```\n-server\n 以server模式启动\n-bind=172.16.81.1\n 绑定本机ip\n-bootstrap\n 不执行选举,直接本机启动时为leader\n-retry-join=172.16.81.1\n 加入一个集群\n-retry-interval=30s \n join失败时重连间隔\n-retry-max=0\n join失败时重连次数,0为不限次数。\n-node=node1\n 设置node_name\n-data-dir=/tmp/data-dir\n 数据存储目录\n-client 0.0.0.0 -ui\n web ui绑定本机ip\n```\n\n- 启动第二个consul,加入集群\n\n```\nsudo docker run -d --name=node2 --restart=always --net=host \\\n-e 'CONSUL_LOCAL_CONFIG={\"skip_leave_on_interrupt\": true}' \\\nconsul agent -server -bind=172.16.81.131 \\\n-retry-join=172.16.81.1 -retry-interval=30s -retry-max=0 \\\n-node=node2 \\\n-data-dir=/tmp/data-dir -client 0.0.0.0 -ui\n```\n\n- 启动第三个consul,加入集群\n\n```\nsudo docker run -d --name=node3 --restart=always --net=host \\\n-e 'CONSUL_LOCAL_CONFIG={\"skip_leave_on_interrupt\": true}' \\\nconsul agent -server -bind=172.16.81.129 \\\n-retry-join=172.16.81.131 -retry-interval=30s -retry-max=0 \\\n-node=node3 -client 0.0.0.0 -ui\n```\n\n- 访问任意一个节点的web ui,可以看到正常运行的三个节点\n\n\n\n- 使用模块python-consul通过api进行服务注册。在每个server上启动测试服务并注册。\n\n```\n#注册服务\npython consul_register_test.py\n#启动测试服务\npython flask_test.py\n```\n\nregister api:\n```\nc = consul.Consul(host=consul_host, port=consul_port)\nservice_id=\"foo\"+consul_host\naddr = \"http://{0}:{1}\".format(consul_host, '10000')\nc.agent.service.register(\n \"foo\",\n service_id=service_id,\n address=consul_host,\n port=10000,\n # ttl=\"10s\"\n check=Check.http(addr, \"10s\", deregister=\"90ms\"),\n)\n\n```\n\nderegister api:\n```\nc = consul.Consul(host=consul_host, port=consul_port)\nc.agent.service.deregister(service_id)\n```\n\n- 打开web ui查看已经注册的服务状况\n\n\n\n- 选取一个节点作为客户端,以node1为例安装nginx和consul-template\n\n```\n#下载最新版本:\nwget http://nginx.org/download/nginx-1.13.6.tar.gz\n#解压:\ntar -zxvf nginx-1.13.6.tar.gz\n#进入解压目录:\ncd nginx-1.13.6\n#配置:\n./configure --prefix=/usr/local/nginx \n#编译:\nmake\n#安装:\nsudo make install\n#修改配置文件\ncd /usr/local/nginx/conf\n#在nginx.conf的http模块添加以下语句\ninclude conf.d/*.conf;\n#新建consul测试配置文件夹\nsudo mkdir conf.d/\nsudo touch conf.d/test.conf\n#启动:\nsudo /usr/local/nginx/sbin/nginx -c /usr/local/nginx/conf/nginx.conf\n注意:-c 指定配置文件的路径,不加的话,nginx会自动加载默认路径的配置文件,可以通过-h查看帮助命令。\n#查看进程:\nps -ef | grep nginx\n\n#下载consul-template\n\nwget https://releases.hashicorp.com/consul-template/0.20.0/consul-template_0.20.0_linux_amd64.zip\n\nunzip consul-template_0.20.0_linux_amd64.zip\n\nmv consul-template /usr/local/bin/\n\n# 启动consul-template\ncd ~/workspace/consul_test\nsudo consul-template -template \"./consul.ctmpl:/usr/local/nginx/conf/conf.d/test.conf:sudo /usr/local/nginx/sbin/nginx -s reload\"\n```\n\n- 测试操作\n\n此时可以访问172.16.81.1:9000访问web ui\n\n\n\n测试服务是否正常执行,此程序会向服务发送任务。\n```\npython consul_main.py\n```\n"
},
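The README drives nginx reloads through consul-template, which re-renders its template whenever the `foo` service list changes. The same change notification is available directly from python-consul as a blocking query: passing the last seen index makes the call wait until something changes. A sketch (endpoint values as in the README; the loop body is illustrative):

```python
import consul

c = consul.Consul(host='172.16.81.1', port=9000)

index = None
while True:
    # Blocks for up to 30s, returning early when the service list changes.
    index, nodes = c.health.service('foo', index=index, wait='30s')
    print('foo now has %d registered instance(s)' % len(nodes))
```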
{
"alpha_fraction": 0.59300696849823,
"alphanum_fraction": 0.6335664391517639,
"avg_line_length": 18.86111068725586,
"blob_id": "77f814b69ef96491af886e3ac76d6adfb5155ec0",
"content_id": "cedd33b592f9006fdd2111b64e1462410109d0b7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 715,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 36,
"path": "/consul_register_test.py",
"repo_name": "zxfskr/consul_test",
"src_encoding": "UTF-8",
"text": "import time\nimport requests\nimport random\nimport socket\n# import pytest\nimport consul\n# import consul.std\n\nCheck = consul.Check\n\n\ndef get_host_ip():\n try:\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n s.connect(('8.8.8.8', 80))\n ip = s.getsockname()[0]\n finally:\n s.close()\n return ip\n\n\nconsul_host = get_host_ip()\nconsul_port = 8500\nservice_name = \"foo\"\n# c = consul.Consul()\nc = consul.Consul(host=consul_host, port=consul_port)\n\naddr = \"http://{0}:{1}\".format(consul_host, '10000')\nc.agent.service.register(\n \"foo\",\n service_id=\"foo\"+consul_host,\n address=consul_host,\n port=10000,\n # ttl=\"10s\"\n check=Check.http(addr, \"10s\", deregister=\"90ms\"),\n)\n"
},
{
"alpha_fraction": 0.6374045610427856,
"alphanum_fraction": 0.6583969593048096,
"avg_line_length": 17.714284896850586,
"blob_id": "8db6fd93917a7687060070f1f9b71f4f57cc922a",
"content_id": "c28c9d422b18fdc6121007b2a954f495a9cc0205",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 524,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 28,
"path": "/consul_deregister_test.py",
"repo_name": "zxfskr/consul_test",
"src_encoding": "UTF-8",
"text": "import time\nimport requests\nimport random\nimport socket\n\n# import pytest\nimport consul\n# import consul.std\n\nCheck = consul.Check\n\n\ndef get_host_ip():\n try:\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n s.connect(('8.8.8.8', 80))\n ip = s.getsockname()[0]\n finally:\n s.close()\n return ip\n\n\nconsul_host = get_host_ip()\nconsul_port = 8500\nservice_name = \"foo\"\n# c = consul.Consul()\nc = consul.Consul(host=consul_host, port=consul_port)\nc.agent.service.deregister(\"foo\"+consul_host)\n"
}
] | 5 |
hosamn/FB-data-dump
|
https://github.com/hosamn/FB-data-dump
|
39b1e566a10ddc04349e1a23cdd7e3babf3b70e0
|
03edad77d910c7e4a1d98d73a8cd59e7b634e7d9
|
2a7611f942a7d0a9af536f7dd426ad4384c4e3a1
|
refs/heads/master
| 2022-11-22T05:27:16.668777 | 2020-07-19T08:30:21 | 2020-07-19T08:30:21 | 280,821,768 | 1 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7948718070983887,
"alphanum_fraction": 0.7948718070983887,
"avg_line_length": 38,
"blob_id": "2d0cc87e0f99f8a7951025c67729d79a33d5ddf7",
"content_id": "54c9b39335ffdfbebbe46988bdddb1169b646f83",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 78,
"license_type": "no_license",
"max_line_length": 62,
"num_lines": 2,
"path": "/README.md",
"repo_name": "hosamn/FB-data-dump",
"src_encoding": "UTF-8",
"text": "# FB-data-dump\nFacebook puplic post dumper utilizing \"facebook-scraper @PyPI\"\n"
},
{
"alpha_fraction": 0.6791510581970215,
"alphanum_fraction": 0.6828963756561279,
"avg_line_length": 28.66666603088379,
"blob_id": "c186271440358dbe0b6522173e5843ac584ade4d",
"content_id": "fa0e0372d5f05bfa52082cfce729794d63be13f0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 801,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 27,
"path": "/FB-Dump.py",
"repo_name": "hosamn/FB-data-dump",
"src_encoding": "UTF-8",
"text": "from os import chdir, path\nimport subprocess\nimport sys\n\ndef install(package):\n subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", package])\n \ntry :\n from facebook_scraper import get_posts\nexcept:\n install(facebook_scraper)\n from facebook_scraper import get_posts\n\n\npage_id = input('# Input Facebook Page ID, Or press Enter for \"mwrifb\" :')\nif not len(page_id): page_id = 'mwrifb'\n\nnum_pages = input('# Input Number of Pages to get, Or press Enter for 1 :')\nif not len(num_pages): num_pages = 1\n\nprint('\\n# Getting',num_pages,'page(s) from http://www.facebook.com/'+page_id+'\\n')\n\nchdir(path.dirname(path.abspath(__file__)))\n\nfor post in get_posts(page_id, pages=num_pages):\n with open('FBDataDump.txt', 'a', encoding='utf8') as myfile:\n myfile.write(repr(post))\n"
}
] | 2 |
imcj/django-oauthost
|
https://github.com/imcj/django-oauthost
|
94db1be28a33aedd6d6f472c7a87c488d9e5db95
|
1a00f09db9a6782b2fc18270b1a21599e9506d42
|
c2cc576a027e4d31275bd3d5585e526190772cbd
|
refs/heads/master
| 2018-05-30T13:27:19.503615 | 2013-02-27T14:23:29 | 2013-02-27T14:23:29 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5911352038383484,
"alphanum_fraction": 0.6076798439025879,
"avg_line_length": 41.295509338378906,
"blob_id": "60cebcdbc0d0cc96b98dabd15f94f0e0b3030648",
"content_id": "6bad9d5fb26406377e946994ea478a3e46f75cd7",
"detected_licenses": [
"BSD-2-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 17891,
"license_type": "permissive",
"max_line_length": 146,
"num_lines": 423,
"path": "/oauthost/tests.py",
"repo_name": "imcj/django-oauthost",
"src_encoding": "UTF-8",
"text": "from django.test import TestCase\nfrom django.utils import simplejson as json\nfrom django.test.client import Client as TestClient\nfrom django.contrib.auth.models import User\nfrom django.conf import settings\n\nfrom oauthost.models import AuthorizationCode, Token, Client, RedirectionEndpoint\n\n\nURL_TOKEN = '/token/'\nURL_AUTHORIZE = '/auth/'\n\n\nclass OAuthostCLient(TestClient):\n\n def post(self, path, data={}, **extra):\n response = super(OAuthostCLient, self).post(path, data=data, **extra)\n if path == URL_TOKEN:\n response.content_json = json.loads(response.content)\n return response\n\n\ndef parse_location_header(response, use_uri_fragment=False):\n delimiter = '?'\n if use_uri_fragment:\n delimiter = '#'\n query = response['Location'].split(delimiter)[1]\n query = query.split('&')\n parsed = {}\n for part in query:\n key, value = part.split('=')\n parsed[key] = value\n return parsed\n\n\nclass EndpointTokenCheck(TestCase):\n\n client_class = OAuthostCLient\n\n def test_grant_authorization_code(self):\n\n # Secure connection check\n settings.DEBUG = False\n resp = self.client.get(URL_TOKEN, {})\n self.assertEqual(resp.status_code, 403)\n settings.DEBUG = True\n\n resp = self.client.post(URL_TOKEN, {'grant_type': 'a'})\n self.assertEqual(resp.status_code, 400)\n self.assertEqual(resp.content_json['error'], 'unsupported_grant_type')\n\n user_1 = User(username='Fred')\n user_1.set_password('12345')\n user_1.save()\n\n client_1 = Client(user=user_1, title='OClient')\n client_1.save()\n\n redirect_1 = RedirectionEndpoint(client=client_1, uri='http://redirect-test.com')\n redirect_1.save()\n\n code_1 = AuthorizationCode(user=user_1, client=client_1, uri=redirect_1.uri)\n code_1.save()\n\n # Missing client authentication data.\n resp = self.client.post(URL_TOKEN, {'grant_type': 'authorization_code'})\n self.assertEqual(resp.status_code, 401)\n self.assertEqual(resp.content_json['error'], 'invalid_client')\n\n # Missing all required params.\n resp = self.client.post(URL_TOKEN, {'grant_type': 'authorization_code', 'client_id': client_1.identifier,\n 'client_secret': client_1.password})\n self.assertEqual(resp.status_code, 400)\n self.assertEqual(resp.content_json['error'], 'invalid_request')\n\n # Missing redirect URI.\n resp = self.client.post(URL_TOKEN, {'grant_type': 'authorization_code', 'code': 'wrong_code',\n 'client_id': client_1.identifier, 'client_secret': client_1.password})\n self.assertEqual(resp.status_code, 400)\n self.assertEqual(resp.content_json['error'], 'invalid_request')\n\n # Missing code.\n resp = self.client.post(URL_TOKEN, {'grant_type': 'authorization_code', 'redirect_uri': 'http://wrong-url.com',\n 'client_id': client_1.identifier, 'client_secret': client_1.password})\n self.assertEqual(resp.status_code, 400)\n self.assertEqual(resp.content_json['error'], 'invalid_request')\n\n # Wrong code.\n resp = self.client.post(URL_TOKEN, {'grant_type': 'authorization_code', 'code': 'invalid',\n 'redirect_uri': 'http://localhost:8000/abc/',\n 'client_id': client_1.identifier, 'client_secret': client_1.password})\n self.assertEqual(resp.status_code, 400)\n self.assertEqual(resp.content_json['error'], 'invalid_grant')\n\n # Wrong URI.\n resp = self.client.post(URL_TOKEN, {'grant_type': 'authorization_code', 'code': code_1.code,\n 'redirect_uri': 'http://wrong-url.com/', 'client_id': client_1.identifier,\n 'client_secret': client_1.password})\n self.assertEqual(resp.status_code, 400)\n self.assertEqual(resp.content_json['error'], 'invalid_grant')\n\n # Valid call for 
a token.\n resp = self.client.post(URL_TOKEN, {'grant_type': 'authorization_code', 'code': code_1.code,\n 'redirect_uri': redirect_1.uri, 'client_id': client_1.identifier,\n 'client_secret': client_1.password})\n self.assertEqual(resp.status_code, 200)\n self.assertTrue('access_token' in resp.content_json)\n self.assertTrue('refresh_token' in resp.content_json)\n self.assertTrue('token_type' in resp.content_json)\n\n # An additional call for code issues token and code invalidation.\n resp = self.client.post(URL_TOKEN, {'grant_type': 'authorization_code', 'code': '1234567',\n 'redirect_uri': 'http://localhost:8000/abc/',\n 'client_id': client_1.identifier, 'client_secret': client_1.password})\n self.assertEqual(resp.status_code, 400)\n self.assertEqual(resp.content_json['error'], 'invalid_grant')\n\n\nclass EndpointAuthorizeCheck(TestCase):\n\n client_class = OAuthostCLient\n\n def test_auth(self):\n\n # User is not logged in.\n resp = self.client.get(URL_AUTHORIZE, {'client_id': '100'})\n self.assertEqual(resp.status_code, 302)\n\n user_1 = User(username='Fred')\n user_1.set_password('12345')\n user_1.save()\n\n # Logging the user in.\n self.client.login(username='Fred', password='12345')\n\n # Secure connection check\n settings.DEBUG = False\n resp = self.client.get(URL_AUTHORIZE, {})\n self.assertEqual(resp.status_code, 403)\n settings.DEBUG = True\n\n # Missing client id.\n resp = self.client.get(URL_AUTHORIZE, {'response_type': 'code'})\n self.assertEqual(resp.status_code, 400)\n\n # Missing response type.\n resp = self.client.get(URL_AUTHORIZE, {'client_id': '100'})\n self.assertEqual(resp.status_code, 400)\n\n # Wrong response type\n resp = self.client.get(URL_AUTHORIZE, {'response_type': 'habrahabr'})\n self.assertEqual(resp.status_code, 400)\n\n # Invalid client id.\n resp = self.client.get(URL_AUTHORIZE, {'response_type': 'code', 'client_id': 'invalid'})\n self.assertEqual(resp.status_code, 400)\n\n client_1 = Client(user=user_1, title='OClient', identifier='cl012345')\n client_1.save()\n\n client_2 = Client(user=user_1, title='OGOClient')\n client_2.save()\n\n redirect_1 = RedirectionEndpoint(client=client_1, uri='http://redirect-test.com')\n redirect_1.save()\n\n redirect_2 = RedirectionEndpoint(client=client_2, uri='http://redirect-test1.com')\n redirect_2.save()\n\n redirect_3 = RedirectionEndpoint(client=client_2, uri='http://redirect-test2.com')\n redirect_3.save()\n\n # Client 2 - No redirect URI in request.\n resp = self.client.get(URL_AUTHORIZE, {'response_type': 'code', 'client_id': client_2.identifier})\n self.assertEqual(resp.status_code, 400)\n\n # Client 2 - Unknown URI in request.\n resp = self.client.get(URL_AUTHORIZE, {'response_type': 'code', 'redirect_uri': 'http://noitisnot.com', 'client_id': client_2.identifier})\n self.assertEqual(resp.status_code, 400)\n\n # Valid code request.\n resp = self.client.get(URL_AUTHORIZE, {'response_type': 'code', 'client_id': client_1.identifier})\n self.assertEqual(resp.status_code, 200)\n\n # User declines auth.\n resp = self.client.post(URL_AUTHORIZE, {'auth_decision': 'is_made'})\n self.assertEqual(resp.status_code, 302)\n self.assertEqual(parse_location_header(resp)['error'], 'access_denied')\n\n # Again Valid code request.\n resp = self.client.get(URL_AUTHORIZE, {'response_type': 'code', 'client_id': client_1.identifier})\n self.assertEqual(resp.status_code, 200)\n\n # User confirms auth.\n resp = self.client.post(URL_AUTHORIZE, {'auth_decision': 'is_made', 'confirmed': 'yes'})\n self.assertEqual(resp.status_code, 302)\n 
self.assertIn('code', parse_location_header(resp))\n\n # ============= Implicit grant tests.\n\n # Valid token request.\n resp = self.client.get(URL_AUTHORIZE, {'response_type': 'token', 'client_id': client_1.identifier})\n self.assertEqual(resp.status_code, 200)\n\n # User confirms token grant.\n resp = self.client.post(URL_AUTHORIZE, {'auth_decision': 'is_made', 'confirmed': 'yes'})\n self.assertEqual(resp.status_code, 302)\n params = parse_location_header(resp, True)\n self.assertIn('access_token', params)\n self.assertIn('token_type', params)\n\n\nclass GrantsCheck(TestCase):\n\n client_class = OAuthostCLient\n\n def test_authorization_code_unsafe(self):\n\n user_1 = User(username='Fred')\n user_1.set_password('12345')\n user_1.save()\n\n client_1 = Client(user=user_1, title='OClient')\n client_1.save()\n\n redirect_1 = RedirectionEndpoint(client=client_1, uri='http://redirect-test.com')\n redirect_1.save()\n\n # Logging the user in.\n self.client.login(username='Fred', password='12345')\n\n # Valid code request.\n resp = self.client.get(URL_AUTHORIZE, {'response_type': 'code', 'client_id': client_1.identifier})\n self.assertEqual(resp.status_code, 200)\n\n # User confirms auth.\n resp = self.client.post(URL_AUTHORIZE, {'auth_decision': 'is_made', 'confirmed': 'yes'})\n self.assertEqual(resp.status_code, 302)\n params = parse_location_header(resp)\n self.assertIn('code', params)\n\n # Auth code given.\n code = params['code']\n\n # Valid token by code request.\n resp = self.client.post(URL_TOKEN, {'grant_type': 'authorization_code', 'code': code,\n 'redirect_uri': redirect_1.uri,\n 'client_id': client_1.identifier,\n 'client_secret': client_1.password})\n\n self.assertEqual(resp.status_code, 200)\n self.assertTrue('access_token' in resp.content_json)\n self.assertTrue('refresh_token' in resp.content_json)\n self.assertTrue('token_type' in resp.content_json)\n\n def test_authorization_code_http_basic(self):\n\n user_1 = User(username='Fred')\n user_1.set_password('12345')\n user_1.save()\n\n client_1 = Client(user=user_1, title='OClient', identifier='OClient', password='cl012345')\n client_1.save()\n\n redirect_1 = RedirectionEndpoint(client=client_1, uri='http://redirect-test.com')\n redirect_1.save()\n\n # Logging the user in.\n self.client.login(username='Fred', password='12345')\n\n # Valid code request.\n resp = self.client.get(URL_AUTHORIZE, {'response_type': 'code', 'client_id': client_1.identifier})\n self.assertEqual(resp.status_code, 200)\n\n # User confirms auth.\n resp = self.client.post(URL_AUTHORIZE, {'auth_decision': 'is_made', 'confirmed': 'yes'})\n self.assertEqual(resp.status_code, 302)\n params = parse_location_header(resp)\n self.assertIn('code', params)\n\n # Auth code given.\n code = params['code']\n\n # Invalid token by code request.\n resp = self.client.post(URL_TOKEN, {'grant_type': 'authorization_code', 'code': code,\n 'redirect_uri': redirect_1.uri},\n Authorization='Basic Tqrqwer==')\n self.assertEqual(resp.status_code, 401)\n self.assertIn('www-authenticate', resp._headers)\n self.assertEqual(resp._headers['www-authenticate'][1], 'Basic')\n\n # Valid token by code request.\n # HTTP Basic data - OClient:cl012345 --> T0NsaWVudDpjbDAxMjM0NQ==\n resp = self.client.post(URL_TOKEN, {'grant_type': 'authorization_code', 'code': code,\n 'redirect_uri': redirect_1.uri},\n Authorization='Basic T0NsaWVudDpjbDAxMjM0NQ==')\n self.assertEqual(resp.status_code, 200)\n self.assertTrue('access_token' in resp.content_json)\n self.assertTrue('refresh_token' in 
resp.content_json)\n self.assertTrue('token_type' in resp.content_json)\n\n def test_token_by_user_credentials(self):\n\n user_1 = User(username='Fred')\n user_1.set_password('12345')\n user_1.save()\n\n client_1 = Client(user=user_1, title='OClient', identifier='OClient', password='cl012345')\n client_1.save()\n\n redirect_1 = RedirectionEndpoint(client=client_1, uri='http://redirect-test.com')\n redirect_1.save()\n\n # Missing params.\n resp = self.client.post(URL_TOKEN, {'grant_type': 'password'}, Authorization='Basic T0NsaWVudDpjbDAxMjM0NQ==')\n self.assertEqual(resp.status_code, 400)\n self.assertEqual(resp.content_json['error'], 'invalid_request')\n\n # Invalid params.\n resp = self.client.post(URL_TOKEN, {'grant_type': 'password', 'username': 'FalseUser', 'password': 'FalsePassword'},\n Authorization='Basic T0NsaWVudDpjbDAxMjM0NQ==')\n self.assertEqual(resp.status_code, 400)\n self.assertEqual(resp.content_json['error'], 'invalid_grant')\n\n # Valid token by password request.\n resp = self.client.post(URL_TOKEN, {'grant_type': 'password', 'username': 'Fred',\n 'password': '12345'},\n Authorization='Basic T0NsaWVudDpjbDAxMjM0NQ==')\n\n self.assertEqual(resp.status_code, 200)\n self.assertTrue('access_token' in resp.content_json)\n self.assertTrue('refresh_token' in resp.content_json)\n self.assertTrue('token_type' in resp.content_json)\n self.assertTrue('expires_in' in resp.content_json)\n\n def test_token_by_client_credentials(self):\n\n user_1 = User(username='Fred')\n user_1.set_password('12345')\n user_1.save()\n\n client_1 = Client(user=user_1, title='OClient', identifier='OClient', password='cl012345')\n client_1.save()\n\n redirect_1 = RedirectionEndpoint(client=client_1, uri='http://redirect-test.com')\n redirect_1.save()\n\n # Valid token by client credentials request.\n resp = self.client.post(URL_TOKEN, {'grant_type': 'client_credentials'},\n Authorization='Basic T0NsaWVudDpjbDAxMjM0NQ==')\n\n self.assertEqual(resp.status_code, 200)\n self.assertTrue('access_token' in resp.content_json)\n self.assertTrue('refresh_token' not in resp.content_json)\n self.assertTrue('token_type' in resp.content_json)\n\n access_token = resp.content_json['access_token']\n token = Token.objects.get(access_token=access_token)\n self.assertEqual(user_1, token.user)\n\n def test_refresh_token_http_basic(self):\n\n user_1 = User(username='Fred')\n user_1.set_password('12345')\n user_1.save()\n\n client_1 = Client(user=user_1, title='OClient', identifier='OClient', password='cl012345')\n client_1.save()\n\n client_2 = Client(user=user_1, title='OGOClient', identifier='OGOClient', password='cl543210')\n client_2.save()\n\n redirect_1 = RedirectionEndpoint(client=client_1, uri='http://redirect-test.com')\n redirect_1.save()\n\n token_1 = Token(client=client_1, user=user_1)\n token_1.save()\n\n token_2 = Token(client=client_2, user=user_1)\n token_2.save()\n\n date_issued = token_1.date_issued\n access_token = token_1.access_token\n refresh_token = token_1.refresh_token\n\n refresh_token_wrong_client = token_2.refresh_token\n\n # Missing required params.\n resp = self.client.post(URL_TOKEN, {'grant_type': 'refresh_token'},\n Authorization='Basic T0NsaWVudDpjbDAxMjM0NQ==')\n self.assertEqual(resp.status_code, 400)\n self.assertEqual(resp.content_json['error'], 'invalid_request')\n\n # Invalid refresh token supplied.\n resp = self.client.post(URL_TOKEN, {'grant_type': 'refresh_token', 'refresh_token': 'invalid'},\n Authorization='Basic T0NsaWVudDpjbDAxMjM0NQ==')\n self.assertEqual(resp.status_code, 
400)\n self.assertEqual(resp.content_json['error'], 'invalid_grant')\n\n # Refresh token from another client is supplied.\n resp = self.client.post(URL_TOKEN, {'grant_type': 'refresh_token', 'refresh_token': refresh_token_wrong_client},\n Authorization='Basic T0NsaWVudDpjbDAxMjM0NQ==')\n self.assertEqual(resp.status_code, 400)\n self.assertEqual(resp.content_json['error'], 'invalid_grant')\n\n # Valid request.\n resp = self.client.post(URL_TOKEN, {'grant_type': 'refresh_token', 'refresh_token': refresh_token},\n Authorization='Basic T0NsaWVudDpjbDAxMjM0NQ==')\n\n self.assertEqual(resp.status_code, 200)\n self.assertTrue('access_token' in resp.content_json)\n self.assertTrue('refresh_token' in resp.content_json)\n self.assertTrue('token_type' in resp.content_json)\n self.assertTrue('expires_in' not in resp.content_json)\n\n self.assertNotEqual(access_token, resp.content_json['access_token'])\n self.assertNotEqual(refresh_token, resp.content_json['refresh_token'])\n\n token_updated = Token.objects.get(access_token=resp.content_json['access_token'])\n self.assertNotEqual(date_issued, token_updated.date_issued)\n\n\n# TODO Add tests for Bearer auth.\n"
},
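The test suite hard-codes Authorization headers such as `Basic T0NsaWVudDpjbDAxMjM0NQ==`, and a comment in `GrantsCheck` notes it encodes `OClient:cl012345`. For reference, that value is just the Base64 of `identifier:password`; a tiny helper reproduces it:

```python
import base64

# Build an HTTP Basic Authorization header value from client credentials.
def basic_auth_header(identifier, password):
    token = base64.b64encode(('%s:%s' % (identifier, password)).encode()).decode()
    return 'Basic %s' % token

print(basic_auth_header('OClient', 'cl012345'))  # -> Basic T0NsaWVudDpjbDAxMjM0NQ==
```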
{
"alpha_fraction": 0.5749892592430115,
"alphanum_fraction": 0.5826529264450073,
"avg_line_length": 65.80382537841797,
"blob_id": "5b9dd9d33c665872523dc66427b3356c0b132c6e",
"content_id": "56e9aa345783374c1f6ef5f96a3b55e65b1a1f27",
"detected_licenses": [
"BSD-2-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 13962,
"license_type": "permissive",
"max_line_length": 182,
"num_lines": 209,
"path": "/oauthost/migrations/0001_initial.py",
"repo_name": "imcj/django-oauthost",
"src_encoding": "UTF-8",
"text": "# encoding: utf-8\nimport datetime\nfrom south.db import db\nfrom south.v2 import SchemaMigration\nfrom django.db import models\n\nclass Migration(SchemaMigration):\n\n def forwards(self, orm):\n \n # Adding model 'Scope'\n db.create_table('oauthost_scope', (\n ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('identifier', self.gf('django.db.models.fields.CharField')(unique=True, max_length=100)),\n ('title', self.gf('django.db.models.fields.CharField')(max_length=250)),\n ))\n db.send_create_signal('oauthost', ['Scope'])\n\n # Adding model 'Client'\n db.create_table('oauthost_client', (\n ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('date_registered', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),\n ('title', self.gf('django.db.models.fields.CharField')(unique=True, max_length=100)),\n ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),\n ('description', self.gf('django.db.models.fields.TextField')(max_length=100)),\n ('link', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),\n ('identifier', self.gf('django.db.models.fields.CharField')(unique=True, max_length=250, blank=True)),\n ('token_lifetime', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),\n ('password', self.gf('django.db.models.fields.CharField')(max_length=250, blank=True)),\n ('type', self.gf('django.db.models.fields.IntegerField')(default=1)),\n ('hash_sign_supported', self.gf('django.db.models.fields.BooleanField')(default=True)),\n ))\n db.send_create_signal('oauthost', ['Client'])\n\n # Adding M2M table for field scopes on 'Client'\n db.create_table('oauthost_client_scopes', (\n ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),\n ('client', models.ForeignKey(orm['oauthost.client'], null=False)),\n ('scope', models.ForeignKey(orm['oauthost.scope'], null=False))\n ))\n db.create_unique('oauthost_client_scopes', ['client_id', 'scope_id'])\n\n # Adding model 'RedirectionEndpoint'\n db.create_table('oauthost_redirectionendpoint', (\n ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('client', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['oauthost.Client'])),\n ('uri', self.gf('django.db.models.fields.URLField')(max_length=200)),\n ))\n db.send_create_signal('oauthost', ['RedirectionEndpoint'])\n\n # Adding model 'AuthorizationCode'\n db.create_table('oauthost_authorizationcode', (\n ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('date_issued', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),\n ('code', self.gf('django.db.models.fields.CharField')(unique=True, max_length=7)),\n ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),\n ('client', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['oauthost.Client'])),\n ('uri', self.gf('django.db.models.fields.URLField')(max_length=200)),\n ))\n db.send_create_signal('oauthost', ['AuthorizationCode'])\n\n # Adding M2M table for field scopes on 'AuthorizationCode'\n db.create_table('oauthost_authorizationcode_scopes', (\n ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),\n ('authorizationcode', models.ForeignKey(orm['oauthost.authorizationcode'], null=False)),\n ('scope', models.ForeignKey(orm['oauthost.scope'], null=False))\n ))\n db.create_unique('oauthost_authorizationcode_scopes', 
['authorizationcode_id', 'scope_id'])\n\n # Adding model 'Token'\n db.create_table('oauthost_token', (\n ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('date_issued', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),\n ('expires_at', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),\n ('access_token', self.gf('django.db.models.fields.CharField')(unique=True, max_length=32)),\n ('refresh_token', self.gf('django.db.models.fields.CharField')(max_length=32, unique=True, null=True, blank=True)),\n ('access_token_type', self.gf('django.db.models.fields.CharField')(default='bearer', max_length=100)),\n ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),\n ('client', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['oauthost.Client'])),\n ('code', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['oauthost.AuthorizationCode'], null=True, blank=True)),\n ))\n db.send_create_signal('oauthost', ['Token'])\n\n # Adding M2M table for field scopes on 'Token'\n db.create_table('oauthost_token_scopes', (\n ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),\n ('token', models.ForeignKey(orm['oauthost.token'], null=False)),\n ('scope', models.ForeignKey(orm['oauthost.scope'], null=False))\n ))\n db.create_unique('oauthost_token_scopes', ['token_id', 'scope_id'])\n\n\n def backwards(self, orm):\n \n # Deleting model 'Scope'\n db.delete_table('oauthost_scope')\n\n # Deleting model 'Client'\n db.delete_table('oauthost_client')\n\n # Removing M2M table for field scopes on 'Client'\n db.delete_table('oauthost_client_scopes')\n\n # Deleting model 'RedirectionEndpoint'\n db.delete_table('oauthost_redirectionendpoint')\n\n # Deleting model 'AuthorizationCode'\n db.delete_table('oauthost_authorizationcode')\n\n # Removing M2M table for field scopes on 'AuthorizationCode'\n db.delete_table('oauthost_authorizationcode_scopes')\n\n # Deleting model 'Token'\n db.delete_table('oauthost_token')\n\n # Removing M2M table for field scopes on 'Token'\n db.delete_table('oauthost_token_scopes')\n\n\n models = {\n 'auth.group': {\n 'Meta': {'object_name': 'Group'},\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),\n 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': \"orm['auth.Permission']\", 'symmetrical': 'False', 'blank': 'True'})\n },\n 'auth.permission': {\n 'Meta': {'ordering': \"('content_type__app_label', 'content_type__model', 'codename')\", 'unique_together': \"(('content_type', 'codename'),)\", 'object_name': 'Permission'},\n 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['contenttypes.ContentType']\"}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})\n },\n 'auth.user': {\n 'Meta': {'object_name': 'User'},\n 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),\n 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),\n 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),\n 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': 
\"orm['auth.Group']\", 'symmetrical': 'False', 'blank': 'True'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),\n 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),\n 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),\n 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),\n 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),\n 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),\n 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': \"orm['auth.Permission']\", 'symmetrical': 'False', 'blank': 'True'}),\n 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})\n },\n 'contenttypes.contenttype': {\n 'Meta': {'ordering': \"('name',)\", 'unique_together': \"(('app_label', 'model'),)\", 'object_name': 'ContentType', 'db_table': \"'django_content_type'\"},\n 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})\n },\n 'oauthost.authorizationcode': {\n 'Meta': {'object_name': 'AuthorizationCode'},\n 'client': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['oauthost.Client']\"}),\n 'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '7'}),\n 'date_issued': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'scopes': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': \"orm['oauthost.Scope']\", 'null': 'True', 'blank': 'True'}),\n 'uri': ('django.db.models.fields.URLField', [], {'max_length': '200'}),\n 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['auth.User']\"})\n },\n 'oauthost.client': {\n 'Meta': {'object_name': 'Client'},\n 'date_registered': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),\n 'description': ('django.db.models.fields.TextField', [], {'max_length': '100'}),\n 'hash_sign_supported': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'identifier': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250', 'blank': 'True'}),\n 'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),\n 'password': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),\n 'scopes': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': \"orm['oauthost.Scope']\", 'null': 'True', 'blank': 'True'}),\n 'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),\n 'token_lifetime': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),\n 'type': ('django.db.models.fields.IntegerField', [], {'default': '1'}),\n 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['auth.User']\"})\n },\n 'oauthost.redirectionendpoint': {\n 'Meta': {'object_name': 
'RedirectionEndpoint'},\n 'client': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['oauthost.Client']\"}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'uri': ('django.db.models.fields.URLField', [], {'max_length': '200'})\n },\n 'oauthost.scope': {\n 'Meta': {'ordering': \"['title']\", 'object_name': 'Scope'},\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'identifier': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),\n 'title': ('django.db.models.fields.CharField', [], {'max_length': '250'})\n },\n 'oauthost.token': {\n 'Meta': {'object_name': 'Token'},\n 'access_token': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),\n 'access_token_type': ('django.db.models.fields.CharField', [], {'default': \"'bearer'\", 'max_length': '100'}),\n 'client': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['oauthost.Client']\"}),\n 'code': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['oauthost.AuthorizationCode']\", 'null': 'True', 'blank': 'True'}),\n 'date_issued': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),\n 'expires_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'refresh_token': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True', 'blank': 'True'}),\n 'scopes': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': \"orm['oauthost.Scope']\", 'null': 'True', 'blank': 'True'}),\n 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['auth.User']\", 'null': 'True', 'blank': 'True'})\n }\n }\n\n complete_apps = ['oauthost']\n"
},
{
"alpha_fraction": 0.7102803587913513,
"alphanum_fraction": 0.7102803587913513,
"avg_line_length": 44.85714340209961,
"blob_id": "790ae40a604f91dea862ced7ae065a1003a71c66",
"content_id": "4c5c7b525d4958f4bbf7c301affb846d0fb59856",
"detected_licenses": [
"BSD-2-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 321,
"license_type": "permissive",
"max_line_length": 80,
"num_lines": 7,
"path": "/oauthost/urls.py",
"repo_name": "imcj/django-oauthost",
"src_encoding": "UTF-8",
"text": "from django.conf.urls.defaults import patterns, url\n\nurlpatterns = patterns('oauthost',\n url(r'^auth/$', 'auth_views.endpoint_authorize', name='oauthost_authorize'),\n # SPEC: The [token] endpoint URI MUST NOT include a fragment component.\n url(r'^token/$', 'auth_views.endpoint_token', name='oauthost_token'),\n)\n"
},
{
"alpha_fraction": 0.6833880543708801,
"alphanum_fraction": 0.6857321262359619,
"avg_line_length": 34.54999923706055,
"blob_id": "8235d0f15ef07a42d9431d6b0bf0141264eb9d38",
"content_id": "7e0901cb69d519a5e9161e1b204ba0e4bdae3d30",
"detected_licenses": [
"BSD-2-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6399,
"license_type": "permissive",
"max_line_length": 113,
"num_lines": 180,
"path": "/oauthost/utils.py",
"repo_name": "imcj/django-oauthost",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\nfrom django.http import HttpResponse, HttpResponseRedirect\nfrom django.utils import simplejson\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom oauthost.config import *\nfrom oauthost.models import Scope\n\n\ndef filter_input_params(input_params):\n \"\"\"Filters request parameters and returns filtered dictionary.\n\n SPEC: Parameters sent without a value MUST be treated as if they were omitted from the request.\n\n \"\"\"\n params_filtered = {}\n for key, value in input_params.items():\n if value:\n params_filtered[key] = value\n return params_filtered\n\n\ndef get_remote_ip(request):\n \"\"\"Resolves and returns client IP.\"\"\"\n\n forwarded = request.META.get('HTTP_X_FORWARDED_FOR')\n ip = request.META.get('REMOTE_ADDR')\n if forwarded is not None:\n ip = forwarded.split(',')[-1].strip()\n return ip\n\n\ndef resolve_scopes_to_apply(scopes_requested, client):\n \"\"\"Gets space delimited list of scopes from client request,\n and returns a list of scope objects, corrected according\n to auth server settings.\n\n \"\"\"\n\n if scopes_requested is not None:\n scopes_requested = scopes_requested.split(' ')\n else:\n scopes_requested = []\n\n scopes_available = []\n scopes_to_apply = []\n\n # Scopes available to the client.\n for scope in client.scopes.all():\n scopes_available.append(scope)\n\n # Scopes selection is unrestricted by the client, and we make all scopes available.\n if not scopes_available:\n scopes = Scope.objects.all()\n for scope in scopes:\n scopes_available.append(scope)\n\n # SPEC: If the issued access token scope is different from the one\n # requested by the client, the authorization server SHOULD include\n # the \"scope\" response parameter to inform the client of the actual\n # scope granted.\n\n # No scopes requested, and we are giving an access to all scopes available.\n # TODO Needs revision.\n if not scopes_requested:\n scopes_to_apply = scopes_available\n\n # Unavailable scopes are requested.\n scopes_available_set = set(s.identifier for s in scopes_available)\n if set(scopes_requested).difference(scopes_available_set):\n scopes_to_apply_ids = scopes_available_set.intersection(scopes_requested)\n if not scopes_to_apply_ids:\n # Only unavailable scopes are requested.\n # TODO Needs revision.\n scopes_to_apply = []\n else:\n scopes_to_apply = []\n for scope in scopes_available:\n if scope.identifier in scopes_to_apply_ids:\n scopes_to_apply.append(scope)\n\n return scopes_to_apply\n\n\ndef ep_auth_response_error_page(request, error_text, http_status=400):\n \"\"\"For authorization endpoint. Renders a page with error description.\"\"\"\n data_dict = {'oauthost_title': _('Error'), 'oauthost_error_text': error_text}\n return render(request, OAUTHOST_TEMPLATE_AUTHORIZE_ERROR, data_dict, status=http_status)\n\n\ndef ep_auth_build_redirect_uri(redirect_base, params, use_uri_fragment):\n \"\"\"For authorization endpoint. Builds up redirection URL.\"\"\"\n if use_uri_fragment:\n redirect_base = '%s#' % redirect_base\n else:\n # SPEC: query component MUST be retained when adding additional query parameters.\n if '?' not in redirect_base:\n redirect_base = '%s?' % redirect_base\n return '%s%s' % (redirect_base, '&'.join(['%s=%s' % (key, value) for key, value in params.items()]))\n\n\ndef ep_auth_response(redirect_base, params, use_uri_fragment):\n \"\"\"For authorization endpoint. 
Issues response.\"\"\"\n return HttpResponseRedirect(ep_auth_build_redirect_uri(redirect_base, params, use_uri_fragment))\n\n\ndef ep_auth_response_error(redirect_to, uri_fragment, error_type, description):\n \"\"\"For authorization endpoint. Issues error response.\"\"\"\n return ep_auth_response(redirect_to, {'error': error_type, 'error_description': description}, uri_fragment)\n\n\ndef ep_auth_clear_session_data(request):\n \"\"\"For authorization endpoint. Clears oauth data from session.\"\"\"\n del request.session['oauth_client_id']\n del request.session['oauth_response_type']\n del request.session['oauth_redirect_uri']\n del request.session['oauth_scopes_ids']\n del request.session['oauth_state']\n\n\ndef ep_token_response(params, status=200, additional_headers={}):\n \"\"\"For token endpoint. Issues JSON response.\"\"\"\n response = HttpResponse(content_type='application/json;charset=UTF-8',\n content=simplejson.dumps(params), status=status)\n response['Cache-Control'] = 'no-store'\n response['Pragma'] = 'no-cache'\n for key, value in additional_headers.items():\n response[key] = value\n return response\n\n\ndef ep_token_response_error(error_type, description, status=400, additional_headers={}):\n \"\"\"For token endpoint. Issues JSON error response.\"\"\"\n return ep_token_response({'error': error_type, 'error_description': description}, status, additional_headers)\n\n\ndef forbidden_error_response(request):\n \"\"\"Renders `forbidden` page.\"\"\"\n return render(request, OAUTHOST_TEMPLATE_FORBIDDEN, {'oauthost_title': _('Access Denied')}, status=403)\n\n\ndef auth_handler_response(request, scope=None):\n \"\"\"Checks for token data in request using various\n methods depending on token types defined in REGISTRY_TOKEN_TYPE.\n\n ``scope`` - scope identifier string to check token has access to the scope.\n\n \"\"\"\n token_auth_classes = [item[2] for item in REGISTRY_TOKEN_TYPE]\n\n for auth_class in token_auth_classes:\n handler = auth_class(request, scope)\n response = handler.response()\n if response is not None:\n return response\n\n return None\n\n\nclass PistonAuthHelper(object):\n \"\"\"Authentication class for Piston resources.\n\n To be used in a usual piston-auth-way::\n\n from piston.resource import Resource\n from oauthost.utils import PistonAuthHelper\n\n my_resource_view = Resource(MyResourceHandler, authentication=PistonAuthHelper('my_resource:my_scope'))\n\n \"\"\"\n\n def __init__(self, target_scope):\n self.target_scope = target_scope\n\n def is_authenticated(self, request):\n self.auth_response = auth_handler_response(request, scope=self.target_scope)\n return self.auth_response is None\n\n def challenge(self):\n return self.auth_response\n"
},
{
"alpha_fraction": 0.6979591846466064,
"alphanum_fraction": 0.7061224579811096,
"avg_line_length": 60.5,
"blob_id": "fe95bc08f86cac513aa3b546db9b25c6fbcb4d8d",
"content_id": "4c3eb11d50499dbb8c8f23c85753ff0fcb425b88",
"detected_licenses": [
"BSD-2-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "HTML",
"length_bytes": 245,
"license_type": "permissive",
"max_line_length": 152,
"num_lines": 4,
"path": "/oauthost/templates/oauthost/restricted.html",
"repo_name": "imcj/django-oauthost",
"src_encoding": "UTF-8",
"text": "{% extends 'common/base.html' %}{% load i18n %}\n{% block oauthost_contents %}\n <p>{% blocktrans %}Access to this resource is restricted. Please provide appropriate credentials with the request to proceed.{% endblocktrans %}</p>\n{% endblock %}"
},
{
"alpha_fraction": 0.714893639087677,
"alphanum_fraction": 0.7319148778915405,
"avg_line_length": 32.64285659790039,
"blob_id": "50a5386d36324a9464877a99138f34b568f3b436",
"content_id": "a7ca2dd63f35dbfbafd25370bcf4c8771211f8ec",
"detected_licenses": [
"BSD-2-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "reStructuredText",
"length_bytes": 470,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 14,
"path": "/docs/source/references.rst",
"repo_name": "imcj/django-oauthost",
"src_encoding": "UTF-8",
"text": "Things to read\n==============\n\nOAuth 2.0 Authorization Protocol - http://tools.ietf.org/html/draft-ietf-oauth-v2\n\nOAuth 2.0 Bearer Tokens - http://tools.ietf.org/html/draft-ietf-oauth-v2-bearer\n\n\nAll different flavors:\n\n* Yandex - http://api.yandex.ru/oauth/doc/dg/concepts/About.xml\n* GitHub - http://developer.github.com/v3/oauth/\n* Google - http://code.google.com/intl/en/apis/accounts/docs/OAuth2.html\n* Facebook - http://developers.facebook.com/docs/authentication/"
},
{
"alpha_fraction": 0.6952381134033203,
"alphanum_fraction": 0.7142857313156128,
"avg_line_length": 20.91666603088379,
"blob_id": "ae891f02503ebc8e413644b40a394f1db81816aa",
"content_id": "0360f900d5676c769b5bdff375167c18fc9533d2",
"detected_licenses": [
"BSD-2-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "reStructuredText",
"length_bytes": 525,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 24,
"path": "/README.rst",
"repo_name": "imcj/django-oauthost",
"src_encoding": "UTF-8",
"text": "django-oauthost\n===============\nhttp://github.com/idlesign/django-oauthost\n\n\n*ALPHA STAGE PROJECT. NOT TO BE USED IN PRODUCTION*\n\n\nWhat's that\n-----------\n\n*django-oauthost is a reusable application for Django, introducing OAuth2 server.*\n\n\nRequirements\n------------\n\n1. Django 1.3+\n2. Auth Django contrib package\n3. South 0.7.1+ for Django (required for version upgrades)\n4. Django Admin site contrib package (for quick oauthost data manipulation).\n\n\n**Documentation**: http://readthedocs.org/docs/django-oauthost/en/latest/"
},
{
"alpha_fraction": 0.638144850730896,
"alphanum_fraction": 0.6592261791229248,
"avg_line_length": 33.767242431640625,
"blob_id": "ef93c22b83a79c090bde66cf0488c56c97d293a0",
"content_id": "4778ee13c00f6d4b5eea8d4f01b05cdcbf380346",
"detected_licenses": [
"BSD-2-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "reStructuredText",
"length_bytes": 4032,
"license_type": "permissive",
"max_line_length": 130,
"num_lines": 116,
"path": "/docs/source/quickstart.rst",
"repo_name": "imcj/django-oauthost",
"src_encoding": "UTF-8",
"text": "Quick start\n===========\n\n.. warning::\n\n OAuth 2 requires secure connections, so oauthost will check for https\n *if your project is not in debug mode*, and will refuse to function\n if check fails.\n\n\nPreparations\n------------\n\n0. Initialize DB tables for oauthost, run from command line:\n\n ``python manage.py migrate``\n\n\n1. Attach `oauthost.urls` to project `urls`.\n\n Authorization endpoint would be available at `{ BASE_URL }auth/`.\n\n Token endpoint would be available at `{ BASE_URL }token/`.\n\n2. Decorate application views which require OAuth 2 authorization with `oauth_required`::\n\n @oauth_required(scope='my_articles_app:my_summary')\n def summary(request, article_id):\n \"\"\"Scope associated with this view is `my_articles_app:my_summary`.\"\"\"\n ...\n\n @oauth_required(scope_auto=True)\n def results(request, poll_id):\n \"\"\"Scope for this view would be evaluated to `polls:results` if this view is in `polls` app.\"\"\"\n ...\n\n3. Use Django's Admin site contrib package to manipulate oauthost data (e.g. register clients).\n\n 3.1. Register *scopes* for your Django application.\n\n Scope identifiers might be, e.g.: `polls:index`, `polls:detail`, `polls:results`.\n\n .. note::\n\n You can also use ``syncscopes`` management command which automatically creates\n scopes for `oauth_required` decorated views available in application(s) which\n names are passed to the command::\n\n python manage.py syncscopes polls\n\n 3.2. Register *client* which could be granted with access to your resources.\n\n .. note::\n\n Just right there on client registration page you can set up redirection endpoints,\n register authorization codes and issue tokens. Latter two should normally be\n issued to client itself as described in paragraph no 4.\n\n\nTokens and protected resources\n------------------------------\n\n4. Access authorization and/or token endpoints (see no 1 above) from within\nthe client (registered in no 3.2) to gain credentials (namely an *access token*)\nto access protected views.\n\n 4.1. First your client needs to get an access token and there are several ways to get it.\n\n .. note::\n\n In the examples below we use client with ID 1234, which has one redirection\n endpoint (e.g. `http://myapp.com/`).\n\n 4.1.1. Grant token through authorization code.\n\n 1. Request for authorization code with GET HTTP method::\n\n {BASE_URL}auth/?client_id=1234&response_type=code\n\n 2. Grab `code` param value from URL your client is redirected to (e.g. `http://myapp.com/`).\n 3. Exchange authorization code for access token using POST HTTP method::\n\n {BASE_URL}token/ grant_type=authorization_code&code={code_from_no_2}&redirect_uri=http://myapp.com/&client_id=1234\n\n 4. Get `access_token` param value from JSON document returned by server.\n\n 4.1.2. Grant token implicitly.\n\n 1. Request for authorization code with GET HTTP method::\n\n {BASE_URL}auth/?client_id=1234&response_type=token\n\n 2. Get `access_token` param value from JSON document returned by server.\n\n 4.2. Second your client should supply token from no 4.1 (or no 3.2) to server when\n accessing any protected views of your application.\n Currently there are three ways to do it. Let's suppose our access token is 987654.\n\n 4.2.1. Recommended way is to pass token in HTTP Authorization Bearer header::\n\n GET /polls HTTP/1.1\n Host: myapp.com\n Authorization: Bearer 987654\n\n 4.2.2. 
You can also use POST HTTP method (`access_token` param is checked)::\n\n POST /polls HTTP/1.1\n Host: myapp.com\n Content-Type: application/x-www-form-urlencoded\n\n access_token=987654\n\n 4.2.3. Finally you can use GET HTTP method (`access_token` param is checked)::\n\n GET /polls?access_token=987654 HTTP/1.1\n Host: myapp.com"
},
{
"alpha_fraction": 0.6387064456939697,
"alphanum_fraction": 0.6467912793159485,
"avg_line_length": 48.167701721191406,
"blob_id": "cd94d48741f7fdbce910393ff2557b1f88c2baf4",
"content_id": "cf45955d2c2cb192b951ea5781c2b19c966ff08d",
"detected_licenses": [
"BSD-2-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7916,
"license_type": "permissive",
"max_line_length": 240,
"num_lines": 161,
"path": "/oauthost/models.py",
"repo_name": "imcj/django-oauthost",
"src_encoding": "UTF-8",
"text": "from uuid import uuid4\nfrom random import randrange\n\nfrom django.db import models, IntegrityError\nfrom django.utils.translation import ugettext_lazy as _\nfrom django.contrib.auth.models import User\n\nfrom oauthost.config import *\nfrom oauthost.fields import URLSchemeField\n\n\nclass Scope(models.Model):\n\n identifier = models.CharField(_('Scope ID'), max_length=100, help_text=_('Scope identifier.'), unique=True)\n title = models.CharField(_('Scope title'), max_length=250, help_text=_('Scope humanfriendly name.'))\n\n class Meta:\n verbose_name = _('Scope')\n verbose_name_plural = _('Scopes')\n ordering = ['title']\n\n def __unicode__(self):\n return '%s' % self.identifier\n\n\nclass Client(models.Model):\n\n TYPE_CONFIDENTIAL = 1\n TYPE_PUBLIC = 2\n\n TYPE_CHOICES = (\n (TYPE_CONFIDENTIAL, _('Confidential')),\n (TYPE_PUBLIC, _('Public')),\n )\n\n date_registered = models.DateTimeField(_('Registered at'), auto_now_add=True)\n title = models.CharField(_('Title'), max_length=100, help_text=_('Name of the client application.'), unique=True)\n user = models.ForeignKey(User, verbose_name=_('User'), help_text=_('User registered this client.'))\n description = models.TextField(_('Description'), max_length=100, help_text=_('Client application description.'))\n link = models.URLField(_('URL'), help_text=_('Client application URL.'), null=True, blank=True)\n identifier = models.CharField(_('Identifier'), max_length=250, help_text=_('Not secret client identifier. <i>If empty will be generated automatically based on client title</i>.'), unique=True, blank=True)\n token_lifetime = models.IntegerField(_('Token lifetime'), help_text=_('Time in seconds after which token expires.'), null=True, blank=True)\n password = models.CharField(_('Password'), max_length=250, help_text=_('Secret that can be used with HTTP Basic authentication scheme with identifier as username.'), blank=True)\n type = models.IntegerField(_('Type'),\n help_text=_('<b>Confidential</b> — Clients capable of maintaining the confidentiality of their credentials, or capable of secure client authentication using other means.<br /> \\\n <b>Public</b> — Clients incapable of maintaining the confidentiality of their credentials, and incapable of secure client authentication via any other means'),\n choices=TYPE_CHOICES, default=TYPE_CONFIDENTIAL)\n scopes = models.ManyToManyField(Scope, verbose_name=_('Scopes'), help_text=_('The scopes client is restricted to ask for tokens. 
<i>All scopes are available for client if none selected.</i>'), null=True, blank=True)\n hash_sign_supported = models.BooleanField(_('Supports # in \"Location\"'), help_text=_('Should be checked if th client supports fragment component (#) in the HTTP \"Location\" response header field'), default=True)\n\n class Meta:\n verbose_name = _('Client')\n verbose_name_plural = _('Clients')\n\n def __unicode__(self):\n return '%s' % self.title\n\n def generate_indentifier(self):\n \"\"\"Identifier length: 32 chars.\"\"\"\n return str(uuid4()).replace('-', '')\n\n def save(self, force_insert=False, force_update=False, **kwargs):\n if self.identifier == '':\n while True:\n self.identifier = self.generate_indentifier()\n try:\n super(Client, self).save(force_insert, force_update, **kwargs)\n except IntegrityError:\n pass\n else:\n break\n else:\n super(Client, self).save(force_insert, force_update, **kwargs)\n\n\nclass RedirectionEndpoint(models.Model):\n\n client = models.ForeignKey(Client, verbose_name=_('Client'))\n uri = URLSchemeField(_('URI'), help_text=_('URI or URI scheme for authorization server to redirect client when an interaction with a resource owner is complete.'))\n\n class Meta:\n verbose_name = _('Redirection Endpoint')\n verbose_name_plural = _('Redirection Endpoints')\n\n def __unicode__(self):\n return '%s' % self.uri\n\n\nclass AuthorizationCode(models.Model):\n\n # A maximum authorization code lifetime of 10 minutes is RECOMMENDED\n date_issued = models.DateTimeField(_('Issued at'), auto_now_add=True)\n code = models.CharField(_('Code'), max_length=7, help_text=_('Code issued upon authorization.'), unique=True)\n user = models.ForeignKey(User, verbose_name=_('User'), help_text=_('The user authorization is granted for.'))\n client = models.ForeignKey(Client, verbose_name=_('Client'), help_text=_('The client authorization is granted for.'))\n uri = URLSchemeField(_('Redirect URI'), help_text=_('The URI authorization is bound to.'))\n scopes = models.ManyToManyField(Scope, verbose_name=_('Scopes'), help_text=_('The scopes token issued from this code should be restricted to.'), null=True, blank=True)\n\n class Meta:\n verbose_name = _('Authorization code')\n verbose_name_plural = _('Authorization codes')\n\n def __unicode__(self):\n return '%s' % self.code\n\n def generate_code(self):\n \"\"\"Code length: 7 chars.\"\"\"\n return randrange(1000000, 9999999)\n\n def save(self, force_insert=False, force_update=False, **kwargs):\n if self.code == '':\n while True:\n self.code = self.generate_code()\n try:\n super(AuthorizationCode, self).save(force_insert, force_update, **kwargs)\n except IntegrityError:\n pass\n else:\n break\n else:\n super(AuthorizationCode, self).save(force_insert, force_update, **kwargs)\n\n\nclass Token(models.Model):\n\n # A maximum authorization code lifetime of 10 minutes is RECOMMENDED\n date_issued = models.DateTimeField(_('Issued at'), auto_now_add=True)\n expires_at = models.DateTimeField(_('Expires at'), help_text=_('Time when this token expires.'), null=True, blank=True)\n access_token = models.CharField(_('Access Token'), max_length=32, help_text=_('Token to be used to access resources.'), unique=True)\n refresh_token = models.CharField(_('Refresh Token'), max_length=32, help_text=_('Token to be used to refresh access token.'), unique=True, null=True, blank=True)\n access_token_type = models.CharField(_('Type'), max_length=100, help_text=_('Access token type client uses to apply the appropriate authorization method.'), choices=[(t[0], t[1]) for t in 
REGISTRY_TOKEN_TYPE], default=TOKEN_TYPE_BEARER)\n user = models.ForeignKey(User, verbose_name=_('User'), help_text=_('The user token is issued for.'), null=True, blank=True)\n client = models.ForeignKey(Client, verbose_name=_('Client'), help_text=_('The client application token is issued for.'))\n code = models.ForeignKey(AuthorizationCode, verbose_name=_('Code'), help_text=_('Authorization code used to generate this token.'), null=True, blank=True)\n scopes = models.ManyToManyField(Scope, verbose_name=_('Scopes'), help_text=_('The scopes token is restricted to.'), null=True, blank=True)\n\n class Meta:\n verbose_name = _('Token')\n verbose_name_plural = _('Tokens')\n\n def __unicode__(self):\n return '%s' % self.code\n\n def generate_token(self):\n \"\"\"Identifier length: 32 chars.\"\"\"\n return str(uuid4()).replace('-', '')\n\n def save(self, force_insert=False, force_update=False, **kwargs):\n if self.access_token == '':\n while True:\n self.access_token = self.generate_token()\n self.refresh_token = self.generate_token()\n\n try:\n super(Token, self).save(force_insert, force_update, **kwargs)\n except IntegrityError:\n pass\n else:\n break\n else:\n super(Token, self).save(force_insert, force_update, **kwargs)\n"
},
{
"alpha_fraction": 0.721800684928894,
"alphanum_fraction": 0.727870523929596,
"avg_line_length": 26.47222137451172,
"blob_id": "80bdd91c84b347f784fede89506e56cc4a89bdb3",
"content_id": "c97af5f06e7c193f4dec8adf2139568606676208",
"detected_licenses": [
"BSD-2-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "reStructuredText",
"length_bytes": 1979,
"license_type": "permissive",
"max_line_length": 116,
"num_lines": 72,
"path": "/docs/source/index.rst",
"repo_name": "imcj/django-oauthost",
"src_encoding": "UTF-8",
"text": "django-oauthost documentation\n=============================\n\n*django-oauthost is a reusable application for Django, introducing OAuth2 server functionality.*\n\n.. warning::\n\n Alpha stage project - not to be used in production.\n\n\nRequirements\n------------\n\n1. Django 1.3+\n2. Auth Django contrib package\n3. South 0.7.1+ for Django (required for version upgrades)\n4. Django Admin site contrib package (for quick oauthost data manipulation).\n\n\nCheck list\n----------\n\n* Do not use Django's brand new cookie-based session engine with oauthost, it may cause security issues.\n* Do not use OAuth1 clients as they probably won't work.\n* MIDDLEWARE_CLASSES has\n\n `django.contrib.sessions.middleware.SessionMiddleware`\n\n `django.middleware.csrf.CsrfViewMiddleware`\n\n* TEMPLATE_CONTEXT_PROCESSORS has\n\n `django.core.context_processors.request`\n\n* INSTALLED_APPS has\n\n `oauthost`\n\n\nTable of Contents\n-----------------\n\n.. toctree::\n :maxdepth: 2\n\n quickstart\n helpers\n references\n\n\nGet involved into django-oauthost\n---------------------------------\n\n**Submit issues.** If you spotted something weird in application behavior or want to propose a feature you can do\nthat at https://github.com/idlesign/django-oauthost/issues\n\n**Write code.** If you are eager to participate in application development, fork it\nat https://github.com/idlesign/django-oauthost, write your code, whether it should be a bugfix or a feature\nimplementation, and make a pull request right from the forked project page.\n\n**Translate.** If want to translate the application into your native language use Transifex:\nhttps://www.transifex.net/projects/p/django-oauthost/.\n\n**Spread the word.** If you have some tips and tricks or any other words in mind that you think might be of interest\nfor the others — publish it.\n\n\nThe tip\n-------\n\nIf the application is not what you want for site navigation, you might be interested in considering\nother choices at http://djangopackages.com/grids/g/oauth-servers/"
},
{
"alpha_fraction": 0.7802929282188416,
"alphanum_fraction": 0.7802929282188416,
"avg_line_length": 36.54999923706055,
"blob_id": "07dbe2041d19891db32eadccd234a3a1194f2eb9",
"content_id": "6866eafa60fa5271233781cfcd4a5206681efb6c",
"detected_licenses": [
"BSD-2-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 751,
"license_type": "permissive",
"max_line_length": 104,
"num_lines": 20,
"path": "/oauthost/config.py",
"repo_name": "imcj/django-oauthost",
"src_encoding": "UTF-8",
"text": "import logging\n\nfrom oauthost.auth_handlers import BearerAuthHandler\n\nLOGGER = logging.getLogger('django.oauthost')\n\nREGISTRY_EP_AUTH_RESPONSE_TYPE = ['code', 'token']\nREGISTRY_EP_TOKEN_GRANT_TYPE = ['authorization_code', 'password', 'client_credentials', 'refresh_token']\n\n# Someday here might be something more than bare Bearer.\nTOKEN_TYPE_BEARER = 'bearer'\nREGISTRY_TOKEN_TYPE = {\n (TOKEN_TYPE_BEARER, 'Bearer', BearerAuthHandler),\n}\n\nOAUTHOST_TEMPLATE_AUTHORIZE = 'oauthost/authorize.html'\nOAUTHOST_TEMPLATE_AUTHORIZE_ERROR = 'oauthost/authorize_error.html'\nOAUTHOST_TEMPLATE_AUTHORIZE_PROCEED = 'oauthost/authorize_proceed.html'\nOAUTHOST_TEMPLATE_FORBIDDEN = 'oauthost/forbidden.html'\nOAUTHOST_TEMPLATE_RESTRICTED = 'oauthost/restricted.html'\n"
},
{
"alpha_fraction": 0.6646562218666077,
"alphanum_fraction": 0.6663315892219543,
"avg_line_length": 43.6766471862793,
"blob_id": "0c47de5a12b8da4b80d9742182f6da4816a280fc",
"content_id": "524942828d57298e3b01c2df248f241b115e7d36",
"detected_licenses": [
"BSD-2-Clause"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 14922,
"license_type": "permissive",
"max_line_length": 190,
"num_lines": 334,
"path": "/oauthost/auth_views.py",
"repo_name": "imcj/django-oauthost",
"src_encoding": "UTF-8",
"text": "import base64\n\nfrom time import time\nfrom datetime import datetime\n\nfrom django.conf import settings\nfrom django.shortcuts import render\nfrom django.views.decorators.csrf import csrf_exempt\nfrom django.contrib.auth import SESSION_KEY\nfrom django.contrib.auth.decorators import login_required\nfrom django.contrib.auth.models import User\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom oauthost.models import Client, AuthorizationCode, Token\nfrom oauthost.utils import *\nfrom oauthost.config import *\n\n\n@login_required\ndef endpoint_authorize(request):\n \"\"\"\n SPEC: The authorization endpoint is used to interact with the resource\n owner and obtain an authorization grant.\n\n SPEC: The authorization server MUST support TLS...\n The authorization server MUST support the use of the HTTP \"GET\" method [RFC2616]\n for the authorization endpoint, and MAY support the use of the \"POST\" method as well.\n\n \"\"\"\n\n # Avoid usage of previous auth sessions.\n request.session[SESSION_KEY] = None\n\n # SPEC: Since requests to the authorization endpoint result in user authentication\n # and the transmission of clear-text credentials (in the HTTP response),\n # the authorization server MUST require the use of a transport-layer\n # security mechanism when sending requests to the authorization endpoint.\n if not request.is_secure() and not settings.DEBUG:\n # Insecure connections are only available in debug mode.\n return ep_auth_response_error_page(request, _('OAuth requires secure connection.'), 403)\n\n if request.POST.get('auth_decision') is None:\n # User has made no decision on auth confirmation yet.\n\n input_params = filter_input_params(request.REQUEST)\n\n response_type = input_params.get('response_type')\n client_id = input_params.get('client_id')\n\n redirect_uri = input_params.get('redirect_uri')\n redirect_uri_final = redirect_uri\n\n if client_id is None:\n # Fail fast without a DB hit.\n return ep_auth_response_error_page(request, _('Client ID must be supplied.'))\n\n if response_type not in REGISTRY_EP_AUTH_RESPONSE_TYPE:\n return ep_auth_response_error_page(request, _('Unknown response type requested. Expected: %s.') % ', '.join(REGISTRY_EP_AUTH_RESPONSE_TYPE))\n\n try:\n client = Client.objects.get(identifier=client_id)\n except ObjectDoesNotExist:\n LOGGER.error('Invalid client ID supplied. Value \"%s\" was sent from IP \"%s\".' % (client_id, get_remote_ip(request)))\n return ep_auth_response_error_page(request, _('Invalid client ID is supplied.'))\n\n # TODO There should be at least one redirection URI associated with a client. URI validity should be checked while such an association is made.\n registered_uris = [url[0] for url in client.redirectionendpoint_set.values_list('uri')]\n\n # Check redirection URI validity.\n if redirect_uri is None:\n # redirect_uri is optional and was not supplied.\n if len(registered_uris) == 1:\n # There is only one URI associated with client, so we use it.\n redirect_uri_final = registered_uris[0]\n else:\n # Several URIs are registered with the client, decision is ambiguous.\n LOGGER.error('Redirect URI was not supplied by client with ID \"%s\". Request from IP \"%s\".' 
% (client.id, get_remote_ip(request)))\n return ep_auth_response_error_page(request, _('Redirect URI should be supplied for a given client.'))\n\n # SPEC: The authorization server SHOULD NOT redirect the user-agent to unregistered or untrusted URIs\n # to prevent the authorization endpoint from being used as an open redirector.\n if redirect_uri_final not in registered_uris:\n LOGGER.error('An attempt to use an untrusted URI \"%s\" for client with ID \"%s\". Request from IP \"%s\".' % (redirect_uri_final, client.id, get_remote_ip(request)))\n return ep_auth_response_error_page(request, _('Redirection URI supplied is not associated with given client.'))\n\n # Access token scope requested,\n scopes_to_apply = resolve_scopes_to_apply(input_params.get('scope'), client)\n\n request.session['oauth_client_id'] = client.id\n request.session['oauth_response_type'] = response_type\n request.session['oauth_redirect_uri'] = redirect_uri_final\n request.session['oauth_scopes_ids'] = [s.id for s in scopes_to_apply]\n request.session['oauth_state'] = input_params.get('state')\n\n dict_data = {\n 'client': client,\n 'scopes_obj': scopes_to_apply,\n 'oauthost_title': _('Authorization Request')\n }\n return render(request, OAUTHOST_TEMPLATE_AUTHORIZE, dict_data)\n\n # ========================================================================================\n # User has made his choice using auth form.\n\n redirect_uri = request.session.get('oauth_redirect_uri')\n response_type = request.session.get('oauth_response_type')\n params_as_uri_fragment = (response_type == 'token')\n\n if request.POST.get('confirmed') is None:\n # User has declined authorization.\n ep_auth_clear_session_data(request)\n return ep_auth_response_error(redirect_uri, params_as_uri_fragment, 'access_denied', 'Authorization is canceled by user')\n\n # User confirmed authorization using a web-form.\n client = Client.objects.get(pk=request.session.get('oauth_client_id'))\n scopes_to_apply = Scope.objects.filter(id__in=request.session.get('oauth_scopes_ids')).all()\n\n output_params = {}\n auth_obj = None\n\n # Used for \"Authorization code\" Grant Type.\n if response_type == 'code':\n # Generating Authorization Code.\n auth_obj = AuthorizationCode(client=client, user=request.user, uri=redirect_uri)\n auth_obj.save()\n output_params['code'] = auth_obj.code\n\n # Used as \"Implicit\" Grant Type.\n if response_type == 'token':\n expires_in = client.token_lifetime\n expires_at = None\n if expires_in is not None:\n output_params['expires_in'] = expires_in\n expires_at = datetime.fromtimestamp(int(time() + expires_in))\n # Generating Token.\n auth_obj = Token(client=client, user=request.user, expires_at=expires_at)\n auth_obj.save()\n output_params['access_token'] = auth_obj.access_token\n output_params['token_type'] = auth_obj.access_token_type\n\n if auth_obj is not None:\n # Link scopes to auth object.\n for scope in scopes_to_apply:\n auth_obj.scopes.add(scope)\n\n state = request.session.get('state')\n if state is not None:\n output_params['state'] = state\n\n ep_auth_clear_session_data(request)\n\n # SPEC: Developers should note that some HTTP client implementations do not\n # support the inclusion of a fragment component in the HTTP \"Location\"\n # response header field. 
Such client will require using other methods\n # for redirecting the client than a 3xx redirection response.\n if not client.hash_sign_supported:\n data_dict = {'action_uri': ep_auth_build_redirect_uri(redirect_uri, output_params, params_as_uri_fragment)}\n return render(request, OAUTHOST_TEMPLATE_AUTHORIZE_PROCEED, data_dict)\n\n return ep_auth_response(redirect_uri, output_params, params_as_uri_fragment)\n\n\n@csrf_exempt\ndef endpoint_token(request):\n \"\"\"\n SPEC: The token endpoint is used by the client to obtain an access\n token by presenting its authorization grant or refresh token. The token\n endpoint is used with every authorization grant except for the\n implicit grant type (since an access token is issued directly).\n\n SPEC: The authorization server MUST support TLS...\n The client MUST use the HTTP \"POST\" method when making access token requests.\n\n \"\"\"\n\n # SPEC: Since requests to the token endpoint result in the transmission\n # of clear-text credentials (in the HTTP request and response),\n # the authorization server MUST require the use of a transport-layer\n # security mechanism when sending requests to the token endpoint.\n if not request.is_secure() and not settings.DEBUG:\n # Insecure connections are only available in debug mode.\n return ep_auth_response_error_page(request, _('OAuth requires secure connection.'), 403)\n\n input_params = filter_input_params(request.POST)\n\n grant_type = input_params.get('grant_type')\n if grant_type not in REGISTRY_EP_TOKEN_GRANT_TYPE:\n return ep_token_response_error('unsupported_grant_type', 'Unsupported grant type is requested. Expected: `%s`. Given: `%s`' % ('`, `'.join(REGISTRY_EP_TOKEN_GRANT_TYPE), grant_type))\n\n token_obj_params = {}\n error_out_headers = {}\n client = None\n client_id = None\n client_secret = None\n\n # TODO More client authentication methods implementations needed.\n authorization_method = request.META.get('Authorization')\n if authorization_method is not None:\n # Authorization header detected.\n auth_method_type, auth_method_value = authorization_method.split(' ', 1)\n error_out_headers['WWW-Authenticate'] = auth_method_type\n # Handle client auth through HTTP Basic.\n if auth_method_type == 'Basic':\n try:\n client_id, client_secret = base64.b64decode(auth_method_value).split(':')\n except Exception:\n pass\n else:\n # SPEC: Including the client credentials in the request body using the two\n # parameters is NOT RECOMMENDED, and should be limited to clients\n # unable to directly utilize the HTTP Basic authentication scheme (or other\n # password-based HTTP authentication schemes).\n client_id = input_params.get('client_id')\n client_secret = input_params.get('client_secret')\n\n if client_id is not None:\n try:\n client = Client.objects.get(identifier=client_id)\n except ObjectDoesNotExist:\n client = None\n\n # SPEC: A public client that was not issued a client password MAY use\n # the \"client_id\" request parameter to identify itself when sending requests\n # to the token endpoint.\n if client is not None:\n if client.password.strip() != '' and client.password != client_secret:\n client = None\n\n if client is None:\n return ep_token_response_error('invalid_client', 'Unable to authenticate client by its credentials.', 401, error_out_headers)\n\n # Calculate token expiration datetime.\n expires_in = client.token_lifetime\n expires_at = None\n if expires_in is not None:\n expires_at = datetime.fromtimestamp(int(time() + expires_in))\n\n # TODO Scopes handling implementation required.\n\n if 
grant_type == 'authorization_code': # Grant Type: Authorization code.\n code = input_params.get('code')\n redirect_uri = input_params.get('redirect_uri')\n\n if code is None or redirect_uri is None:\n return ep_token_response_error('invalid_request', 'Required param(s) are missing. Expected: `code` and `redirect_uri`.')\n\n try:\n code = AuthorizationCode.objects.get(code=code)\n except ObjectDoesNotExist:\n return ep_token_response_error('invalid_grant', 'Invalid authorization code is supplied.')\n\n # SPEC: If an authorization code is used more than once, the authorization\n # server MUST deny the request and SHOULD attempt to revoke all tokens\n # previously issued based on that authorization code.\n previous_tokens = Token.objects.filter(code=code).all()\n if len(previous_tokens) > 0:\n previous_tokens.delete()\n code.delete()\n return ep_token_response_error('invalid_grant', 'Authorization code is used more than once. Code and tokens are revoked.')\n\n if code.uri != redirect_uri:\n return ep_token_response_error('invalid_grant', 'Supplied URI does not match the URI associated with authorization code.')\n\n if code.client.id != client.id:\n return ep_token_response_error('invalid_grant', 'Authorization code supplied was issued to another client.')\n\n user = code.user\n token_obj_params['code'] = code\n\n elif grant_type == 'password': # Grant type: Resource Owner Password Credentials.\n username = input_params.get('username')\n password = input_params.get('password')\n\n if username is None or password is None:\n return ep_token_response_error('invalid_request', 'Required param(s) are missing. Expected: `username` and `password`.')\n\n invalid_credentials = False\n try:\n user = User.objects.get(username=username)\n except ObjectDoesNotExist:\n invalid_credentials = True\n else:\n if not user.check_password(password):\n invalid_credentials = True\n\n if invalid_credentials:\n return ep_token_response_error('invalid_grant', 'Supplied resource owner credentials are invalid.')\n\n elif grant_type == 'client_credentials': # Grant type: Client Credentials.\n # That one is somewhat unclear.\n # So let's suppose that the user is one, who has registered the client.\n user = client.user\n\n elif grant_type == 'refresh_token': # Refreshing an Access Token.\n refresh_token = input_params.get('refresh_token')\n\n if refresh_token is None:\n return ep_token_response_error('invalid_request', 'Required `refresh_token` param is missing.')\n\n try:\n token = Token.objects.get(refresh_token=refresh_token)\n except ObjectDoesNotExist:\n return ep_token_response_error('invalid_grant', 'Refresh token supplied is invalid.')\n else:\n if token.client_id != client.id:\n return ep_token_response_error('invalid_grant', 'Refresh token supplied was issued to another client.')\n\n # For refresh token grant we only swap token values.\n token.date_issued = datetime.now()\n token.access_token = token.generate_token()\n token.refresh_token = token.generate_token()\n\n if grant_type != 'refresh_token':\n token = Token(client=client, user=user, expires_at=expires_at, **token_obj_params)\n\n token.save()\n\n output_params = {\n 'access_token': token.access_token,\n 'token_type': token.access_token_type,\n 'refresh_token': token.refresh_token\n }\n\n if expires_in is not None:\n output_params['expires_in'] = expires_in\n\n if grant_type == 'client_credentials':\n del(output_params['refresh_token'])\n\n # TODO Some auth methods require additional parameters to be passed as spec puts it.\n additional_params = {}\n\n 
return ep_token_response(output_params)\n"
}
] | 12 |
manishdm123/Projects
|
https://github.com/manishdm123/Projects
|
b95897a2401baa512729313b3edeb7ec6ef535e1
|
39e3562ed94aeb10feefd2b4b67455dd0fdfff74
|
770a4ad8328023e66c5f5ae1e2e4af2f4eb81578
|
refs/heads/master
| 2023-05-07T09:08:46.319217 | 2021-05-26T10:50:08 | 2021-05-26T10:50:08 | 104,698,533 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7733333110809326,
"alphanum_fraction": 0.7866666913032532,
"avg_line_length": 20.571428298950195,
"blob_id": "a629fef9e205cd53eb0d480fde0e5aaa3a5b2e97",
"content_id": "67d6b303aa92d453567f710f934758781f92a3a6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 150,
"license_type": "no_license",
"max_line_length": 40,
"num_lines": 7,
"path": "/turicreate.sh",
"repo_name": "manishdm123/Projects",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\ncd\nsource venv/bin/activate\ncd /mnt/c/Users/abc12/Documents/Projects\njupyter notebook\nread -p \"Press enter to exit\"\n#jupyter notebook stop"
},
{
"alpha_fraction": 0.7611940503120422,
"alphanum_fraction": 0.7611940503120422,
"avg_line_length": 31.5,
"blob_id": "84b009239968ed5a0d22f4d334da9b8b3da61238",
"content_id": "64d20113e240232a19a56d08236c94891fb05016",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 67,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 2,
"path": "/README.md",
"repo_name": "manishdm123/Projects",
"src_encoding": "UTF-8",
"text": "# Projects\r\nLinedin profile https://www.linkedin.com/in/manishdm/\r\n"
},
{
"alpha_fraction": 0.603371798992157,
"alphanum_fraction": 0.6379769444465637,
"avg_line_length": 23.65116310119629,
"blob_id": "1ace5229a816266902de45b3eeadd7152d12a212",
"content_id": "122b9b04be513fbdd698e552c4c4847e1bb655b4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1127,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 43,
"path": "/Convolutional Neural Networks/CNN-work-in-progress/load_code.py",
"repo_name": "manishdm123/Projects",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Fri Nov 3 23:26:43 2017\r\n\r\n@author: abc12\r\n\"\"\"\r\n\r\nfrom keras.preprocessing.image import ImageDataGenerator\r\nfrom keras.models import Sequential\r\nfrom keras.layers import Conv2D, MaxPooling2D\r\nfrom keras.layers import Activation, Dropout, Flatten, Dense\r\nfrom keras import backend as K\r\nimport os\r\nfrom keras.models import load_model\r\nfrom keras.preprocessing.image import img_to_array, load_img\r\nfrom PIL import Image\r\nimport numpy as np\r\n\r\nos.chdir(\"C:/Users/abc12/Desktop/Cats Dogs dataset\");\r\n\r\n#model.save(\"first_try.h5\")\r\ntest_model=load_model(\"Test.h5\")\r\n\r\nimg_width,img_height = im.size\r\nfor i in range(1,40):\r\n str1 = 'test1/' +str(i)+'.jpg'\r\n img=load_img(str1,False,target_size=(150,150))\r\n\r\n\r\n#img=load_img('test1/1.jpg')\r\n x=img_to_array(img)\r\n x=np.expand_dims(x,axis=0)\r\n \r\n preds=test_model.predict_classes(x)\r\n \r\n prob=test_model.predict_proba(x)\r\n print(preds,prob)\r\n str2= str(preds)\r\n \r\n if (int(str2[2]) == 1):\r\n img.save('dogs/'+str(i)+'.jpg')\r\n else:\r\n img.save('cats/'+str(i)+'.jpg')\r\n \r\n \r\n"
}
] | 3 |
RalfZhang/political-compass-backend
|
https://github.com/RalfZhang/political-compass-backend
|
59d6970c222fd0cfdcabbee5edacf3a59adc7e37
|
70aa7209ce7b19629cb82e267779e60d52b3ec3c
|
f25bded2cc30fd43bc4c802f0f811034a289e58c
|
refs/heads/master
| 2021-09-01T10:51:33.054040 | 2017-12-26T15:59:01 | 2017-12-26T15:59:01 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.670412003993988,
"alphanum_fraction": 0.6779026389122009,
"avg_line_length": 28.77777862548828,
"blob_id": "491369a8b49207071c364068dd1a6886228e09d7",
"content_id": "35485247ac6b0f5afa169801c1e25c28551e3a08",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 267,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 9,
"path": "/qaa/urls.py",
"repo_name": "RalfZhang/political-compass-backend",
"src_encoding": "UTF-8",
"text": "from django.conf.urls import url\nfrom . import views\n\nurlpatterns = [\n url(r'^$', views.index, name='index'),\n url(r'^questions$', views.getQuestionList),\n url(r'^questions/(?P<question_id>[0-9]+)$', views.getQuestionById),\n url(r'^answers$', views.postAnswer),\n]"
},
{
"alpha_fraction": 0.39039409160614014,
"alphanum_fraction": 0.5178571343421936,
"avg_line_length": 18.117647171020508,
"blob_id": "a2b1bbb4d0e24d0e3b8581fbaff4f81afc65e6c1",
"content_id": "88be1978935696bc822fe6a36c176fb476896921",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2026,
"license_type": "no_license",
"max_line_length": 469,
"num_lines": 85,
"path": "/README.md",
"repo_name": "RalfZhang/political-compass-backend",
"src_encoding": "UTF-8",
"text": "# An Django Projcet\n\nzuobiao.me 内容与分析后端接口\n\n**Developing...**\n\n## 接口列表\n\n### 问题 questions\n\n**接口入口** \n```api/v1/```\n\n1. 获取所有问题 \n/questions/ \nGET\n```json\n[\n {\n \"id\": 1,\n \"content\": \"如果人民没有受过民主教育,他们是不应该拥有普选权的。\",\n \"order_id\": 101,\n \"question_type\": 1,\n \"q_id\": \"q101\",\n \"rev\": -1,\n \"short\": \"普选权\"\n },\n {\n \"id\": 2,\n \"content\": \"人权高于主权。\",\n \"order_id\": 102,\n \"question_type\": 1,\n \"q_id\": \"q102\",\n \"rev\": 1,\n \"short\": \"人权与主权\"\n },\n // ...\n]\n```\n\n2. 获取单个问题详情 \n/questions/{id}/ \nGET \n```json\n{\n \"id\": 23,\n \"content\": \"在重大工程项目的决策中,个人利益应该为社会利益让路。\",\n \"order_id\": 303,\n \"question_type\": 3,\n \"q_id\": \"q303\",\n \"rev\": -1,\n \"short\": \"集体利益优先\"\n}\n```\n\n### 回答 answer\n\n1. 添加回答 \n/answers/ \nPOST \n字段: 'time', 'q101', 'q102', 'q104', 'q103', 'q106', 'q105', 'q107', 'q108', 'q109', 'q111', 'q110', 'q112', 'q113', 'q114', 'q115', 'q117', 'q119', 'q116', 'q120', 'q118', 'q301', 'q302', 'q303', 'q304', 'q305', 'q306', 'q309', 'q307', 'q308', 'q310', 'q311', 'q312', 'q313', 'q314', 'q316', 'q317', 'q318', 'q319', 'q320', 'q315', 'q201', 'q202', 'q203', 'q204', 'q205', 'q206', 'q207', 'q208', 'q209', 'q210', 'q1001', 'q1002', 'q1003', 'q1004', 'ip', 'device', 'add'\n\n\n### 统计 stats\n\n1. 获取单个问题回答的分布 \n/stats/question_distribution?id=''\n问题选项数量,比例,均分,和方差\n\n2. 获取总体分布 \n/stats/compass_distribution?type=1\n\n3. 以一个问题选项分类三个坐标平均值 \n/stats/compass_distribution_by_question\n\n4. 以一个问题选项分类所有问题回答均值 \n/stats/question_distribution_by_question \n\n### 分析 analyse\n\n1. 获取两个问题相关性 \n/analyse/dependent?ids=[1,2] \n\n2. 获取坐标相关性 \n(?)"
},
{
"alpha_fraction": 0.7512315511703491,
"alphanum_fraction": 0.7512315511703491,
"avg_line_length": 28.071428298950195,
"blob_id": "4b88340e43821b9d6b24e223a44f0d7e9e50ea34",
"content_id": "430ab20b9714b80ff21276e9c54d8d532167e7ac",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 406,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 14,
"path": "/qaa/admin.py",
"repo_name": "RalfZhang/political-compass-backend",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\n\n# Register your models here.\nfrom .models import Question, Answer, Choice\n\nclass QuestionAdmin(admin.ModelAdmin):\n list_display = ('content', 'q_id', 'short')\n\nclass ChoiceAdmin(admin.ModelAdmin):\n list_display = ('id', 'content', 'order_id' ,'group_id',)\n\nadmin.site.register(Question, QuestionAdmin)\nadmin.site.register(Answer)\nadmin.site.register(Choice, ChoiceAdmin)"
},
{
"alpha_fraction": 0.6776034235954285,
"alphanum_fraction": 0.6947218179702759,
"avg_line_length": 19.625,
"blob_id": "b94bf3a5f2de206f4bd608085221ff249f385021",
"content_id": "475da095c3611030f63b3a92ebf9d64cd9fc2260",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 3114,
"license_type": "no_license",
"max_line_length": 68,
"num_lines": 136,
"path": "/doc/django.md",
"repo_name": "RalfZhang/political-compass-backend",
"src_encoding": "UTF-8",
"text": "开启一个项目\n`django-admin startproject mysite`\n\n数据库 settings.py \n```py\nDATABASES = {\n 'default': {\n # 'ENGINE': 'django.db.backends.sqlite3',\n # 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),\n 'ENGINE': 'django.db.backends.mysql',\n 'NAME': 'pynewtest', ## 数据库名称\n 'USER': 'root',\n 'PASSWORD': '123654Ab', ## 安装 mysql 数据库时,输入的 root 用户的密码\n 'HOST': '127.0.0.1',\n 'PORT': '3306',\n }\n}\n\n\nTIME_ZONE = 'Asia/Shanghai'\n```\n\n创建数据表\n`python manage.py migrate`\n\n 如果报错\n ```\n import MySQLdb as Database\n ModuleNotFoundError: No module named 'MySQLdb'\n ```\n 则 pip install mysqlclient\n ref: https://github.com/PyMySQL/mysqlclient-python\n\n\n\n运行服务器\n`python manage.py runserver [8000]`\n\n创建模型\n`python manage.py startapp qaa`\n\n新建模型类 qaa/models.py\n```py\nfrom django.db import models\nclass Question(models.Model):\n question_text = models.CharField(max_length=200)\n pub_date = models.DateTimeField('date published')\n\n\nclass Choice(models.Model):\n question = models.ForeignKey(Question)\n choice_text = models.CharField(max_length=200)\n votes = models.IntegerField(default=0)\n```\n\n激活模型 mysite/settings.py\n```py\nINSTALLED_APPS = (\n 'django.contrib.admin',\n 'django.contrib.auth',\n 'django.contrib.contenttypes',\n 'django.contrib.sessions',\n 'django.contrib.messages',\n 'django.contrib.staticfiles',\n 'qaa',\n)\n```\n\n检查更新\n`python manage.py makemigrations qaa`\n`python manage.py sqlmigrate qaa 0001`\n\n修改你的模型(在models.py文件中)。\n运行python manage.py makemigrations ,为这些修改创建迁移文件\n运行python manage.py migrate ,将这些改变更新到数据库中。\n\n\n启动 shell 玩转 API\n`python manage.py shell`\n```python\nfrom qaa.models import Question, Choice\nQuestion.objects.all()\nQuestion.objects.filter(id=1)\nQuestion.objects.get(pk=1)\n```\n\n后台管理\n1. 启动\n`python manage.py createsuperuser`\n`python manage.py runserver`\nhttp://localhost:8000/admin/\n2. 添加模块\nqaa/admin.py\n```python\nfrom django.contrib import admin\nfrom .models import Question\nclass QuestionAdmin(admin.ModelAdmin):\n list_display = ('q_id', 'short', 'content') # 列表输出项\nadmin.site.register(Question, QuestionAdmin) # 注册\n```\n\nURL 与视图\nqaa/views.py\n```py\nfrom django.http import HttpResponse\nfrom .models import Question\ndef questions(request):\n questions = Question.objects.all()\n output = ', '.join(p.content for p in questions)\n return HttpResponse(output)\n```\nqaa/urls.py\n```py\nfrom django.conf.urls import url\nfrom . import views\nurlpatterns = [\n url(r'^questions$', views.questions),\n]\n```\nmysite/urls.py\n```py\n url(r'^qaa/', include('qaa.urls')),\n```\n\n\n问题:\npostman CSRF错误:\n设定Headers里的 X-CSRFToken 和 Cookie,值可以参考任意网页 get 请求发送数据\n\n\n\nhttp://python.usyiyi.cn/translate/django_182/intro/tutorial01.html\n\nhttp://blog.csdn.net/SVALBARDKSY/article/details/50548073\n\nhttps://eliyar.biz/django_api_design/"
},
{
"alpha_fraction": 0.6785714030265808,
"alphanum_fraction": 0.7193251252174377,
"avg_line_length": 39.75,
"blob_id": "1e80fd84f16fd5ed259a82acb987bcc3f7b842dc",
"content_id": "388f2da5364ceb42d2e8da35b0f37f46a9a2e47d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4564,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 112,
"path": "/qaa/models.py",
"repo_name": "RalfZhang/political-compass-backend",
"src_encoding": "UTF-8",
"text": "import datetime\n\n# Create your models here.\n\nfrom django.db import models\nfrom django.utils import timezone\n\nclass Choice(models.Model):\n content = models.CharField(max_length=255)\n # question_id = models.ForeignKey(Question)\n value = models.IntegerField()\n group_id = models.IntegerField()\n order_id = models.IntegerField()\n def __str__(self):\n return '%d.%s' % (self.id, self.content)\n def toDict(self):\n return {\n 'content': self.content,\n 'value': self.value,\n 'group_id': self.group_id,\n 'order_id': self.order_id\n }\n\nclass Question(models.Model):\n content = models.CharField(max_length=255)\n order_id = models.IntegerField()\n question_type = models.IntegerField()\n addtion = models.CharField(max_length=50, blank=True)\n q_id = models.CharField(max_length=50)\n rev = models.IntegerField(blank=True)\n short = models.CharField(max_length=50, blank=True)\n choice_group = models.IntegerField(null=True, blank=True)\n def __str__(self):\n if self.short:\n return self.short\n else: \n return self.content\n def toDict(self):\n choices = Choice.objects.filter(group_id = self.choice_group).order_by('order_id')\n choicesList = []\n for item in choices:\n choicesList.append(item.toDict())\n return {\n 'content': self.content,\n 'order_id': self.order_id,\n 'question_type': self.question_type,\n 'addtion': self.addtion,\n 'q_id': self.q_id,\n 'rev': self.rev,\n 'short': self.short,\n 'choice_group': choicesList\n }\n\nclass Answer(models.Model):\n time = models.DateTimeField()\n add = models.CharField(max_length=50, null=True, blank=True)\n q101 = models.IntegerField(null=True, blank=True)\n q102 = models.IntegerField(null=True, blank=True)\n q104 = models.IntegerField(null=True, blank=True)\n q103 = models.IntegerField(null=True, blank=True)\n q106 = models.IntegerField(null=True, blank=True)\n q105 = models.IntegerField(null=True, blank=True)\n q107 = models.IntegerField(null=True, blank=True)\n q108 = models.IntegerField(null=True, blank=True)\n q109 = models.IntegerField(null=True, blank=True)\n q111 = models.IntegerField(null=True, blank=True)\n q110 = models.IntegerField(null=True, blank=True)\n q112 = models.IntegerField(null=True, blank=True)\n q113 = models.IntegerField(null=True, blank=True)\n q114 = models.IntegerField(null=True, blank=True)\n q115 = models.IntegerField(null=True, blank=True)\n q117 = models.IntegerField(null=True, blank=True)\n q119 = models.IntegerField(null=True, blank=True)\n q116 = models.IntegerField(null=True, blank=True)\n q120 = models.IntegerField(null=True, blank=True)\n q118 = models.IntegerField(null=True, blank=True)\n q301 = models.IntegerField(null=True, blank=True)\n q302 = models.IntegerField(null=True, blank=True)\n q303 = models.IntegerField(null=True, blank=True)\n q304 = models.IntegerField(null=True, blank=True)\n q305 = models.IntegerField(null=True, blank=True)\n q306 = models.IntegerField(null=True, blank=True)\n q309 = models.IntegerField(null=True, blank=True)\n q307 = models.IntegerField(null=True, blank=True)\n q308 = models.IntegerField(null=True, blank=True)\n q310 = models.IntegerField(null=True, blank=True)\n q311 = models.IntegerField(null=True, blank=True)\n q312 = models.IntegerField(null=True, blank=True)\n q313 = models.IntegerField(null=True, blank=True)\n q314 = models.IntegerField(null=True, blank=True)\n q316 = models.IntegerField(null=True, blank=True)\n q317 = models.IntegerField(null=True, blank=True)\n q318 = models.IntegerField(null=True, blank=True)\n q319 = models.IntegerField(null=True, 
blank=True)\n q320 = models.IntegerField(null=True, blank=True)\n q315 = models.IntegerField(null=True, blank=True)\n q201 = models.IntegerField(null=True, blank=True)\n q202 = models.IntegerField(null=True, blank=True)\n q203 = models.IntegerField(null=True, blank=True)\n q204 = models.IntegerField(null=True, blank=True)\n q205 = models.IntegerField(null=True, blank=True)\n q206 = models.IntegerField(null=True, blank=True)\n q207 = models.IntegerField(null=True, blank=True)\n q208 = models.IntegerField(null=True, blank=True)\n q209 = models.IntegerField(null=True, blank=True)\n q210 = models.IntegerField(null=True, blank=True)\n q1001 = models.IntegerField(null=True, blank=True)\n q1002 = models.IntegerField(null=True, blank=True)\n q1003 = models.IntegerField(null=True, blank=True)\n q1004 = models.IntegerField(null=True, blank=True)\n ip = models.CharField(max_length=255, blank=True)\n device = models.CharField(max_length=255, blank=True)\n"
},
{
"alpha_fraction": 0.695652186870575,
"alphanum_fraction": 0.6970546841621399,
"avg_line_length": 30,
"blob_id": "39d978233932b2a1df121df1467bdccbe7e86134",
"content_id": "a071084b9c01d991b07c2b55c967856d6ad9bff0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 713,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 23,
"path": "/backend/urls.py",
"repo_name": "RalfZhang/political-compass-backend",
"src_encoding": "UTF-8",
"text": "from django.conf.urls import include, url\nfrom django.contrib import admin\nfrom rest_framework import routers\nimport sys\nsys.path.append(\"..\")\nfrom qaa import views\n\nrouter = routers.DefaultRouter()\nrouter.register(r'questions', views.QuestionViewSet)\nrouter.register(r'answers', views.AnswerViewSet)\nrouter.register(r'choices', views.ChoiceViewSet)\n\nurlpatterns = [\n # Examples:\n # url(r'^$', 'backend.views.home', name='home'),\n # url(r'^blog/', include('blog.urls')),\n url(r'^admin/', include(admin.site.urls)),\n\n url(r'^api/rest/question/', include(router.urls)),\n url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),\n\n url(r'^api/v1/', include('qaa.urls')),\n]\n"
},
{
"alpha_fraction": 0.4681861400604248,
"alphanum_fraction": 0.6258309483528137,
"avg_line_length": 57.55555725097656,
"blob_id": "c5586ad8274aea9053cb34df8c12a7256bcc6c1c",
"content_id": "c525e40d2a6227dc64515a7d8d55a861c2097683",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1053,
"license_type": "no_license",
"max_line_length": 480,
"num_lines": 18,
"path": "/qaa/serializers.py",
"repo_name": "RalfZhang/political-compass-backend",
"src_encoding": "UTF-8",
"text": "from .models import Question, Answer, Choice\nfrom rest_framework import serializers\n\nclass QuestionSerializer(serializers.HyperlinkedModelSerializer):\n # choice = ChoiceSerializer()\n class Meta:\n model = Question\n fields = ('id', 'content', 'order_id', 'question_type', 'q_id', 'rev', 'short', 'choice_group')\n\nclass ChoiceSerializer(serializers.HyperlinkedModelSerializer):\n class Meta:\n model = Choice\n fields = ('content', 'value', 'order_id', 'group_id')\n\nclass AnswerSerializer(serializers.HyperlinkedModelSerializer):\n class Meta: \n model = Answer\n fields = ('time', 'q101', 'q102', 'q104', 'q103', 'q106', 'q105', 'q107', 'q108', 'q109', 'q111', 'q110', 'q112', 'q113', 'q114', 'q115', 'q117', 'q119', 'q116', 'q120', 'q118', 'q301', 'q302', 'q303', 'q304', 'q305', 'q306', 'q309', 'q307', 'q308', 'q310', 'q311', 'q312', 'q313', 'q314', 'q316', 'q317', 'q318', 'q319', 'q320', 'q315', 'q201', 'q202', 'q203', 'q204', 'q205', 'q206', 'q207', 'q208', 'q209', 'q210', 'q1001', 'q1002', 'q1003', 'q1004', 'ip', 'device', 'add')"
},
{
"alpha_fraction": 0.5258620977401733,
"alphanum_fraction": 0.5258620977401733,
"avg_line_length": 8,
"blob_id": "f6dc5e9c72af26d71cd02e0649bd504512acfda5",
"content_id": "8abb407090eb4905d4427d09420e5aca6e1b34c3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 190,
"license_type": "no_license",
"max_line_length": 20,
"num_lines": 13,
"path": "/doc/devInfo.md",
"repo_name": "RalfZhang/political-compass-backend",
"src_encoding": "UTF-8",
"text": "# 开发备注 \n\n## Git \n\n### Git 提交 \n\n`[类别] 主体内容`\n\n类别项目:\n- [feat] 添加新功能\n- [update] 对已有功能进行更新\n- [fix] 修复 bug\n- [doc] 更新文档"
}
] | 8 |
umerfarooq01/OOP-in-Python
|
https://github.com/umerfarooq01/OOP-in-Python
|
8bf15a91bc51bc4189dd7cc696fdae101176da9e
|
2ee63b88406888776a79b4126f0b1b4c7eba8285
|
22325d3c9af19f7a1f3390eb23a13deb3c4706bd
|
refs/heads/master
| 2021-05-20T19:00:06.825606 | 2020-04-02T07:14:48 | 2020-04-02T07:14:48 | 252,381,880 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6601941585540771,
"alphanum_fraction": 0.6796116232872009,
"avg_line_length": 19.799999237060547,
"blob_id": "f4a5b46c92d9ee8c078fbf2b5ce73a09bf82b59c",
"content_id": "496c9f0d4f5e9a857db2b56bb25d39dc1f009261",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 103,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 5,
"path": "/class.py",
"repo_name": "umerfarooq01/OOP-in-Python",
"src_encoding": "UTF-8",
"text": "# Here we learn how to create a class in python \nclass course:\n pass\n# c1=course()\n# print(type(c1))"
},
{
"alpha_fraction": 0.643750011920929,
"alphanum_fraction": 0.6625000238418579,
"avg_line_length": 21.428571701049805,
"blob_id": "8bb74e76759c48abacc4599ae3f4ae6a954bc161",
"content_id": "44e5366b145888c28b869d227c4b490264baa782",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 160,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 7,
"path": "/properties.py",
"repo_name": "umerfarooq01/OOP-in-Python",
"src_encoding": "UTF-8",
"text": "# Here we learn about class properties in python\nclass course:\n name=\"Python\"\n students=[\"Umer\",\"Adil\",\"Rehman\"]\n# c1=course()\n# c1.name\n# c1.students "
},
{
"alpha_fraction": 0.6325088143348694,
"alphanum_fraction": 0.6360424160957336,
"avg_line_length": 34.375,
"blob_id": "28d2a7478d124309583033a9cb6ccb66ac77774b",
"content_id": "a9264070c69f651643ad1ce8a2ea4d6d9ae3e24f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 566,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 16,
"path": "/private_fun.py",
"repo_name": "umerfarooq01/OOP-in-Python",
"src_encoding": "UTF-8",
"text": "# Here we learn about Python Class Private Functions\nclass course:\n def __init__(self, name):\n # When we want to private a property in python we just use __ in the start of the property name\n self.__name=\"Python\"\n self.students=[]\n def add_students(self,student):\n self.students.append(student) \n self.__student_write(student)\n def students_count(self):\n return len(self.students) \n def __student_write(self,student):\n print(\"Hello \"+ student) \n\nc1=course(\"Python\")\nprint(c1.add_students(\"Umer\"))\n"
},
{
"alpha_fraction": 0.6292682886123657,
"alphanum_fraction": 0.6414633989334106,
"avg_line_length": 26.399999618530273,
"blob_id": "c26998cd5759473253d89604077363184b1dccbd",
"content_id": "ab4f8535aa8595db6f9744e5f6735d6cdaf96eb9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 410,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 15,
"path": "/return.py",
"repo_name": "umerfarooq01/OOP-in-Python",
"src_encoding": "UTF-8",
"text": "# Here we learn about Class Function return\nclass course:\n def __init__(self, name):\n self.name=\"Python\"\n self.students=[]\n def add_students(self,student):\n self.students.append(student) \n def students_count(self):\n return len(self.students) \n\n# c1=course(\"Python\")\n# c1.add_students(\"Umer\")\n# c1.add_students(\"Farooq\")\n# print(c1.students)\n# print(c1.students_count())"
},
{
"alpha_fraction": 0.5583333373069763,
"alphanum_fraction": 0.5833333134651184,
"avg_line_length": 20.18181800842285,
"blob_id": "8b8e242acf46c869a468be8b994fb53b9e296775",
"content_id": "9dd7f8ccc2791a3e4edaf7b772d46d31699e857c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 240,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 11,
"path": "/class_init.py",
"repo_name": "umerfarooq01/OOP-in-Python",
"src_encoding": "UTF-8",
"text": "class course:\n def __init__(self, name):\n self.name=\"Python\"\n self.students=[]\n# c1=course(\"Python\")\n# c1.name\n# c1.students \n# Here we have two seperate instances.\n# c1=course(\"Html\")\n# c1.name\n# c1.students "
},
{
"alpha_fraction": 0.6361031532287598,
"alphanum_fraction": 0.6504297852516174,
"avg_line_length": 25.923076629638672,
"blob_id": "bf46193677af2e1abcedd29de35463f0961a9999",
"content_id": "50b3c61d77036183264fcc15de318cf9b3b6ddfe",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 349,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 13,
"path": "/class_fun.py",
"repo_name": "umerfarooq01/OOP-in-Python",
"src_encoding": "UTF-8",
"text": "# Here we learn how to create Python Class Functions\nclass course:\n def __init__(self, name):\n self.name=\"Python\"\n self.students=[]\n def add_students(self,student):\n self.students.append(student) \n\n# c1=course(\"Python\")\n# c1.add_students(\"Umer\")\n# c1.add_students(\"Farooq\")\n# print(c1.students)\n# print(len(c1.students))"
},
{
"alpha_fraction": 0.6441717743873596,
"alphanum_fraction": 0.650306761264801,
"avg_line_length": 36.07692337036133,
"blob_id": "a8d41e3e0ffa2da187fafcf44c41c7b4f9b17a0c",
"content_id": "10b685f4b9aa4a06362c5eda9b3bbe336e1a11e1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 489,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 13,
"path": "/private.py",
"repo_name": "umerfarooq01/OOP-in-Python",
"src_encoding": "UTF-8",
"text": "# Here we learn about Python Private Properties\nclass course:\n def __init__(self, name):\n # When we want to private a property in python we just use __ in the start of the property name\n self.__name=\"Python\"\n self.students=[]\n def add_students(self,student):\n self.students.append(student) \n def students_count(self):\n return len(self.students) \nc1=course(\"Python\")\n# Now the name varable name is not shown in c1. \nprint(c1.students) "
}
] | 7 |
rrw23/NoiseAndVibrationTools
|
https://github.com/rrw23/NoiseAndVibrationTools
|
7589bceceb5aafc64574aff7e7a44be76618ce26
|
e3fb19e2ce1d02b4820e52b8acfbd0730fdc52f9
|
95f325b85186ba4cb825dd6554743e9adcfa029f
|
refs/heads/master
| 2021-07-19T13:14:38.765595 | 2017-10-23T06:37:51 | 2017-10-23T06:37:51 | 104,631,158 | 0 | 2 | null | 2017-09-24T08:55:53 | 2017-09-24T08:56:03 | 2017-10-23T06:37:52 |
Python
|
[
{
"alpha_fraction": 0.7566964030265808,
"alphanum_fraction": 0.7633928656578064,
"avg_line_length": 48.66666793823242,
"blob_id": "7bd17e31e3a3c1b9a48145bb9081779b1a07725d",
"content_id": "de8613be71ffda4734d7e3953e8664f1ea659c66",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "INI",
"length_bytes": 448,
"license_type": "no_license",
"max_line_length": 303,
"num_lines": 9,
"path": "/.spyproject/workspace.ini",
"repo_name": "rrw23/NoiseAndVibrationTools",
"src_encoding": "UTF-8",
"text": "[workspace]\nrestore_data_on_startup = True\nsave_data_on_exit = True\nsave_history = True\nsave_non_project_files = False\n\n[main]\nversion = 0.1.0\nrecent_files = ['D:\\\\Python Development\\\\General Noise Tools\\\\NoiseAndVibrationTools\\\\acoustics\\\\empericalTools.py', 'D:\\\\Python Development\\\\General Noise Tools\\\\NoiseAndVibrationTools\\\\acoustics\\\\__init__.py', 'D:\\\\Python Development\\\\General Noise Tools\\\\NoiseAndVibrationTools\\\\acoustics\\\\tools.py']\n\n"
},
{
"alpha_fraction": 0.6363979578018188,
"alphanum_fraction": 0.6998301148414612,
"avg_line_length": 38.529850006103516,
"blob_id": "1e1ab607a73ff2db661b130b5fbd9c02dea30107",
"content_id": "8c5ff47aa9159fb98e09701839818a56ce545861",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5297,
"license_type": "no_license",
"max_line_length": 101,
"num_lines": 134,
"path": "/acoustics/empericalTools.py",
"repo_name": "rrw23/NoiseAndVibrationTools",
"src_encoding": "UTF-8",
"text": "\"\"\"\nCreated on Sun Oct 1 08:03:39 2017\n\n@author: Robin Wareing\n\nSeries of functions to produce emperical sound powers/pressures\nBased on Chapter 11 of Engineering Noise Control, Bies & Hansen\n\nVibration based on TRL and NZTA research report 268?\n\"\"\"\nimport math\n\ndef BladePassFrequency(numBlades,RPM):\n '''Calculates blade pass frequency of a fan'''\n return numBlades*RPM*1./60\n\ndef LargeCompressorExt(compressorType,compressorPower):\n '''Calculates the noise emissions of a large compressor.\n Using Eqns. 11.13, 11.14, 11.15\n Compressor types: rotary/reciprocating, centrifugal - casing,\n centrifugal - inlet'''\n if compressorType == \"rotary/reciprocating\":\n Lw = 90+math.log10(compressorPower)\n elif compressorType == \"centrifugal - casing\":\n Lw = 79+math.log10(compressorPower) \n elif compressorType == \"centrifugal - inlet\":\n Lw = 80+math.log10(compressorPower)\n return Lw\n\ndef LargeCompressorExtOctBand(compressorType,compressorPower):\n '''Produces 1/1 octave band compressor noise'''\n Lw = LargeCompressorExt(compressorType,compressorPower)\n if compressorType == \"rotary/reciprocating\":\n correction = [11,15,10,11,13,10,5,8,15]\n elif compressorType == \"centrifugal - casing\":\n correction = [10,10,11,13,13,11,7,8,12]\n elif compressorType == \"centrifugal - inlet\":\n correction = [18,16,14,10,8,6,5,10,16]\n LwBands = []\n for item in correction:\n LwBands.append(Lw-item)\n return LwBands\n\ndef CoolingTower(coolingTowerType,coolingTowerPower):\n '''Calculates the noise emissions of a large compressor.\n Using Eqns. 11.16, 11.17, 11.18, 11.19\n Compressor types: propeller, centrifugal'''\n if coolingTowerType == \"propeller\":\n if coolingTowerPower <= 75:\n Lw = 100+8*math.log10(coolingTowerPower)\n elif coolingTowerPower > 75:\n Lw = 96+10*math.log10(coolingTowerPower)\n elif coolingTowerType == \"centrifugal\":\n if coolingTowerPower <= 60:\n Lw = 85+11*math.log10(coolingTowerPower)\n elif coolingTowerPower > 60:\n Lw = 93+7*math.log10(coolingTowerPower)\n return Lw\n\ndef CoolingTowerOctBand(coolingTowerType,coolingTowerPower):\n '''Produces 1/1 octave band compressor noise'''\n Lw = CoolingTower(coolingTowerType,coolingTowerPower)\n if coolingTowerType == \"propeller\":\n correction = [8,5,5,8,11,15,18,21,29]\n elif coolingTowerType == \"centrifugal\":\n correction = [6,6,8,10,11,13,12,18,25]\n LwBands = []\n for item in correction:\n LwBands.append(Lw-item)\n return LwBands\n\ndef TunnelingVibration(tunnelDist):\n '''From Table E1 in BS5228-2:2009'''\n return 180./(math.pow(tunnelDist,1.3))\n\ndef TunnelingGroundBournNoise(tunnelDist):\n '''From Table E1 in BS5228-2:2009'''\n return 127-54*math.log10(tunnelDist)\n\ndef VibroStoneColumns(receiverDist):\n '''From Table E1 in BS5228-2:2009\n Produces a range of vibration levels\n [5%, 33%, 50%]'''\n v5Percent = 95./math.pow(receiverDist)\n v33Percent = 44./math.pow(receiverDist)\n v50Percent = 33./math.pow(receiverDist)\n return [v5Percent,v33Percent,v50Percent]\n\ndef BlastionOverpressure(chargeMass,receiverDist,siteCondition):\n if siteCondition == \"Unconfined\":\n siteConstant = 516\n elif siteCondition == \"Confined\":\n siteConstant = 100\n overpressure = siteConstant*(receiverDist/(chargeMass**(1/3)))**-1.45\n return overpressure\n\ndef AirBlastNoise(chargeMass,receiverDist,siteCondition):\n overpressure = BlastionOverpressure(chargeMass,receiverDist,siteCondition)\n return 20*math.log10(overpressure/0.02)\n\ndef 
VibrationCompaction(runCondition,drumAmplitude,drumNumber,drumWidth,receiverDist):\n if runCondition == \"steady-state\":\n v5Percent = 75*math.sqrt(drumNumber)*(drumAmplitude/(receiverDist+drumWidth))**1.5\n v33Percent = 143*math.sqrt(drumNumber)*(drumAmplitude/(receiverDist+drumWidth))**1.5\n v50Percent = 276*math.sqrt(drumNumber)*(drumAmplitude/(receiverDist+drumWidth))**1.5\n elif runCondition == \"start-up\":\n v5Percent = 65*math.sqrt(drumNumber)*((drumAmplitude**1.5)/((receiverDist+drumWidth)**1.3))\n v33Percent = 106*math.sqrt(drumNumber)*((drumAmplitude**1.5)/((receiverDist+drumWidth)**1.3))\n v50Percent = 177*math.sqrt(drumNumber)*((drumAmplitude**1.5)/((receiverDist+drumWidth)**1.3))\n return [v5Percent,v33Percent,v50Percent]\n\ndef PercussivePiling(pileDiveCondition,hammerEnergy,receiverDist):\n if pileDiveCondition == \"to refusal\":\n scalingFactor = 5\n elif pileDiveCondition == \"driven through\":\n scalingFactor = 3\n elif pileDiveCondition == \"not driven through\":\n scalingFactor = 1.5\n return scalingFactor*(math.sqrt(hammerEnergy)/(receiverDist**1.3))\n\ndef VibroPiling(receiverDist,runCondition):\n if runCondition == \"start-up\":\n decayFactor = 1.2\n elif runCondition == \"steady-state\":\n decayFactor = 1.4\n else:\n decayFactor = 1.3\n v5Percent = 60/(receiverDist**decayFactor)\n v33Percent = 126/(receiverDist**decayFactor)\n v50Percent = 266/(receiverDist**decayFactor)\n return [v5Percent,v33Percent,v50Percent]\n\ndef DynamicCompaction(receiverDist,damperEnergy):\n return 0.037*(math.sqrt(damperEnergy)/receiverDist)**1.7\n"
},
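A short usage sketch for the empirical tools above; the input values are illustrative only and do not come from the repo.

```python
# Usage sketch for empericalTools.py -- assumes it is importable from the
# acoustics/ directory.
import empericalTools as et

# Blade pass frequency of a 6-blade fan at 1450 RPM: 6 * 1450 / 60 = 145 Hz.
print(et.BladePassFrequency(6, 1450))            # 145.0

# Overall and 1/1-octave-band sound power of a 50 kW propeller cooling tower.
print(et.CoolingTower("propeller", 50))          # 100 + 8*log10(50) ~= 113.6 dB
print(et.CoolingTowerOctBand("propeller", 50))   # nine band levels (overall minus corrections)
```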
{
"alpha_fraction": 0.48814329504966736,
"alphanum_fraction": 0.571308434009552,
"avg_line_length": 33.8768310546875,
"blob_id": "4689cb8b8f98085520dc71110c17f5fd80a9eabf",
"content_id": "8c6c6054d528caaa75242c0bc4fd49881fc19165",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 11892,
"license_type": "no_license",
"max_line_length": 142,
"num_lines": 341,
"path": "/acoustics/tools.py",
"repo_name": "rrw23/NoiseAndVibrationTools",
"src_encoding": "UTF-8",
"text": "# Basic tools to calculate a range of acoustic properties\n#\n# Authors:\n# Robin Wareing - [email protected]\n# John Bull - \n# Michael Smith\n#\n# Sources:\n# Engineering Noise Control - Fourth Edition - Bies & Hansen\n# Guide to assessing road traffic noise - NZ Trasport Agency\n# BS5228-1\n# BS5228-2\n\nimport math\n\ndef BS5228_PropigationLoss(receiverDist,sourceDist):\n '''Calculates the propigation loss over a set distance using BS5338-1 method'''\n if receiverDist < 25:\n propigationLoss = 20*math.log10(1.*receiverDist/sourceDist)\n else:\n propigationLoss = 25*math.log10(1.*receiverDist/sourceDist)-2\n return propigationLoss\n\ndef OpenSpacePowerToPressure(soundPowerLevel,distance,geometricFactor):\n '''Basic conversion from sound power to sound pressure. \n Based on geometical spreading with no ground absorption'''\n if geometricFactor == \"spherical\":\n Q = 1\n elif geometricFactor == \"hemispherical\":\n Q = 2\n elif geometricFactor == \"quarter-spherical\":\n Q = 4\n elif geometricFactor == \"eigth-spherical\":\n Q = 8\n soundPressure = soundPowerLevel-math.log10((4*math.pi*distance**2)/Q)\n return soundPressure\n\ndef BuildThirdOctave(startFreq = 12.5,stopFreq = 20000):\n '''Generates array of 1/3rd octaves between start and stop frequency'''\n referenceThirdOctaves = [12.5,16,20,25,\n 31.5,40,50,63,\n 80,100,125,160,\n 200,250,315,400,\n 500,630,800,1000,\n 1250,1600,2000,2500,\n 3150,4000,5000,6300,\n 8000,10000,12500,16000,\n 20000]\n thirdOctaves = []\n for band in referenceThirdOctaves:\n if band >= startFreq and band <= stopFreq:\n thirdOctaves.append(band)\n return thirdOctaves\n \ndef BuildOctave(startFreq = 16,stopFreq = 10000):\n '''Generates array of octaves between start and stop frequency'''\n referenceOctaves = [16,31.5,63,125,\n 250,500,1000,2000,\n 4000,8000,10000]\n\n octaves = []\n for band in referenceOctaves:\n if band >= startFreq and band <= stopFreq:\n octaves.append(band) \n return octaves\n\ndef SpeedOfSound(temp = 20):\n '''Calculates speed of sound based on air temperature'''\n R = 8.314\n M = 0.029\n y = 1.4\n T = 273 + temp\n return math.sqrt(y*R*T/M)\n\ndef AirDensity(temp = 20):\n R = 287.058\n P = 101.325\n T = 273 + temp\n return P/(R*T)\n\ndef Wavelength(frequency):\n C = SpeedOfSound()\n return C/frequency\n\ndef ThirdOctaveWeightingCurves(weightingType,startFreq,stopFreq):\n '''Generates a 2D array of third octave band centre frequencies,\n and weighting values for A and C weighing curves'''\n if weightingType == \"A\":\n rawWeighting = [[10,12.5,16,20,\n 35,31.5,40,50,\n 63,80,100,125,\n 160,200,250,315,\n 400,500,630,800,\n 1000,1250,1600,2000,\n 2500,3150,4000,5000,\n 6300,8000,10000,12500,\n 16000,20000],\n [-70.4,-63.4,-56.7,-50.5,\n -44.7,-39.4,-34.6,-30.2,\n -26.2,-22.5,-19.1,-16.1,\n -13.4,-10.9,-8.6,-6.6,\n -4.8,-3.2,-1.9,-0.8,\n 0.0,0.6,1.0,1.2,\n 1.3,1.2,1.0,0.5,\n -0.1,-1.1,-2.5,-4.3,\n -6.6,-9.3]]\n elif weightingType == \"C\":\n rawWeighting = [[10,12.5,16,20,\n 35,31.5,40,50,\n 63,80,100,125,\n 160,200,250,315,\n 400,500,630,800,\n 1000,1250,1600,2000,\n 2500,3150,4000,5000,\n 6300,8000,10000,12500,\n 16000,20000],\n [-14.3,-11.2,-8.5,-6.2,\n -4.4,-3.0,-2.0,-1.3,\n -0.8,-0.5,-0.3,-0.2,\n -0.1,0.0,0.0,0.0,\n 0.0,0.0,0.0,0.0,\n 0.0,0.0,-0.1,-0.2,-0.3,\n -1.3,-2.0,-3.0,-4.4,\n -6.2,-8.5,-11.2]]\n weights = []\n frequencies = []\n for i in range(len(rawWeighting[0])):\n if rawWeighting[0][i] >= startFreq and rawWeighting[0][i] <= stopFreq:\n weights.append(rawWeighting[1][i])\n frequencies.append(rawWeighting[0][i])\n 
weighting = [frequencies,weights]\n return weighting\n\ndef OctaveWeightingCurves(weightingType,startFreq,stopFreq):\n '''Generates a 2D array of octave band centre frequencies,\n and weighting values for A and C weighing curves'''\n if weightingType == \"A\":\n rawWeighting = [[31.5,63,125,\n 250,500,1000,2000,\n 4000,8000],\n [-39.4,-26.2,-16.1,-8.6,\n -3.2,0,1.2,1,-1.1\n ]]\n elif weightingType == \"C\":\n rawWeighting = [[31.5,63,125,\n 250,500,1000,2000,\n 4000,8000],\n [-3,-0.8,-0.2,0,\n 0,0,-0.2,-0.8,-3]]\n weights = []\n frequencies = []\n for i in range(len(rawWeighting[0])):\n if rawWeighting[0][i] >= startFreq and rawWeighting[0][i] <= stopFreq:\n weights.append(rawWeighting[1][i])\n frequencies.append(rawWeighting[0][i])\n weighting = [frequencies,weights]\n return weighting\n \ndef dBA(levels,freq,bandType):\n '''Calculates total dBA level for either 1/3 or 1/1 octave band data'''\n weightedLevel = []\n if bandType == \"1/1\":\n weightingCurve = OctaveWeightingCurves(\"A\",freq[0],freq[-1])\n elif bandType == \"1/3\":\n weightingCurve = ThirdOctaveWeightingCurves(\"A\",freq[0],freq[-1])\n for i in range(len(levels)):\n weightedLevel.append(levels[i] + weightingCurve[1][i])\n return dBadd(weightedLevel)\n \ndef dBC(levels,freq,bandType):\n '''Calculates total dBC level for either 1/3 or 1/1 octave band data'''\n weightedLevel = []\n if bandType == \"1/1\":\n weightingCurve = OctaveWeightingCurves(\"C\",freq[0],freq[-1])\n elif bandType == \"1/3\":\n weightingCurve = ThirdOctaveWeightingCurves(\"C\",freq[0],freq[-1])\n for i in range(len(levels)):\n weightedLevel.append(levels[i] + weightingCurve[1][i])\n return dBadd(weightedLevel)\n \ndef dBadd(levels):\n '''Performs dB addition (logarithmic addition)'''\n pressureAbs = 0\n for level in levels:\n pressureAbs = pressureAbs + 10**(level/10)\n return 10*math.log10(pressureAbs)\n \ndef dBavg(levels):\n '''Performs dB averaging (logarithmic average)'''\n pressureAbs = 0\n for level in levels:\n pressureAbs = pressureAbs + 10**(level/10)\n return 10*math.log10(pressureAbs/len(levels))\n \ndef SimpleCRTN(annualAvgTraffic,percentageHeavyVehicles,speed,gradient,\n surfaceCorrection,receiverDist,receiverHeight,\n angleOfView,percentAbsorption):\n ''' Performs a simple CoRTN calculation,.\n This does NOT include screening/barriers'''\n Cdist = -10.0*math.log10(receiverDist/13.5)\n Cuse = (33.0*math.log10(speed+40+(500./speed))+\n 10.0*math.log10(1+(5.0*percentageHeavyVehicles/speed))-\n 68.8)\n Cgrad = 0.2*gradient\n Ccond = 0\n if receiverHeight >= 1.0 and receiverHeight <= (receiverDist/3-1.2):\n Cground = 5.2*percentAbsorption*math.log10(3*receiverHeight/(receiverDist+3.5))\n elif receiverHeight > (receiverDist/3-1.2):\n Cground = 0\n elif receiverHeight <= 1.0:\n Cground = 5.2*percentAbsorption*math.log10(3/(receiverDist+3.5)) \n Cbarrier = 0\n Cview = 10*math.log10(angleOfView/180)\n LA10 = (29.1 + 10*math.log10(annualAvgTraffic)+Cdist+Cuse+Cgrad+Ccond+\n Cground+Cbarrier+Cview)\n LAeq = LA10 - 3\n return LAeq\n \ndef SimpleISO9140():\n '''Nothin in here yet'''\n\ndef SourceCountCorrectinon(num):\n '''Correction factor for multiple sources'''\n return 10*math.log10(num)\n\ndef DutyCycleCorrection(dutyCycle):\n '''Correction factor for duty cycle of sources'''\n return 10*math.log10(dutyCycle)\n\ndef NZS6806_Category(level,roadType):\n '''Returns the NZS6806 category (A, B or C) based on the input level and \n road type (New, New(HighFlow), or Altered)'''\n if roadType == \"New\":\n if level <= 57:\n return \"A\"\n elif level > 57 
and level <= 64:\n return \"B\"\n elif level > 64:\n return \"C\"\n if roadType == \"New(HighFlow)\":\n if level <= 64:\n return \"A\"\n elif level > 64 and level <= 67:\n return \"B\"\n elif level > 67:\n return \"C\"\n if roadType == \"Altered\":\n if level <= 64:\n return \"A\"\n elif level > 64 and level <= 67:\n return \"B\"\n elif level > 67:\n return \"C\"\n\ndef RoadSurfaceCorrection():\n '''nothin in 'ere yet...'''\n\ndef TeirOneRoadTrafficScreen(numberOfPPFs,AADT):\n '''Performs teir 1 assessment of road traffic noise'''\n '''PPF risk rating'''\n if numberOfPPFs == 0:\n riskPPFs = \"N/A\"\n elif numberOfPPFs > 0 and numberOfPPFs <= 50:\n riskPPFs = \"Low\"\n elif numberOfPPFs > 50 and numberOfPPFs <= 200:\n riskPPFs = \"Medium\"\n elif numberOfPPFs > 200:\n riskPPFs = \"High\"\n '''AADT risk rating'''\n if AADT <= 2000:\n riskAADT = \"N/A\"\n elif numberOfPPFs > 2000 and numberOfPPFs <= 10000:\n riskAADT = \"Low\"\n elif numberOfPPFs > 10000 and numberOfPPFs <= 50000:\n riskAADT = \"Medium\"\n elif numberOfPPFs > 50000:\n riskAADT = \"High\"\n '''Total risk rating'''\n if riskPPFs == \"N/A\" or riskAADT == \"N/A\":\n teir1Risk = \"N/A\"\n elif riskPPFs == \"Low\" and riskAADT == \"Low\":\n teir1Risk = \"Low\"\n elif riskPPFs == \"High\" or riskAADT == \"High\":\n teir1Risk = \"High\"\n else:\n teir1Risk = \"Medium\"\n return [riskPPFs,riskAADT,teir1Risk]\n \ndef InfiniteSeriesOfPoints(coherance,sourceSpacing,receiverDist,sourcePower):\n speedOfSound = SpeedOfSound(20)\n airDensity = AirDensity(20)\n if coherance == \"Coherant\":\n sourceLevel = (sourcePower - 6 - 10*math.log10(receiverDist)- 10*math.log10(sourceSpacing)+10*math.log10(speedOfSound*airDensity/400))\n elif coherance == \"Incoherant\":\n sourceLevel = (sourcePower - 8 - 10*math.log10(receiverDist)- 10*math.log10(sourceSpacing)+10*math.log10(speedOfSound*airDensity/400))\n return sourceLevel\n \ndef DirectivityIndex(directivityFactor):\n if directivityFactor == 1:\n directivityIndex = 0\n elif directivityFactor == 2:\n directivityIndex = 3\n elif directivityFactor == 3:\n directivityIndex = 6\n elif directivityFactor == 4:\n directivityIndex = 9\n return directivityIndex\n\ndef VibrationAtADistance(soilAttenuation,measuredVibration,measurementDist,receiverDist):\n return measuredVibration*((measurementDist/receiverDist)**0.5)*math.exp(-1*soilAttenuation*(receiverDist-measurementDist))\n\n\n#==============================================================================\n# \n# Below is a list of items to be included into the tool set (RW 20170930)\n# Add/update this list accordingly.\n#\n# In additin \n#\n# - flow resistivity\n# - Wavenumber\n# - ISO9613-2 propagation tools\n# - CONCAWE propagation tools\n# - CNOSSOS propagation tools\n# - NZS6806 screening tools (teir 1 assessment done)\n# Propagation loss for the following:\n# - Point source\n# - line source\n# - plane source\n# - array of points source\n# - Angle of view correction\n# - SEL calculation tools\n# Single number ratings:\n# - Rw\n# -Ctr\n# -Rw+C\n# -Rw+Ctr\n# -STC\n# - Ln,w\n# - IIC\n#=============================================================================="
}
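A quick check of the dB helpers above; a sketch only, assuming it runs from the acoustics/ directory so that `tools` is importable.

```python
# Illustrative use of tools.py -- values chosen for easy mental arithmetic.
from tools import dBadd, dBavg, BuildOctave

# Two equal 60 dB sources combine to ~63 dB: 10*log10(2 * 10**(60/10.)).
print(dBadd([60, 60]))        # ~63.01
print(dBavg([60, 60]))        # 60.0 -- the logarithmic average of the two levels
print(BuildOctave(63, 8000))  # [63, 125, 250, 500, 1000, 2000, 4000, 8000]
```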
] | 3 |
luckyfrog1/flask_web
|
https://github.com/luckyfrog1/flask_web
|
03cf61ad68ea64e2b55bfe54ad6354f5c4c07ec5
|
d067c0c53c9c0a6480055375483fabe088cbd358
|
2b03d6bd5ae481e30529aa6064dd06e00403ab73
|
refs/heads/master
| 2018-11-07T15:10:57.776125 | 2018-11-06T09:15:25 | 2018-11-06T09:15:25 | 136,686,499 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5427441596984863,
"alphanum_fraction": 0.5573471188545227,
"avg_line_length": 23.348148345947266,
"blob_id": "69d7be62b4951b2662d2f942c8b8821466e504f4",
"content_id": "5a73a4431a30f3648845186b16e5b0ad3b63de9c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3435,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 135,
"path": "/flask_web.py",
"repo_name": "luckyfrog1/flask_web",
"src_encoding": "UTF-8",
"text": "from flask import Flask,url_for,request,render_template,flash,redirect,jsonify\nfrom wtforms import FileField, StringField, TextAreaField\nfrom wtforms.validators import DataRequired\nfrom flask_wtf import FlaskForm\nimport requests\nfrom werkzeug.utils import secure_filename\nimport os\n\npath = os.getcwd()\nprint(path)\n\nclass UploadForm(FlaskForm):\n file = FileField()\n\nclass ApiForm(FlaskForm):\n text1 = StringField()\n text2 = StringField()\n excel = FileField()\n textArea = TextAreaField()\n\n\n\n\napp = Flask(__name__)\napp.config[\"SECRET_KEY\"] = \"12345678\"\nupload_path = os.path.join(path, 'media')\n# 将当前文件所在的目录和medias拼接,作为接收文件的路径\n\n# 路由\n\[email protected]('/')\ndef index():\n return render_template('index.html')\n\[email protected]('/exceltools', methods=['GET', 'POST'])\ndef excel():\n form = UploadForm()\n # 创建一个对象\n if request.method == 'POST':\n file = form.data['file']\n print(form.data)\n print(file)\n # 获取表单提交的文件\n # 打印文件名\n if file:\n print(file.filename)\n path_save = os.path.join(upload_path, file.filename)\n # 生成save路径\n try:\n file.save(path_save)\n # 保存文件\n return jsonify({'tasks': {\"success\": \"true\"}})\n # 上传成功后回传json信息\n except:\n return jsonify({'tasks': {\"success\": \"false\"}})\n return render_template(\"excel.html\", form=form)\n\[email protected]('/more')\ndef h2():\n return render_template(\"more.html\")\n\[email protected]('/js')\ndef js():\n return render_template(\"js.html\")\n# @app.route('/excelsumbit')\n# def excelsubmit():\n\n\[email protected]('/form')\ndef projects():\n return render_template(\"form.html\")\n\[email protected]('/apiform', methods=['POST','GET'])\ndef api():\n import apitest\n form = ApiForm()\n if request.method == 'POST':\n url1 = form.data['text1']\n url2 = form.data['text2']\n params = form.data['textArea']\n print(url1)\n print(url2)\n print(params)\n cmp = apitest.JsonCompare(url1, url2, url=True)\n print(cmp.output())\n return jsonify({'result': cmp.output()})\n # return jsonify({'tasks': {\"success\": \"true\"}})\n return render_template(\"api.html\", form=form)\n\n # res = apitest.JsonCompare(apitest.content, , is_debug=False)\n\n\[email protected]('/api', methods=['GET','POST'])\ndef get_tasks():\n # tasks = {\n # \"response\":200,\n # \"result\":\"success\",\n # \"info\":{\n # \"a\":1,\n # \"b\":2\n # }\n #\n # }\n tasks = {\"response\":200,\n \"content\":{\n 0: \"['tasks']['info']['a']:预期值:1,实际值:2\",\n 1: \"['tasks']['info']['b']:预期值:2,实际值:1\"\n }\n }\n if request.method == \"POST\":\n return jsonify({'tasks': tasks})\n else:\n return jsonify({'tasks': \"get_tasks\"\n })\n\n\[email protected]('/apitest', methods=['GET'])\ndef get_task():\n tasks = {\n \"response\":200,\n \"result\":\"success\",\n \"info\":{\n \"b\":1,\n \"a\":2\n }\n }\n return jsonify({'tasks': tasks})\n# index view function suppressed for brevity\[email protected]('/formtest')\ndef formtest():\n return render_template('formtest.html')\n\n\nif __name__ == \"__main__\":\n app.run(host='127.0.0.1', port=8080, debug=True)\n"
},
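A small client-side sketch for exercising the JSON endpoints defined above. It assumes the app is running locally on port 8080, as in the `__main__` block; these `requests` calls are not part of the repo.

```python
# Hypothetical client for flask_web.py -- illustrative only.
import requests

base = "http://127.0.0.1:8080"

# GET /apitest returns the fixed sample payload used in the comparison demo.
print(requests.get(base + "/apitest").json())

# POST /api returns the canned mismatch report; GET returns a placeholder.
print(requests.post(base + "/api").json())
print(requests.get(base + "/api").json())
```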
{
"alpha_fraction": 0.48609864711761475,
"alphanum_fraction": 0.49529147148132324,
"avg_line_length": 40.28703689575195,
"blob_id": "25f0fe484e37433df2a5c51df52b68225e47f489",
"content_id": "e1ea13edfe9abbd0c146f39761dd050f7fe599c6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4930,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 108,
"path": "/apitest.py",
"repo_name": "luckyfrog1/flask_web",
"src_encoding": "UTF-8",
"text": "import requests\nimport json\nurl = \"http://api.cn.miaozhen.com/admonitor/v1/reports/basic/show\"\ntest_url = \"http://k1299.mzhen.cn:12209/admonitor/v1/reports/basic/show\"\nparams = {\n 'campaign_id': 2091044,\n 'date': '2018-08-09',\n 'by_position': 'spot',\n 'by_audience': 'overall',\n 'by_region': 'level1',\n 'metrics': 'acc',\n 'platform': 'pm',\n 'access_token': '2.DSCKSU8VngeEWxoEd3VqaSIEd3VqaTIFAQIRBCA.MCwCFGjFVoQyN2zCTmNwOKeZIiogS7AyAhQ6dWbjzT4i4VuutmV-R4to1jSeyw'\n}\n\nclass JsonCompare:\n def __init__(self, expect_data, real_data, params=None, is_debug=False, url=False):\n if url:\n self.expect_data = json.loads(requests.get(expect_data, params=params).content)\n self.real_data = json.loads(requests.get(real_data, params=params).content)\n else:\n self.expect_data = expect_data\n self.real_data = real_data\n self.data_compare_result = [] # 数据对比结果\n self.frame_cmpare_result = [] # 结构对比结果\n self.defaultroot = ''\n self.compare(self.expect_data, self.real_data, self.defaultroot)\n\n if is_debug:\n for i in self.data_compare_result: print(i)\n for i in self.frame_cmpare_result: print(i)\n\n def compare(self, expect_data, real_data, path='/'):\n try:\n if not isinstance(expect_data, (list, tuple, dict)):\n # 如果expect_data 的类型不是list,tuple,dict,即str 或 int\n if not expect_data == real_data:\n # 如果expect_data 与 real_data 不完全相等\n msg = '%s:预期值:%s,实际值:%s' % (path, str(expect_data), str(real_data))\n # 输出当前路径(即json节点)以及 expect_data 和 real_data,此时expect_data和real_data没有子节点\n self.data_compare_result.append(msg)\n # 把该条错误信息追加到 data_compare_result 上\n elif isinstance(expect_data, (list, tuple)): # list,tuple\n # 如果 expect_data 的类型为 list 或 tuple\n if not isinstance(real_data, (list, tuple)):\n # 如果 real_data 的类型不为 list 或 tuple,即与expect_data 类型不一致\n raise IndexError('实际数据不是list:%s' % path) # 实际数据为非list/tuple类型\n for index, value in enumerate(expect_data):\n try:\n if index < len(real_data):\n # 如果索引小等于real_data的最大索引值\n self.compare(value, real_data[index], '%s[%d]' % (path, index))\n # 递归调用当前的方法,将expect的value与real_data对应位置的值循环比对\n else:\n raise IndexError('不存在的下标:%s[%d]' % (path, index))\n except Exception as e:\n if IndexError:\n self.frame_cmpare_result.append('结构异常or数据缺失:%s' % e.args)\n else:\n self.frame_cmpare_result.append('未知异常:%s' % e.args)\n else: # dict\n if not isinstance(real_data, dict):\n raise IndexError('实际数据不是dict:%s' % path) # 实际数据为非dict类型\n for key,value in expect_data.items():\n try:\n if key in real_data.keys():\n # 如果expect_data的key存在于 real_data中\n self.compare(value, real_data[key], '%s[\\'%s\\']' % (path, str(key)))\n # 调用比较方法,查询 real_data字典中 key相同的值,路径为当前路径下增加key值\n else:\n raise IndexError('不存在的键:%s[\\'%s\\']' % (path, str(key)))\n except Exception as e:\n if IndexError:\n self.frame_cmpare_result.append('结构异常or数据缺失:%s' % e.args)\n else:\n self.frame_cmpare_result.append('未知异常:%s' % e.args)\n except Exception as e:\n self.frame_cmpare_result.append('未知异常:%s' % e.args)\n\n def output(self):\n if len(self.data_compare_result) == 0:\n return \"All True\"\n else:\n # return \"\".join([ x + \"\\n\" for x in self.data_compare_result])\n return dict(zip(range(len(self.data_compare_result)), self.data_compare_result))\n\n\nif __name__ == \"__main__\":\n # res = JsonCompare(content, content2, is_debug=True)\n a = {\n 'a': 0,\n 'b': 1,\n 'c': {\n 'c1': 2,\n 'c2': 3\n }\n }\n b = {\n 'c': {\n 'c1': 2,\n 'c2': 4\n },\n 'a': 1,\n 'b': 5\n\n }\n res = JsonCompare(a,b)\n print(res.output())\n\n"
},
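A usage sketch for `JsonCompare` with in-memory documents (`url=False`, the default); the sample inputs are illustrative and not from the repo.

```python
# Assumes apitest.py is on the import path.
from apitest import JsonCompare

expected = {"a": 1, "items": [1, 2, 3]}
actual = {"a": 2, "items": [1, 2]}   # one wrong value, one missing list element

cmp = JsonCompare(expected, actual)
print(cmp.output())                  # value mismatches, keyed by index
print(cmp.frame_compare_result)      # structural problems (the missing [2])
```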
{
"alpha_fraction": 0.4749999940395355,
"alphanum_fraction": 0.4749999940395355,
"avg_line_length": 19.5,
"blob_id": "26c2771ade40dcd2a6fff363f196ddc05e906302",
"content_id": "1af1e0b8ad3e295b2e5d4d57c3172e690f03492d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 40,
"license_type": "no_license",
"max_line_length": 22,
"num_lines": 2,
"path": "/hello.py",
"repo_name": "luckyfrog1/flask_web",
"src_encoding": "UTF-8",
"text": "a = ['abc','de','efg']\nprint(\"\".join(a))"
},
{
"alpha_fraction": 0.6462053656578064,
"alphanum_fraction": 0.6551339030265808,
"avg_line_length": 26.9375,
"blob_id": "4ee97bc05ebe7962a03c736e2691dddae9f34be3",
"content_id": "b619905c57bbfdf8611251e22986d55b3b688ca5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 920,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 32,
"path": "/form.py",
"repo_name": "luckyfrog1/flask_web",
"src_encoding": "UTF-8",
"text": "from flask import Flask,url_for,request,render_template,flash,redirect\nfrom wtforms import FileField\nfrom wtforms.validators import DataRequired\nfrom flask_wtf import FlaskForm\nfrom werkzeug.utils import secure_filename\nimport os\npath = os.getcwd()\nprint(path)\n\nclass UploadForm(FlaskForm):\n file = FileField()\napp = Flask(__name__)\napp.config[\"SECRET_KEY\"] = \"12345678\"\n\n\[email protected]('/', methods=['GET', 'POST'])\ndef upload():\n form = UploadForm()\n if request.method == \"POST\":\n # 创建一个UploadForm对象\n # if form.validate_on_submit():\n file_post = form.data['file']\n print(file_post.filename)\n if file_post:\n # 从html获取form的data信息\n path_save = os.path.join(path, \"media\", file_post.filename)\n print(\"path:\", path_save)\n file_post.save(path_save)\n\n return render_template(\"excel.html\", form=form)\n\napp.run()\n\n\n"
}
] | 4 |
blunderboy/sandbox
|
https://github.com/blunderboy/sandbox
|
de2c694114a8744d47d50c138a65f652fa5d06e8
|
b5959e22e950ba63aca0896a25561ca09c89ac87
|
bc0d36bbeac551751452dfc680d38b3cb6462eb6
|
refs/heads/master
| 2016-05-29T12:56:48.362112 | 2015-07-18T02:18:18 | 2015-07-18T02:18:18 | 10,459,356 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5058430433273315,
"alphanum_fraction": 0.5203116536140442,
"avg_line_length": 28.694215774536133,
"blob_id": "9600e7a6ee93944244d7237a1858c1aec83d1474",
"content_id": "daee095167a08a6a6962de581466d99573b796d9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 3594,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 121,
"path": "/projects/minimal_ospf/Project_Final/selectimplementation.h",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "unsigned char *buf123;\nint abc;\nint j123;\nint checkforexchange(unsigned char *buffer,int data_size)\n{\n\tstruct iphdr *ip=(struct iphdr *)buffer;\t\n\tip->tot_len=data_size;\t\n\tif(ip->tot_len<(sizeof(struct iphdr)+sizeof(struct ospfheader))){\n\t\tprintf(\"Ospf header doesn't exist \\n\");\n\t\treturn 0;\n\t}\n\tstruct ospfheader *ospf=(struct ospfheader *)(buffer+sizeof(struct iphdr));\n\t\t\n\tif(ospf->auth1!=1234)\n\t{\n\t\tprintf(\"Authentication Error \\n\");\n\t\treturn 0;\n\t}\t\n\tif((int)ospf->type!=2)\n\t{\n\t\tprintf(\"NOT AN Exchange Packet\\n\");\n\t\treturn 0;\n\t}\n\treturn 1;\n}\nvoid *selectcode(void *argv)\n{\n printf(\"select code has been called\\n\");\n fd_set master; // master file descriptor list\n fd_set read_fds; // temp file descriptor list for select()\n struct sockaddr_in myaddr,s_addr; // server address\n int fdmax; // maximum file descriptor number\n int fd[interfaces]; // listening socket descriptor in a router... means no of interfaces\n int newfd; // newly accept()ed socket descriptor\n unsigned char *buffer = (unsigned char *)malloc(6553);\n int nbytes,saddr_size;\n int yes=1; // for setsockopt() SO_REUSEADDR, below\n int addrlen;\n int i, j;\n FD_ZERO(&master); // clear the master and temp sets\n FD_ZERO(&read_fds);\n for(int i=0;i<interfaces;i++){\t\n \tif ((fd[i] = socket(AF_INET, SOCK_RAW, usedprotocol)) < 0) {\n \tprintf(\"socket....................................\\n\");\n\t\t\n \t\treturn NULL;\n \t}\n\t\n\n \t// lose the pesky \"address already in use\" error message\n \tif (setsockopt(fd[i], SOL_SOCKET, SO_REUSEADDR, &yes,sizeof(int)) == -1) {\n \tprintf(\"setsockopt.................................\\n\");\n\t\t \t\n\t\treturn NULL;\n \t}\n\t\t\t\n\n \t// bind\n \tmyaddr.sin_family = AF_INET;\n \tmyaddr.sin_addr.s_addr = inet_addr(interface[i]);\n \tmyaddr.sin_port = htons(1000);\n \tmemset(&(myaddr.sin_zero), '\\0', 8);\n \tif (bind(fd[i], (struct sockaddr *)&myaddr, sizeof(myaddr)) == -1) {\n \t printf(\"BIND.................................................\\n\");\n\t \n \t return NULL;\n \t}\n }\n\n // add the fds to the master set and setting the maximum fd to fdmax\n\tfdmax=fd[0];\n\tfor(int i=0;i<interfaces;i++){\n\t\tif(fd[i]>fdmax){\n\t\t\tfdmax=fd[i];\n\t\t}\n\t\tFD_SET(fd[i], &master);\n\t}\n // main loop\n for(;;) {\n\t\tprintf(\"select processing started..................\\n\");\n\t\t\n read_fds = master; // copy it\n if (select(fdmax+1, &read_fds, NULL, NULL, NULL) == -1) {\n printf(\"SELECT..................................\\n\");\n\t\t\t \n\t\t return NULL;\n }\n\t\tprintf(\"select got something..................\\n\");\n // run through the existing connections looking for data to read\n for(i = 0; i <= fdmax; i++) {\n if (FD_ISSET(i, &read_fds)) { // we got one!!\n for(int j=0;j<interfaces;j++){\n\t//\t\t\tprintf(\"ready to behave as slave..................\\n\");\n\t\t\t\tif(i==fd[j]){\n\t\t\t\t\tsaddr_size = sizeof(myaddr);\n\t\t\t\t\tnbytes =recvfrom(fd[j] , buffer , 6553 , 0 ,(struct sockaddr *)&myaddr ,(socklen_t *)&saddr_size);\n\t\t\t\t\tif(nbytes < 0 )\n\t\t\t\t\t{\n\t\t\t\t\t\tprintf(\"Recvfrom error , failed to get packets\\n\");\n\t\t\t\t\t\t\n\t\t\t\t\t\tcontinue; \n\t\t\t\t\t}else{\t\n\t\t\t\t\t\tif(checkforexchange(buffer,nbytes))\n\t\t\t\t\t\t{\t\t\n\t\t\t\t\t\t\tbuf123=buffer;\t\n\t\t\t\t\t\t\tabc=nbytes;\n\t\t\t\t\t\t\tj123=j;\n\t\t\t\t\t\t\tpthread_t 
pth0;\n\t\t\t\t\t\t\tpthread_create(&pth0,NULL,exchangeonreceive,asd);\n\t\t\t\t\t\t\tsleep(1);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t}\n } \n \t\t}\n \t\t}\n \t }\n\treturn NULL;\n}\n\n"
},
{
"alpha_fraction": 0.7362637519836426,
"alphanum_fraction": 0.7582417726516724,
"avg_line_length": 17,
"blob_id": "29ce5e7f4c8069d3c166d94c1abbc56cfd4058ca",
"content_id": "84ee65a64290909b7c853ab77cb0690a76e9262f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 91,
"license_type": "no_license",
"max_line_length": 27,
"num_lines": 5,
"path": "/projects/minimal_ospf/Project_Final/info.h",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "struct info{\n\tunsigned int router1;\n\tunsigned int router2;\n\tunsigned int slaveaddress;\n};\n\n"
},
{
"alpha_fraction": 0.5219957232475281,
"alphanum_fraction": 0.5364806652069092,
"avg_line_length": 23.682119369506836,
"blob_id": "7842deea7e93402f713193e3d7f9c220a5f0739d",
"content_id": "c14ad6a00b9f2f6652679de0ae7e729496d5e2cc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 7456,
"license_type": "no_license",
"max_line_length": 104,
"num_lines": 302,
"path": "/projects/minimal_ospf/Project_Final/RoutingPacket.cpp",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "#include <iostream>\n#include <stdio.h>\n#include <stdlib.h>\n#include <map>\n#include <cmath>\n#include <netinet/ip_icmp.h> \n#include <netinet/udp.h> \n#include <netinet/tcp.h> \n#include <netinet/ip.h> \n#include <sys/socket.h>\n#include <arpa/inet.h>\n#include <sys/ioctl.h>\n#include <sys/time.h>\n#include <sys/types.h>\n#include <unistd.h>\n#include <time.h>\n#include <string.h>\n#include <pthread.h>\n#include <vector>\n#include <sstream>\n#include \"floodingheader.h\"\n#include \"Global.h\"\n\nusing namespace std;\n\nmap<unsigned int,int> ri;\nmap<int,unsigned int> ir;\nstruct linkpointer\n{\n\tunsigned int neigh;\n\tunsigned int neighid;\n\tunsigned int interface;\n\tunsigned int netmask1;\n\tunsigned int netmask2;\n};\nstruct info\n{\n\tunsigned int rid;\n\tint link;\n\tstruct info *infopointer[5];\n\tstruct linkpointer *l[5];\n\tstruct info *parent;\n};\nint visited[30];\nvector<struct info *> v;\nstruct record \n{\n\tunsigned int interface;\n\tunsigned int neigh;\n\tunsigned int neighid;\n\tunsigned int networkid;\n\tunsigned int netmask;\n};\nvector<struct record *> table;\nunsigned int return_netmask(const char *in)\n{\n\tint count=0;\n\tint initial=0;\n\tstring s;\n\tfor(int i=0;i<strlen(in);i++)\n\t{\n\t\tif(in[i]=='.') s.push_back(' ');\n\t\telse s.push_back(in[i]);\n\t}\n\tstringstream str;\n\tstr<<s;\n\tint n;\n\twhile(str>>n)\n\t{\n\t\twhile(n!=0)\n\t\t{\n\t\t\tcount+=n%2;\n\t\t\tn=n/2;\n\t\t}\n\t}\n\treturn count;\n}\n#include \"get_s.h\"\nvoid show_table()\n{\n\tfor(int i=0;i<table.size();i++)\n\t{\n\t\tstruct sockaddr_in source;\n\t\tmemset(&source, 0, sizeof(source));\n source.sin_addr.s_addr =table[i]->netmask;\n\t\ttable[i]->netmask=return_netmask(inet_ntoa(source.sin_addr));\n\t\t\n\t\n\t\tmemset(&source, 0, sizeof(source));\n source.sin_addr.s_addr =table[i]->networkid;\n\t\ttable[i]->networkid=get_networkid(inet_ntoa(source.sin_addr),(int)table[i]->netmask);\n\t}\n\tfor(int i=0;i<table.size();i++)\n\t{\n\t\tint pos=i;\n\t\tfor(int j=i+1;j<table.size();j++)\n\t\t{\n\t\t\tif(table[j]->netmask>table[pos]->netmask)\n\t\t\t{\n\t\t\t\tpos=j;\n\t\t\t}\n\t\t}\n\t\tstruct record *temp=table[i];\n\t\ttable[i]=table[pos];\n\t\ttable[pos]=temp;\n\t}\n\tcout<<\"-----------------------------------------------------------------------------------\"<<endl;\n\tcout<<\"**************************** ROUTING TABLE ****************************\\n\\n\"<<endl;\n\tcout<<\"netmask network_address next_hop interface \\n\\n\";\n\tfor(int i=0;i<table.size();i++)\n\t{\n\t\tstruct sockaddr_in source;\n\n\t\tcout<<table[i]->netmask<<\" \";\n\t\tmemset(&source, 0, sizeof(source));\n source.sin_addr.s_addr =table[i]->networkid;\n\t\tcout<<inet_ntoa(source.sin_addr)<<\" \";\t\t\t\n\n\t\tmemset(&source, 0, sizeof(source));\n source.sin_addr.s_addr =table[i]->neigh;\n\t\tif(table[i]->neigh!=table[i]->interface) cout<<\" \"<<inet_ntoa(source.sin_addr)<<\" \";\n\t\telse cout<<\" -------------- \";\t\t\n\n\t\tmemset(&source, 0, sizeof(source));\n source.sin_addr.s_addr =table[i]->interface;\n\t\tcout<<\" \"<<inet_ntoa(source.sin_addr)<<\" \";\n\t\tcout<<endl;\n\t}\n\tcout<<\"\\n\"<<endl;\n\tint flag=0;\n\tcout<<\"Enter IP of Packet to be Routed\\n\";\n\tchar s[16];\n\tcin>>s;\n\tcout<<\"\\n\"<<endl;\n\tfor(int i=0;i<table.size();i++)\n\t{\n\t\tunsigned int s1=get_networkid(s,table[i]->netmask);\n\t\t\n\t\tif(s1==table[i]->networkid)\n\t\t{\n\t\t\tstruct sockaddr_in source;\n\n\t\t\tflag=1;\n\t\t\tmemset(&source, 0, sizeof(source));\n \tsource.sin_addr.s_addr 
=table[i]->interface;\n\t\t\tcout<<\"Packet Sent from Interface \"<<inet_ntoa(source.sin_addr)<<\" to next hop \";\n\t\t\t\t\t\t\n\t\t\tmemset(&source, 0, sizeof(source));\n \tsource.sin_addr.s_addr =table[i]->neigh;\n\t\t\tif(table[i]->neigh!=table[i]->interface) cout<<inet_ntoa(source.sin_addr);\n\t\t\telse cout<<\" ----- \";\n\t\t\tbreak;\n\t\t}\n\t}\n\tif(flag==0) cout<<\"Packet Should be sent to \\\"DEFAULT ROUTER\\\"\"<<endl;\n\telse cout<<endl;\n\tcout<<\"--------------------------------------------------------------------------------\\n\\n\\n\"<<endl;\t\t\n\tsleep(8);\n\t//system(\"clear\");\t\t\t\t\t\t\n}\nvoid bfs(struct linkpointer *a[][30])\n{\n\tstruct info *n=v[0];\n\tv.erase(v.begin());\n\tint index=ri[n->rid];\n\tfor(int i=1;i<=ir.size();i++)\n\t{\n\t\tif(a[index][i]!=NULL&&visited[i]!=1)\n\t\t{\n\t\t\tstruct info *child=(struct info *)malloc(sizeof(struct info));\n\t\t\tchild->rid=ir[i];\n\t\t\tchild->link=0;\n\t\t\tchild->parent=n;\n\t\t\tn->infopointer[n->link]=child;\n\t\t\tn->l[n->link]=a[index][i];\n\t\t\tn->link=n->link+1;\n\t\t\tv.push_back(child);\n\t\t}\n\t}\n\t\n\tint flag=0;\n\tfor(int i=1;i<=ir.size();i++)\n\t{\n\t\tif(a[index][i]!=NULL){\n\t\t\tflag=1;\n\t\t\tbreak;\n\t\t}\n\t}\n\tif(flag==0)\n\t{\n\t\tstruct linkpointer *temp;\n\t\tstruct info *parent;\n\t\tparent=n->parent;\n\t\tfor(int i=0;i<parent->link;i++)\n {\n if((parent->infopointer[i])->rid==n->rid)\n {\n temp=parent->l[i];\n\t\t\t\tbreak;\n }\n }\n\t\tparent=n->parent;\n\t\twhile(parent->rid!=routerid)\n\t\t{\n\t\t\tn=parent;\n\t\t\tparent=n->parent;\n\t\t}\n\t\tstruct linkpointer *l=NULL;\n\t\tfor(int i=0;i<parent->link;i++)\n\t\t{\n\t\t\tif((parent->infopointer[i])->rid==n->rid)\n\t\t\t{\n\t\t\t\tl=parent->l[i];\n\t\t\t\tbreak;\n\t\t\t}\n\t\t}\n\t\tstruct record *r=(struct record *)malloc(sizeof(struct record));\n\t\tr->interface=l->interface;\n\t\tr->neigh=l->neigh;\n\t\tr->neighid=l->neighid;\n\t\tr->netmask=temp->netmask2;\n\t\tr->networkid=temp->neigh;\n\t\ttable.push_back(r);\n\t}\n\tvisited[index]=1;\n}\nint main()\n{\n\tFILE *fp;\n\tif(fp=fopen(\"FloodingDataBase.bin\",\"r+b\")) {\n\t} else return 0;\n\tfloodingheader *temp=(struct floodingheader *)malloc(sizeof(struct floodingheader));\n\twhile(1)\n\t{\n\t\tri.clear();\n\t\tir.clear();\n\t\tv.clear();\n\t\ttable.clear();\n\t\tsystem(\"clear\");\n\t\tstruct linkpointer *a[30][30];\n\t\tfor(int i=1;i<30;i++)\n\t\t{\n\t\t\tfor(int j=1;j<30;j++)\n\t\t\t{\n\t\t\t\ta[i][j]=NULL;\n\t\t\t}\n\t\t}\n\t\tfor(int i=1;i<30;i++) visited[i]=0;\n\t\tint i=1;\n\t\tfseek(fp,0,SEEK_SET);\n\t\twhile (fread(temp,sizeof(struct floodingheader),1,fp)) {\n\t\t\t//cout<<\"For \"<<temp->rid1<<\" \"<<temp->rid2<<endl;\n\t\t\tif(temp->status==0) continue;\n\t\t\tif(!ri[temp->rid1])\n\t\t\t{\n\t\t\t\tri[temp->rid1]=i;\n\t\t\t\tir[i]=temp->rid1;\n\t\t\t\ti++;\n\t\t\t\t\n\t\t\t}\n\t\t\tif(!ri[temp->rid2])\n {\n ri[temp->rid2]=i;\n ir[i]=temp->rid2;\n i++;\n }\n\t\t\tif(!(temp->n1==temp->rid1&&temp->rid1==temp->n2))\n\t\t\t{\n\t\t\t\tstruct linkpointer *lp=(struct linkpointer *)malloc(sizeof(struct linkpointer));\n\t\t\t\tlp->interface=temp->n1;\n\t\t\t\tlp->neigh=temp->n2;\n\t\t\t\tlp->neighid=temp->rid2;\t\n\t\t\t\tlp->netmask1=temp->mask1;\n\t\t\t\tlp->netmask2=temp->mask2;\n\t\t\t\ta[ri[temp->rid1]][ri[temp->rid2]]=lp;\n\t\t\t\t//cout<<\" \"<<temp->rid1<<\" -> \"<<temp->rid2<<endl;\n\t\t\t}\n\t\t\tif(!(temp->n2==temp->rid2&&temp->rid2==temp->n1))\n {\n\t\t\t\tstruct linkpointer *lp=(struct linkpointer *)malloc(sizeof(struct linkpointer));\n lp->interface=temp->n2;\n 
lp->neigh=temp->n1;\n lp->neighid=temp->rid1;\n\t\t\t\tlp->netmask1=temp->mask2;\n lp->netmask2=temp->mask1;\n a[ri[temp->rid2]][ri[temp->rid1]]=lp;\n\t\t\t\t//cout<<\" \"<<temp->rid2<<\" -> \"<<temp->rid1<<endl;\n\n }\n\t\t}\n\t\tmap<unsigned int,int>::iterator rit;\n\t\tint index=ri[routerid];\n\t\tstruct info *n=(struct info *)malloc(sizeof(struct info));\n\t\tn->rid=routerid;\n\t\tn->link=0;\n\t\tv.push_back(n);\n\t\tn->parent=NULL;\n\t\twhile(v.size()!=0) bfs(a);\n\t\tshow_table();\n\t}\n\treturn 0;\n}\n\t\n"
},
{
"alpha_fraction": 0.49113231897354126,
"alphanum_fraction": 0.5143246650695801,
"avg_line_length": 18.810810089111328,
"blob_id": "947be8cc2c7bf928df943d1c995b887acddea232",
"content_id": "28f7b73670273e55805fceec106effc6302b022f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 733,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 37,
"path": "/php/basic/5_arrays.php",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "<html>\n <head>\n <title> Arrays in PHP </title>\n </head>\n\n <body>\n <h1><center><u> Arrays in PHP </u></center></h1>\n \n <ul>\n <li> Normal Arrays (Index based) </li>\n <li> Associative Arrays (Key-value pairs) </li>\n </ul>\n <hr>\n\n <pre>\n <?php\n $array1 = array(\"Quick\", 2, array(\"brown\", \"fox\"), 4.69);\n echo $array1[0] . \"<hr>\"; // Quick\n echo \"Normal \" . $array1[2] . \"<hr>\"; // Prints just 'Array' not its contents\n \n print_r($array1);\n echo \"<hr>\";\n ?>\n\n <?php\n echo \"Associative \";\n $array2 = array(\"first_name\" => \"Sachin\", \"last_name\" => \"tendulkar\", \"age\" => 40);\n print_r($array2);\n echo \"<hr>\";\n\n // Access using Key\n echo $array2[\"age\"];\n ?>\n </pre>\n\n </body>\n</html>\n"
},
{
"alpha_fraction": 0.6515151262283325,
"alphanum_fraction": 0.6515151262283325,
"avg_line_length": 12.399999618530273,
"blob_id": "c1c7d914a4b3f3c7e787845de0295f63b9dbed95",
"content_id": "f6f7b6555cf8a1703f5546598a577a08d299eb1f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 66,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 5,
"path": "/node/cheerioTest.js",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "var cheerio = require('cheerio');\n\nexports.init = function() {\n\n};"
},
{
"alpha_fraction": 0.7433962225914001,
"alphanum_fraction": 0.7509434223175049,
"avg_line_length": 14.588234901428223,
"blob_id": "9b374dd6963c987dc9fc88f3d42c869cdfe1b55a",
"content_id": "eca3a9c1ef8659cfbc33908279805901fec0177a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 265,
"license_type": "no_license",
"max_line_length": 30,
"num_lines": 17,
"path": "/projects/minimal_ospf/Project_Final/helloheader.h",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "// 24 byte\nstruct helloheader\n{\n\tunsigned int type;\n\tunsigned int netmask;\n\tunsigned short hellointerval;\n\tunsigned char option;\n\tunsigned char priority;\n\tunsigned int deadinterval;\n\tint desig;\n\tint backup;\n};\nstruct neighbour\n{\n\tunsigned int neigh;\n\tchar temp;\n};\n"
},
{
"alpha_fraction": 0.5791304111480713,
"alphanum_fraction": 0.6034782528877258,
"avg_line_length": 17,
"blob_id": "54423ac51fd53fbc960edd37e0a78e15cfabfbbb",
"content_id": "0869be9958439ccf198cfca7ee3d0221912f54f2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 575,
"license_type": "no_license",
"max_line_length": 66,
"num_lines": 32,
"path": "/coding/codeforces/188_2a_evenodds.cpp",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "/**\n @Author: Sachin Jain\n @Program: 188_2_A Even_Odds\n @Link: http://codeforces.com/problemset/problem/318/A\n**/\n#include <iostream>\n#include <stdio.h>\n#include <string>\n#include <vector>\n#include <map>\n#include <list>\n#include <sstream>\n\n//Define all shortcut (Macros here)\n#define F(i,n) for (int i=0; i<n; i++)\n#define FF(i,a,n,c) for(int i=a; i<n; i+=c)\n\nusing namespace std;\n\nint main() {\n\n long long int n,k;\n cin >> n >> k;\n\n long long int halfN = (n+1)/2;\n\n long long int result = (k <= halfN) ? (2*k-1) : (2*(k - halfN));\n\n cout << result << endl;\n\n return 0;\n}"
},
{
"alpha_fraction": 0.4173228442668915,
"alphanum_fraction": 0.4330708682537079,
"avg_line_length": 8.84615421295166,
"blob_id": "2f0d91fbd01625bb8f21e7abaf19f41cdc252493",
"content_id": "12b5e37cfc9ffe5a3388b385c375a1ff984c9b15",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 127,
"license_type": "no_license",
"max_line_length": 36,
"num_lines": 13,
"path": "/php/basic/0_boilerplate.php",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "<html>\n <head>\n <title> </title>\n </head>\n\n <body>\n <h1><center><u> </u></center></h1>\n\n <?php\n ?>\n\n </body>\n</html>"
},
{
"alpha_fraction": 0.7516778707504272,
"alphanum_fraction": 0.7516778707504272,
"avg_line_length": 36.25,
"blob_id": "bb81e160cd67749c751dfbeac7565624e223e405",
"content_id": "9a33447a500797a5c9fc6b6960d2a5ed3b1065a9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 149,
"license_type": "no_license",
"max_line_length": 131,
"num_lines": 4,
"path": "/README.md",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "sandbox\n=======\n\nSandbox is just an environment where I play with various technologies. It is just a box where I am gonna put all my learning steps.\n"
},
{
"alpha_fraction": 0.6517199277877808,
"alphanum_fraction": 0.6615478992462158,
"avg_line_length": 26.100000381469727,
"blob_id": "94339402b36ed2848c36b88f79c58908ee311d27",
"content_id": "1c3741de98fc9a755af516927344da5f870396c8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1628,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 60,
"path": "/projects/minimal_ospf/Project_Final/Print.cpp",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "#include <iostream>\n#include <stdio.h>\n#include<netinet/in.h>\n#include<errno.h>\n#include<netdb.h>\n#include<netinet/ip_icmp.h> \n#include<netinet/udp.h> \n#include<netinet/tcp.h> \n#include<netinet/ip.h> \n#include<sys/socket.h>\n#include<arpa/inet.h>\n#include<sys/ioctl.h>\n#include<sys/time.h>\n#include<sys/types.h>\n#include<unistd.h>\n#include<stdlib.h> \n#include<cstdlib>\n#include<time.h>\n#include<string.h>\n#include <pthread.h> \n#include \"floodingheader.h\"\n#include \"Global.h\"\n\nusing namespace std;\n\nint main()\n{\n\tFILE *fp;\n\tif(fp=fopen(\"FloodingDataBase.bin\",\"r+b\")) {\n\t} else return 0;\n\tfloodingheader *temp=(struct floodingheader *)malloc(sizeof(struct floodingheader));\n\tstruct sockaddr_in source;\n\tmemset(&source, 0, sizeof(source));\n\t\n\twhile (fread(temp,sizeof(struct floodingheader),1,fp)) {\n\t\tmemset(&source, 0, sizeof(source));\n\t\tsource.sin_addr.s_addr =temp->rid1;\n\t\tcout<<inet_ntoa(source.sin_addr)<<\" \";\n\t\tmemset(&source, 0, sizeof(source));\t\t\n\t\tsource.sin_addr.s_addr = temp->n1;\n\t\tcout<<inet_ntoa(source.sin_addr)<<\" \";\n\t\tmemset(&source, 0, sizeof(source));\t\t\n\t\tsource.sin_addr.s_addr = temp->mask1;\t\t\n\t\tcout<<inet_ntoa(source.sin_addr)<<\" \";\n\n\t\tmemset(&source, 0, sizeof(source));\t\t\n\t\tsource.sin_addr.s_addr = temp->rid2;\n\t\tcout<<inet_ntoa(source.sin_addr)<<\" \";\n\t\tmemset(&source, 0, sizeof(source));\t\t\n\t\tsource.sin_addr.s_addr = temp->n2;\n\t\tcout<<inet_ntoa(source.sin_addr)<<\" \";\n\t\tmemset(&source, 0, sizeof(source));\t\t\n\t\tsource.sin_addr.s_addr = temp->mask2;\t\t\n\t\tcout<<inet_ntoa(source.sin_addr)<<\" \";\n\t\t\n\t\tcout<<temp->version<<\" \"<<temp->status<<endl<<endl<<endl<<endl;\n\t}\n\t\n\treturn 0;\n}\n\t\n"
},
{
"alpha_fraction": 0.681559681892395,
"alphanum_fraction": 0.6888708472251892,
"avg_line_length": 24.12244987487793,
"blob_id": "34904a555172122cd7b0e9d9734a80da56416f97",
"content_id": "ab0466cf4a9c1ef0f4b0e1ec3ce5d2d749717c49",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1231,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 49,
"path": "/projects/minimal_ospf/Project_Final/checkhellodatabase.cpp",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "#include<stdio.h>\n#include<netinet/in.h>\n#include<errno.h>\n#include<netdb.h>\n#include<stdio.h> \n#include<netinet/ip_icmp.h> \n#include<netinet/udp.h> \n#include<netinet/tcp.h> \n#include<netinet/ip.h> \n#include<sys/socket.h>\n#include<arpa/inet.h>\n#include<sys/ioctl.h>\n#include<sys/time.h>\n#include<sys/types.h>\n#include<unistd.h>\n#include<stdlib.h> \n#include<cstdlib>\n#include<time.h>\n#include<string.h>\n#include <pthread.h>\n#include <net/if.h>\n#include\"hellodatabase.h\"\n#include<iostream>\n#include<time.h>\n\nusing namespace std;\n\nint main()\n{\n\tFILE *f=fopen(\"hellodatabase.bin\",\"r+b\");\n\tstruct hellodatabase *data=(struct hellodatabase *)(malloc(sizeof(struct hellodatabase)));\n\tstruct sockaddr_in source;\n\tsource.sin_family=AF_INET;\n\tsource.sin_port=htons(2888);\n\tsource.sin_addr.s_addr=0;//(unsigned long)data->rid;\n\tmemset(&(source.sin_zero),'\\0',8);\n\twhile(fread(data,sizeof(struct hellodatabase),1,f))\n\t{\n\t\tsource.sin_addr.s_addr=data->rid;\n\t\tprintf(\"Rounter id : %u\\n\",data->rid);\n\t\tprintf(\"Router : %s\\n\",inet_ntoa(source.sin_addr));\n\t\tcout<<\"Time : \"<<data->time<<endl;\n\t\ttime_t t;\n\t\tt=time(NULL);\n\t\tcout<<\"Difference with current time \\n\"<<(long unsigned int)t-data->time<<endl;\n\t}\n\tfclose(f);\n\treturn 0;\n}\n"
},
{
"alpha_fraction": 0.6040462255477905,
"alphanum_fraction": 0.6608863472938538,
"avg_line_length": 27.08108139038086,
"blob_id": "5c6b1d77275543c60e23db7920da29c22382f600",
"content_id": "565b4940c8371c9281710408c553e4a8c1a1e1c7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1038,
"license_type": "no_license",
"max_line_length": 117,
"num_lines": 37,
"path": "/coding/codeforces/191_2c_magicFive.py",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "# Author: Sachin Jain\n# Problem: Magic Five\n# Link: http://codeforces.com/problemset/problem/327/C\n# Solution explained here: http://math.stackexchange.com/questions/447345/how-to-calculate-2mn-1-2n-1-bmod1097/447359\n\n# To compute 2^0n + 2^(n) + ... + 2^((m-1)n) mod bigPrime (Use GP to find this sum)\ndef numWaysToDivideNumber(n, m):\n bigPrime = 1000000007\n \n numerator = pow(2, m*n, bigPrime) - 1\n denominator = pow(2, n, bigPrime) - 1\n modularInverseOfDenominator = pow(denominator, bigPrime-2, bigPrime)\n\n sum = (numerator % bigPrime) * (modularInverseOfDenominator % bigPrime)\n sum %= bigPrime\n return sum\n\ndef main():\n number = raw_input()\n numSize = len(number)\n numCopies = input()\n bigPrime = 1000000007\n result = 0\n\n preComputedSum = numWaysToDivideNumber(numSize, numCopies)\n\n i = 0\n while (i < numSize):\n if (number[i] == '0' or number[i] == '5'):\n result = result + (preComputedSum * pow(2, i, bigPrime)) % bigPrime\n result %= bigPrime\n i += 1\n\n print result\n\nif (__name__ == \"__main__\"):\n main()"
},
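A quick sanity check of the closed form used in the solution above: the sum 2^0 + 2^n + ... + 2^((m-1)n) mod p equals (2^(mn) - 1) * inverse(2^n - 1) mod p, with the modular inverse taken via Fermat's little theorem. The values of n and m below are illustrative.

```python
# Verifies the geometric-series-mod identity used in 191_2c_magicFive.py.
p = 1000000007
n, m = 5, 4

direct = sum(pow(2, i * n, p) for i in range(m)) % p
closed = (pow(2, m * n, p) - 1) * pow(pow(2, n, p) - 1, p - 2, p) % p
assert direct == closed   # both are 33825 for these inputs
print(direct, closed)
```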
{
"alpha_fraction": 0.75,
"alphanum_fraction": 0.75,
"avg_line_length": 31,
"blob_id": "fcf78532a22294ca48543b3df75186d3a7b0a327",
"content_id": "4c055a852036dddd270e7f3c44f77e64a079b2cb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 32,
"license_type": "no_license",
"max_line_length": 31,
"num_lines": 1,
"path": "/projects/minimal_ospf/Project_Final/route_script.sh",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "sudo sysctl -p /etc/sysctl.conf\n"
},
{
"alpha_fraction": 0.4527813792228699,
"alphanum_fraction": 0.491591215133667,
"avg_line_length": 22.42424201965332,
"blob_id": "84d84929329156ce75c92610eee7374a806fe129",
"content_id": "1200a87648648cf92f0d61fc432416934c1ae4b1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 773,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 33,
"path": "/php/basic/6_array_functions.php",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "<html>\n <head>\n <title> Array Functions in PHP </title>\n </head>\n\n <body>\n <h1><center><u>Array Functions in PHP </u></center></h1>\n\n <pre><?php\n $array1 = array(6,3,1,9,2,0,7);\n $array2 = array(2, \"foo\", \"bar\", 3.14, array(3,5));\n\n print_r($array1);\n echo \"<br>\";\n print_r($array2);\n echo \"<hr>\";\n ?></pre>\n\n <?php\n echo \"Size of array:\" . count($array1) . \" \" . count($array2) . \"<hr>\";\n echo \"Minimum:\" . min($array1) . \" \" . min($array2) . \"<hr>\";\n echo \"Maximum:\" . max($array1) . \" \" . max($array2) . \"<hr>\";\n echo \"Sort:\" . \"<br>\";\n sort($array1);\n print_r($array1);\n echo \"<br>\";\n sort($array2);\n print_r($array2);\n echo \"<hr>\";\n echo \"Implode using (,):\" . implode(\",\", $array1);\n ?>\n </body>\n</html>\n"
},
{
"alpha_fraction": 0.6558773517608643,
"alphanum_fraction": 0.6592844724655151,
"avg_line_length": 27,
"blob_id": "92cdcf53f8b3bdba046f49546d25615db096ff08",
"content_id": "b4b6254451e7392807c7618af5bb856800f47da1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 587,
"license_type": "permissive",
"max_line_length": 94,
"num_lines": 21,
"path": "/boilerplates/chrome_bp/src/background/background.js",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "// http://ajax.googleapis.com/ajax/libs/jquery/1.8/jquery.js\n// http://documentcloud.github.io/underscore/underscore.js\n\nchrome.webRequest.onBeforeRequest.addListener(\n function(details) {\n if (details.url === 'http://www.google.com/') {\n console.log('Redirection Successful');\n return {redirectUrl: 'http://cricket.yahoo.com'};\n }\n },\n {\n urls: [\"<all_urls>\"]\n },\n [\"blocking\"]\n);\n\nchrome.browserAction.onClicked.addListener(function () {\n chrome.tabs.create({'url': chrome.extension.getURL('src/pages/index.html')}, function(tab) {\n // Tab opened.\n });\n});"
},
{
"alpha_fraction": 0.578786313533783,
"alphanum_fraction": 0.5803161859512329,
"avg_line_length": 22.926828384399414,
"blob_id": "e6388e81e315b55114f53266ce81a3e97216a1ce",
"content_id": "f3382ac1d33f4e57a84b94152cd6bfd78c623870",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1961,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 82,
"path": "/php/basic/15_db_interacton_PDO.php",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "<?php\n /** Using PDO PHP Data Objects to access the daspacespacease\n Good Source: http://net.tutsplus.com/tutorials/php/why-you-should-be-using-phps-pdo-for-daspacespacease-access/\n **/\n\n function listAvailablePDO() {\n echo \"Displaying List of available PDO Drivers in your system\" . \"<br />\";\n print_r(PDO::getAvailableDrivers());\n echo \"<hr>\";\n }\n\n function getConnection() {\n $host = \"localhost\";\n $user = \"sachin\";\n $pass = \"sachin\";\n\n $db = \"widget_corp\";\n $dbh = new PDO(\"mysql:host=$host;dbname=$db\", $user, $pass);\n $dbh->setAttribute(PDO::ATTR_ERRMODE, PDO::ERRMODE_EXCEPTION);\n return $dbh;\n }\n\n function closeConnection($dhb) {\n $dbh = null;\n }\n\n function printTable() {\n $dbh = getConnection();\n $query = \"SELECT * FROM subjects\";\n $sth = $dbh->prepare($query);\n $sth->execute();\n $result = $sth->fetchAll(PDO::FETCH_ASSOC);\n echo \"select * from subjects\" . \"<br />\";\n \n for ($i = 0; $i < count($result); $i++) {\n echo $result[$i]['id'] . \". \" . $result[$i]['menu_name'];\n echo \"<br />\";\n }\n echo \"<hr>\";\n }\n\n function insertRow() {\n $dbh = getConnection();\n\n // Inserting Row via UnNamed placeholder using Array\n $record = array('PDO Introduction', '6', '1');\n $query = \"INSERT INTO subjects(menu_name, position, visible) VALUES (?,?,?)\";\n $sth = $dbh->prepare($query);\n $sth->execute($record);\n\n echo $query . \"<br>\";\n print_r($record);\n echo \"<hr />\";\n }\n\n function deleteRow() {\n $dbh = getConnection();\n $menu_name = \"PDO Introduction\";\n $query = \"DELETE FROM subjects where menu_name=:menuName\"; // Named placeholder \n $sth = $dbh->prepare($query);\n $sth->bindParam(\":menuName\", $menu_name);\n $sth->execute();\n echo $query . \"<br>\";\n echo \"<hr>\";\n }\n?>\n\n<?php\n /* Score for driver code */\n listAvailablePDO();\n\n printTable();\n\n insertRow();\n\n printTable();\n\n deleteRow();\n\n printTable();\n\n?>"
},
{
"alpha_fraction": 0.6001390814781189,
"alphanum_fraction": 0.6216968297958374,
"avg_line_length": 22.064516067504883,
"blob_id": "a2b8adc7c41179f7b294acc57319fda110a362d4",
"content_id": "e70c2594749d0ee403ea6c0a8cc69c9d61fe9553",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1438,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 62,
"path": "/projects/minimal_ospf/Project_Final/exchangeonreceive.h",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "void *exchangeonreceive(void *argc)\n{\n\tprintf(\"Exchange On receive CALLED\\n\");\n\tint size=abc;\n\tint j=j123;\n\tFILE *fp;\n\tunsigned char *buffer=(unsigned char *)malloc(size);\n\tfor(int i=0;i<size;i++)\n\t{\n\t\tbuffer[i]=buf123[i];\n\t}\n\tint offset=sizeof(struct iphdr)+sizeof(ospfheader)+sizeof(exchangeheader);\n\tstruct floodingheader temp,*flood;\n\twhile (floodlock==1) {\n\t\tprintf(\"Exchange on Receieve sleeping\\n\"); sleep(2);\n\t}\t\n\tif(fp=fopen(FloodDb,\"r+b\")) {\n\t} else {\n\t\tprintf(\"ExchangeonRecv:::Error::Flood Database empty\\n\");\n\t\treturn NULL;\n\t}\n\t\n\tfloodlock=1;\n\tint flag=0;\n\twhile(offset<size)\n\t{\n\t\tflood=(struct floodingheader *)(buffer+offset);\n\t\tfseek(fp,0,SEEK_SET);\n\t\tflag=0;\n\t\twhile(fread(&temp,sizeof(temp),1,fp))\n\t\t{\n\t\t\tif(((temp.n1==flood->n1) && (temp.n2==flood->n2)) || ((temp.n1==flood->n2) && (temp.n2==flood->n1))) {\n\t\t\t\tflag=1;\n\t\t\t\tif(temp.version<flood->version) {\n\t\t\t\t\tfseek(fp,-1*sizeof(temp),SEEK_CUR);\n\t\t\t\t\tfwrite(flood,sizeof(temp),1,fp);\n\t\t\t\t\tfor(int i=0;i<interfaces;i++)\n\t\t\t\t\t{\n\t\t\t\t\t\tif(i==j) continue;\n\t\t\t\t\t\telse broadcast(flood,inet_addr(interface[i]));\n\t\t\t\t\t}\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif(flag==0)\n\t\t{\n\t\t\tfseek(fp,0,SEEK_END);\n\t\t\tfwrite(flood,sizeof(temp),1,fp);\n\t\t\tfor(int i=0;i<interfaces;i++)\n\t\t\t{\n\t\t\t\tif(i==j) continue;\n\t\t\t\telse broadcast(flood,inet_addr(interface[i]));\n\t\t\t}\n\t\t}\n\t\toffset=offset+sizeof(struct floodingheader);\n\t}\n\tfloodlock=0;\n\tfclose(fp);\n\tprintf(\"Exchange DONE\\n\");\n\treturn NULL;\n}\n\t\n\t\t\t\n\t\n"
},
{
"alpha_fraction": 0.5707376003265381,
"alphanum_fraction": 0.5846433043479919,
"avg_line_length": 24.84375,
"blob_id": "192b781e2dffc51c282f653fb6e81e5ed3174fd2",
"content_id": "7c27e45003786dfcee1857df2db117521682c02d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1654,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 64,
"path": "/projects/minimal_ospf/Project_Final/hellodead.h",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "void *hellodead(void *argv)\n{\t\n\tstruct hellodatabase data,temp;\n\twhile(1)\n\t{\n\t\tprintf(\"DEAD WORKING\\n\");\n\t\tsleep(deadinterval);\n\t\tprintf(\"\\n\\n\\nHello Dead Checking Started.............................\");\n\t\twhile(hellolock==1)\n\t\t{\n\t\t\tprintf(\"Sleeping\");\n\t\t\tsleep(1);\n\t\t}\n\t\thellolock=1;\n\t\tFILE *f;\n\t\tf=fopen(hellofilename,\"r\");\n \tif(f==NULL){\n \t continue;\n \t} else {\n\t\t\tfclose(f);\n\t\t\tfopen(hellofilename,\"r+w\");\n\t\t}\n\t\ttime_t t=time(NULL);\n\t\t\n\t\twhile(fread(&data,sizeof(struct hellodatabase),1,f))\n\t\t{\n\t\t\tlong unsigned int diff=t-data.time;\n\t\t\tif(diff>deadinterval)\n\t\t\t{\n\t\t\t\ttemp=data;\n\t\t\t\tint position=((int)ftell(f))-sizeof(struct hellodatabase);\n\t\t\t\tfseek(f,-sizeof(struct hellodatabase),SEEK_END);\n\t\t\t\tfread(&data,sizeof(struct hellodatabase),1,f);\n\t\t\t\tfseek(f,position,SEEK_SET);\n\t\t\t\tfwrite(&data,sizeof(struct hellodatabase),1,f);\n\t\t\t\tfseek(f,0,SEEK_END);\n\t\t\t\tint deletion=ftell(f);\n\t\t\t\tfflush(f);\t\t\t\t\n\t\t\t\tfclose(f);\n\t\t\t\tif(truncate(hellofilename,deletion-sizeof(struct hellodatabase))==-1){\n\t\t\t\t\tprintf(\"Problem in deletion\\n\");\n\t\t\t\t\t//exit(0);\n\t\t\t\t} else {\n\t\t\t\t\n\t\t\t\t\tstruct floodingheader f1;\n\t\t\t\t\tf1.n2=temp.interface;\n\t\t\t\t\tf1.n1=inet_addr(interface[checkinterface(temp.interface)]);\n\t\t\t\t\tf1.status=0;\n\t\t\t\t\tcout<<\"Record to be deleted is:\"<<f1.n1<<\" \"<<f1.n2<<endl;\n\t\t\t\t\tReceivefromHello(&f1);\n\t\t\t\t\tprintf(\"Record Deleted\");\n\t\t\t\t}\n\t\t\t\tf=fopen(hellofilename,\"r+w\");\n\t\t\t\tfseek(f,position,SEEK_SET);\n\t\t\t\t//////////////////////////Sachin Node delete call ( Data stored in temp )//////////////////////////////////////\n\t\t\t}\n\t\t}\n\t\n\t\tfclose(f);\n\t\thellolock=0;\n\t\tprintf(\"Hello Dead Checking Completed\\n\\n\\n\");\n\t}\n\treturn NULL;\n}\n"
},
{
"alpha_fraction": 0.5926948189735413,
"alphanum_fraction": 0.6154173016548157,
"avg_line_length": 25.576271057128906,
"blob_id": "ed950d90d3257945d158d2852e82c8818b49eadc",
"content_id": "d6c20015ec010c4ee8c85bce382ab4e90aaf71f7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4709,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 177,
"path": "/projects/minimal_ospf/Project_Final/main.cpp",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "#include<netinet/in.h>\n#include<errno.h>\n#include<netdb.h>\n#include<iostream>\n#include<stdio.h> \n#include<netinet/ip_icmp.h> \n#include<netinet/udp.h> \n#include<netinet/tcp.h> \n#include<netinet/ip.h> \n#include<sys/socket.h>\n#include<arpa/inet.h>\n#include<sys/ioctl.h>\n#include<sys/time.h>\n#include<sys/types.h>\n#include<unistd.h>\n#include<stdlib.h> \n#include<cstdlib>\n#include<time.h>\n#include<string.h>\n#include <pthread.h>\n#include <net/if.h>\n\nusing namespace std;\n#include \"ospfheader.h\"\n//#include \"unp.h\"\n//#include \"unpifi.h\"\n\n#include \"Global.h\"\nvoid addnetworkinfo();\n\n//////////////////////////////Interface Information/////////////////////////////////////////\n\n////////////////////////////////////////////////////////////////////////////////////\n#include \"floodingheader.h\"\n//#include \"exchangedatabase.h\"\n#include \"exchangeheader.h\"\n//#include \"info.h\"\n//#include \"exchangeunicast.h\"\nvoid *broadcast(struct floodingheader temp,unsigned int MyinterfaceIP);\n\n#include \"exchangemain.h\"\n#include \"selectimplementation.h\"\n#include \"floodbroadcast.h\"\n#include \"ReceivefromHello.h\"\n#include \"hello.h\" \n#include \"ReceivefromMain.h\"\n#include \"exchangeonreceive.h\"\nvoid addnetworkinfo();\n\nint main(int n,char *s[20])\n{\n/////////////////////////////Setting up interfaces by command line//////////////////////////////////////\n\tif(n==1){\n\t\tprintf(\"Usage : ./program interface1_ip interface1_netmask interface2_ip interface2_netmask........\\n\");\n\t\treturn 0;\n\t} \n\tn--;\n\tinterfaces=n/3;\n\tint j=0;\n\tint counter1=0;\n\tint counter2=0;\n\tchar *temporary;\n\tfor(int i=0;i<interfaces;i++)\n\t{\n\t\ttemporary=(char *)malloc(strlen(s[j+1]+1));\n\t\ttemporary[strlen(s[j+1])]='\\0';\n\t\tstrncpy(temporary,s[j+1],strlen(s[j+1]));\n\t\t//printf(\"Temporary %s %s\\n\",s[j+1],temporary);\t\t\n\t\tj++;\n\t\tif(strcmp(temporary,\"0\")==0)\n\t\t{\t\t\t\t\n\t\t\tinterface[counter1]=(char *)malloc(strlen(s[j+1])+1);\n\t\t\tinterface[counter1][strlen(s[j+1])]='\\0';\n\t\t\tstrncpy(interface[counter1],s[j+1],strlen(s[j+1]));\n\t\t\tj++;\n\t\t\tnetmask[counter1]=(char *)malloc(strlen(s[j+1])+1);\n\t\t\tnetmask[counter1][strlen(s[j+1])]='\\0';\n\t\t\tstrncpy(netmask[counter1],s[j+1],strlen(s[j+1]));\n\t\t\tj++;\n\t\t\tcounter1++;\n\t\t} else {\t\n\t\t\tnetworkip[counter2]=(char *)malloc(strlen(s[j+1])+1);\n\t\t\tnetworkip[counter2][strlen(s[j+1])]='\\0';\n\t\t\tstrncpy(networkip[counter2],s[j+1],strlen(s[j+1]));\n\t\t\tj++;\n\t\t\tnetworkmask[counter2]=(char *)malloc(strlen(s[j+1])+1);\n\t\t\tnetworkmask[counter2][strlen(s[j+1])]='\\0';\n\t\t\tstrncpy(networkmask[counter2],s[j+1],strlen(s[j+1]));\n\t\t\tj++;\n\t\t\tcounter2++;\n\t\t}\n\t\tfree(temporary);\t\t\t\t\t\n\t}\n\tnetworklink=counter2;\n\tinterfaces=counter1;\n\tprintf(\"interfaces %d networklink %d\\n\",interfaces,networklink);\n\tprintf(\"Our Router Id %u \\n\",routerid);\n\taddnetworkinfo();\n////////////////////////////////////////////////////////////////////////////////////////////////////////\t\n\t\t\n///////////////////////Thread for Broadcasting/////////////////////////////\n\tpthread_t hbroadcast,hdead,pth;\n\t\n\tpthread_create(&hbroadcast,NULL,broadcast,asd);\n\tpthread_create(&hdead,NULL,hellodead,asd);\n\tpthread_create(&pth,NULL,selectcode,asd);\t\t\n\tint saddr_size;\n\tstruct sockaddr_in saddr,source;\n\tstruct in_addr in;\n\t \n\tprintf(\"Starting...\\n\");\n\tint sock_raw = socket(AF_INET , SOCK_RAW , usedprotocol);\n\tif(sock_raw < 
0)\n\t{\n\t\tprintf(\"Socket Errorn\");\n\t\treturn 1;\n\t}\n\tsource.sin_family=AF_INET;\n\tsource.sin_port=htons(2888);\n\tsource.sin_addr.s_addr=INADDR_BROADCAST;\n\tmemset(&(source.sin_zero),'\\0',8);\n\tbind(sock_raw,(struct sockaddr *)&source,sizeof(source));\n\twhile(1)\n\t{\n\t\tsaddr_size = sizeof saddr;\n\t\tdata_size =recvfrom(sock_raw , buffer123 , 6553 , 0 ,(struct sockaddr *)&saddr ,(socklen_t *)&saddr_size);\n\t\tif(data_size <0 )\n\t\t{\n\t\t\tprintf(\"Recvfrom error , failed to get packetsn\");\n\t\t\treturn 1;\n\t\t}\n\t\tstruct ospfheader *ospf=(struct ospfheader *)(buffer123+sizeof(struct iphdr));\n\t\tpthread_t pths;\t\n\t\tif(ospf->type==1)\n\t\t{ \n\t\t\tpthread_create(&pths,NULL,onReceiveHello,asd);\n\t\t} else if(ospf->type==4) {\n\t\t\tprintf(\"Flood Packet Received\\n\\n\");\t\t\t\n\t\t\tpthread_create(&pths,NULL,onReceiveFlood,asd);\n\t\t}\n\t\tsleep(1);\n\t}\n\tclose(sock_raw);\n\tprintf(\"Finished\");\n\treturn 0;\n} \n\nvoid addnetworkinfo()\n{\n\t\n\tFILE *fp;\n\twhile (floodlock==1) { printf(\"Adding Network Link Sleeping\\n\"); sleep(2); }\n\tfloodlock=1;\n\tif(fp=fopen(FloodDb,\"r+b\")) {\n\t\tfclose(fp);\n\t\tfloodlock=0;\n\t\treturn;\n\t} else {\n\t\tfp=fopen(FloodDb,\"w+b\");\n\t}\n\tstruct floodingheader temp;\n\tfor(int i=0;i<networklink;i++)\n\t{\n\t\ttemp.rid1=routerid;\n\t\ttemp.n1=inet_addr(networkip[i]);\n\t\ttemp.mask1=inet_addr(networkmask[i]);\n\t\ttemp.rid2=inet_addr(networkip[i]);\n\t\ttemp.n2=temp.n1;\n\t\ttemp.mask2=temp.mask1;\n\t\ttemp.status=1;\n\t\ttemp.version=0;\n\t\tfwrite(&temp,sizeof(temp),1,fp);\n\t}\n\tfloodlock=0;\n\tfclose(fp);\n\treturn;\t\n} \n \n \n"
},
{
"alpha_fraction": 0.6322969794273376,
"alphanum_fraction": 0.6500634551048279,
"avg_line_length": 25.478992462158203,
"blob_id": "8884c011553c74eedabc01005bf66874a8b42fdd",
"content_id": "0555a728364ae758e905b6fe573841246b99668f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 3152,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 119,
"path": "/projects/minimal_ospf/Project_Final/ReceivefromMain.h",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "void ReceivefromMain(struct floodingheader *New,int ReceivedInterface)\n{\n\tint flag=0;\n\tFILE *fp;\n\twhile (floodlock==1){\n\t\tsleep(2);\n\t}\n\tfloodlock=1;\n\tif(fp=fopen(FloodDb,\"r+b\")) {\n\t} else {\n\t\tprintf(\"ERROR in W+B MAIN\\n\");\n\t\tfp=fopen(FloodDb,\"w+b\");\n\t}\n\t\n\tstruct floodingheader temp;\n\tprintf(\"------------Flooding Packet Received-------------\\n\");\n\twhile(fread(&temp,sizeof(temp),1,fp)) {\n\t\tif(((temp.n1==New->n1) && (temp.n2==New->n2)) || ((temp.n1==New->n2) && (temp.n2==New->n1))) {\n\t\t\tprintf(\"---------Flood Packet Received and DataBase Updated-----\\n\");\n\t\t\tflag=1; //Record exist in database\n\t\t\tif (New->version==temp.version) { cout<<\"Record with same version found\\n\"; floodlock=0; fclose(fp); return; }\n\t\t\tif (New->version<temp.version) {\n\t\t\t\t// Write code to send temp on the received interface\n\t\t\t\tfloodlock=0;\n\t\t\t\tfclose(fp);\t\t\t\t\n\t\t\t\tbroadcast(&temp,ReceivedInterface);\n\t\t\t\tsleep(2);\n\t\t\t\treturn;\n\t\t\t}\n\t\t\tif (New->version>temp.version) {\n\t\t\t\ttemp.status=New->status;\n\t\t\t\ttemp.version=New->version;\n\t\t\t\tfseek(fp,-1*sizeof(temp),SEEK_CUR);\n\t\t\t\tfwrite(&temp,sizeof(temp),1,fp);\n\t\t\t\tfclose(fp);\n\t\t\t\tfloodlock=0;\n\t\t\t\t// broadcast this now except on received interface\n\t\t\t\tfor(int i=0;i<interfaces;i++) {\n\t\t\t\t\tif(inet_addr(interface[i])==ReceivedInterface) continue;\n\t\t\t\t\telse { broadcast(&temp,inet_addr(interface[i])); sleep(2); }\n\t\t\t\t}\n\t\t\t\t\n\t\t\t\treturn;\n\t\t\t}\n\t\t}\n\t}\n\t// When record does not exist in database\n\tif(flag==0) {\n\t\tfseek(fp,0,SEEK_END);\n\t\tfwrite(New,sizeof(struct floodingheader),1,fp);\n\t\tprintf(\"---------------Flooding Record received and Record added in the database-----\\n\");\n\t\t// Now Broadcasting this\n\t\tfloodlock=0;\n\t\tfclose(fp);\n\t\tfor(int i=0;i<interfaces;i++) {\n\t\t\tif(inet_addr(interface[i])==ReceivedInterface) continue;\n\t\t\telse {broadcast(New,inet_addr(interface[i])); sleep(2);}\n\t\t}\n\t}\n}\n\nvoid *onReceiveFlood(void *argv)\n{\n\tunsigned char *buffer=(unsigned char *)malloc(sizeof(unsigned char)*data_size);\n\tint size=data_size;\n\tfor(int i=0;i<data_size;i++)\n\t{\n\t\tbuffer[i]=buffer123[i];\n\t}\n\tstruct iphdr *ip=(struct iphdr *)buffer;\t\n\tif(!correctdestination(ip)){\n\t\t\t\t\n\t\treturn NULL;\t\n\t}\n\tip->tot_len=size;\t\n\tif(ip->tot_len<(sizeof(struct iphdr)+sizeof(struct ospfheader))){\n\t\tprintf(\"Ospf header doesn't exist \\n\");\n\t\t\n\t\treturn NULL;\n\t}\n\tstruct ospfheader *ospf=(struct ospfheader *)(buffer+sizeof(struct iphdr));\n\t\t\n\tif(ospf->auth1!=1234)\n\t{\n\t\tprintf(\"Authentication Error \\n\");\n\t\t\n\t\treturn NULL;\n\t}\t\n\tif((int)ospf->type!=4)\n\t{\n\t\t\n\t\treturn NULL;\n\t}\n\tif(ip->tot_len<=sizeof(struct iphdr)+sizeof(struct ospfheader)){\n\t\tprintf(\"No Flood header is there\\n\");\n\t\t\n\t\treturn NULL;\n\t}\n\tstruct floodingheader *flood=(struct floodingheader *)(buffer+sizeof(struct iphdr)+sizeof(struct ospfheader));\n\t\n\tstruct floodingheader temp;\n\n\ttemp.rid1=flood->rid1;\n\ttemp.n1=flood->n1;\n\ttemp.mask1=flood->mask1;\n\ttemp.rid2=flood->rid2;\n\ttemp.n2=flood->n2;\n\ttemp.mask2=flood->mask2;\n\ttemp.status=flood->status;\n\ttemp.version=flood->version;\n//\tcout<<temp.mask1<<\" \"<<temp.mask2<<endl;\n\tunsigned int ip1=ip->saddr;\n\tint index=checkinterface(ip1);\n\tip1=inet_addr(interface[index]);\n\t//free(buffer);\n\tReceivefromMain(&temp,ip1);\n\t\t\n\treturn NULL;\n}\n\n"
},
{
"alpha_fraction": 0.5797101259231567,
"alphanum_fraction": 0.5797101259231567,
"avg_line_length": 33.5,
"blob_id": "85802f8562ec717bfc40a3ec22c41b75d5f2dd6d",
"content_id": "87bc74f1c9de8773fbc5306007d2033eb3e3b4ef",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 138,
"license_type": "permissive",
"max_line_length": 60,
"num_lines": 4,
"path": "/boilerplates/chrome_bp/README.md",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "Requestly (Work In Progress Project) \n====================================\n\nRequestly - A Chrome extension to play with network requests\n"
},
{
"alpha_fraction": 0.3451327383518219,
"alphanum_fraction": 0.7079645991325378,
"avg_line_length": 36.66666793823242,
"blob_id": "65079d45d669d6707fa04094f966654c62bee741",
"content_id": "782222de38a3e2ea498d10c581edf4120e0ee333",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 113,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 3,
"path": "/projects/minimal_ospf/Project_Final/script.sh",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "clear\nc++ main.cpp -pthread -o routing\nsudo ./routing 0 172.21.1.124 255.255.252.0 0 192.168.0.123 255.255.255.0\n"
},
{
"alpha_fraction": 0.6896551847457886,
"alphanum_fraction": 0.7586206793785095,
"avg_line_length": 57,
"blob_id": "24c09ebf297869a3894c1e585cb24499b464fe26",
"content_id": "15b7475fea882ca0519d00b1544c2f664931406a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 58,
"license_type": "permissive",
"max_line_length": 57,
"num_lines": 1,
"path": "/boilerplates/chrome_bp/docs/ReadMe.txt",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "Icon Source - http://www.rw-designer.com/icon-detail/7209\n"
},
{
"alpha_fraction": 0.522857129573822,
"alphanum_fraction": 0.5285714268684387,
"avg_line_length": 14.909090995788574,
"blob_id": "b578f6f991e8a236d81d08de1c13a73a133fa05d",
"content_id": "fc56d62c08c77ae5ff4f96d8b2dc57f0bf10a33e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 350,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 22,
"path": "/php/basic/11_getcookie.php",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "<html>\n <head>\n <title> Displaying Cookie </title>\n </head>\n\n <body>\n <h1><center><u>Displaying Cookie </u></center></h1>\n\n <?php\n print_r($_COOKIE);\n echo \"<hr>\"; \n\n if (isset($_COOKIE['userInfo'])) {\n echo $_COOKIE['userInfo'];\n } else {\n echo \"User information not present in cookie\";\n }\n\n ?>\n\n </body>\n</html>\n"
},
{
"alpha_fraction": 0.39729398488998413,
"alphanum_fraction": 0.4329643249511719,
"avg_line_length": 30.269229888916016,
"blob_id": "71ebd93d62dd42be8c180e2d61de01e233706126",
"content_id": "122cb04eb4392b57064209e6fd959926f5be9465",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 813,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 26,
"path": "/php/basic/4_numbers.php",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "<html>\n <head>\n <title> Numbers in PHP </title>\n </head>\n\n <body>\n <h1><center><u> Using Numbers in PHP </u></center></h1>\n\n <!-- Variable scope -->\n <?php\n $pi = 22/7;\n echo \"Initial value of PI:\" . $pi . \"<hr>\";\n ?>\n\n <?php\n echo \"1. Round to 4 decimal places-----\" . round($pi, 4) . \"<br>\";\n echo \"2. Ceil of PI--------------------\" . ceil($pi) . \"<br>\";\n echo \"3. Floor of PI-------------------\" . floor($pi) . \"<br>\";\n echo \"4. Ceiling PI--------------------\" . ceil($pi) . \"<br>\";\n echo \"5. Absolute Value of (-1 * PI)---\" . abs(-1 * $pi) . \"<br>\";\n echo \"6. Random Number-----------------\" . rand() . \"<br>\";\n echo \"7. Random Number between (5,20)--\" . rand(5, 20) . \"<br>\";\n echo \"8. Modulo operator--(22%7)-------\" . fmod(22, 7) . \"<br>\";\n ?>\n </body>\n</html>\n"
},
{
"alpha_fraction": 0.707602322101593,
"alphanum_fraction": 0.7426900863647461,
"avg_line_length": 16.100000381469727,
"blob_id": "b45d814e992b8e164c84e1efe970896f50e70c0a",
"content_id": "a40a64c6d3cd8b646fe68105ff978629f9fb3211",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 171,
"license_type": "no_license",
"max_line_length": 22,
"num_lines": 10,
"path": "/projects/minimal_ospf/Project_Final/floodingheader.h",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "struct floodingheader{\n\tunsigned int rid1;\n\tunsigned int n1;\n\tunsigned int mask1;\n\tunsigned int rid2;\n\tunsigned int n2;\n\tunsigned int mask2;\n\tint status;\n\tint version;\n};\n"
},
{
"alpha_fraction": 0.5724743604660034,
"alphanum_fraction": 0.5812591314315796,
"avg_line_length": 22.55172348022461,
"blob_id": "fead4b503a050692d3a5af702941b8d26cf93247",
"content_id": "c4a6832957f1e348cec2ab107d23b50ce2411309",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 683,
"license_type": "no_license",
"max_line_length": 133,
"num_lines": 29,
"path": "/php/basic/8_scope.php",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "<html>\n <head>\n <title> Global variables in PHP </title>\n </head>\n\n <body>\n <h1><center><u> Global Variables </u></center></h1>\n\n <?php\n // Declaring a global variable\n $foo = \"global\";\n echo \"Initial Value of global variable:\" . $foo . \"<hr>\";\n \n function updateGlobal() {\n $foo = \"Modified global\"; // Wrong: This actually creates a new variable with function scope and global variable is not touched\n }\n updateGlobal();\n echo \"Attempt1: \" . $foo . \"<hr>\";\n\n function updateGlobal2() {\n global $foo;\n $foo = \"Haha! modified global variable\";\n }\n updateGlobal2();\n echo \"Attempt2:\" . $foo . \"<hr>\";\n ?>\n\n </body>\n</html>\n"
},
{
"alpha_fraction": 0.6304348111152649,
"alphanum_fraction": 0.6304348111152649,
"avg_line_length": 22,
"blob_id": "f47f8e78f9b413041194691a5053852c5042362c",
"content_id": "7d9954552933ff92d92405d6a8c89a9ed9bd664e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 92,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 4,
"path": "/boilerplates/github_pages/README.md",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "blunderboy.github.io\n====================\n\nRepository for github pages for all my projects.\n"
},
{
"alpha_fraction": 0.7616580128669739,
"alphanum_fraction": 0.7772020697593689,
"avg_line_length": 47.25,
"blob_id": "a371042836d8127fc2f6ed567d5c436fdc725cf4",
"content_id": "d66ba6e8704f31b67c1add191b72cc1657801459",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 193,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 4,
"path": "/html5/fileApi/README.md",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "### webkitStorageInfo [Link to JSBIN](http://jsbin.com/omibih/1/edit)\n\n1. Requesting more space for usage in persistent/temporary storage\n2. Querying the amount of data used by current domain.\n"
},
{
"alpha_fraction": 0.48148149251937866,
"alphanum_fraction": 0.48148149251937866,
"avg_line_length": 26,
"blob_id": "abff62f06e2393e304642fe703dd8657bf3d6a87",
"content_id": "c68470a4fcac1dfba76881b7f991487ce9f91473",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 27,
"license_type": "permissive",
"max_line_length": 26,
"num_lines": 1,
"path": "/boilerplates/chrome_bp/src/pages/js/init.js",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "RQ.init({ el: '#rules' });\n"
},
{
"alpha_fraction": 0.5461538434028625,
"alphanum_fraction": 0.5538461804389954,
"avg_line_length": 12,
"blob_id": "146225bd5bde247c7c05b1ee404d25d06b71515a",
"content_id": "7c3842c353e403ce9710e5b9e563c542f4b5e9c9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 130,
"license_type": "no_license",
"max_line_length": 28,
"num_lines": 10,
"path": "/python/basic/00_bp.py",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\n# Author: Sachin Jain\n# Sample Program\n\ndef main():\n print('Hello! world')\n\nif (__name__ == \"__main__\"):\n main()\n"
},
{
"alpha_fraction": 0.4027777910232544,
"alphanum_fraction": 0.46064814925193787,
"avg_line_length": 17,
"blob_id": "dfc3f0a65f2343d33aa299258791a41954dc728b",
"content_id": "6b6dc9c7fb5d60703528e6114452d086695d76e0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 432,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 24,
"path": "/php/basic/2_strings.php",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "<html>\n <head>\n <title> Strings in PHP </title>\n </head>\n\n <body>\n <h1><center><u> Strings in PHP </u></center></h1>\n\n <?php\n $var1 = 5;\n $var2 = 10;\n \n echo \"$var1 hello world\" . \"<hr>\"; // Output: 5 sachin\n \n echo \"$var1 + $var2\" . \"<hr>\"; // Output: 5 + 10\n \n echo \"{$var1} $var2\" . \"<hr>\"; // Output: 5 10\n \n $var3 = $var1 + $var2;\n echo \"$var3\" . \"<hr>\"; //Output: 15\n ?>\n\n </body>\n</html>\n"
},
{
"alpha_fraction": 0.5723270177841187,
"alphanum_fraction": 0.5894796848297119,
"avg_line_length": 27.20967674255371,
"blob_id": "80e6ab573c27156e463773cc362aadb80e1d2736",
"content_id": "7b13bfe7c2a8b21d1e39fe9db2e51fa481553f1d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 3498,
"license_type": "no_license",
"max_line_length": 124,
"num_lines": 124,
"path": "/projects/minimal_ospf/Project_Final/exchangemain.h",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "unsigned char *buildingexchangepacket(int &size,unsigned int my,unsigned int other)\n{\n\twhile(floodlock==1)\n\t{\n\t\tprintf(\"Exchange Packet Sleeping\\n\");\n\t\tsleep(1);\n\t}\n\tfloodlock=1;\n\n\tFILE *f;\n\tf=fopen(FloodDb,\"r\");\n if(f==NULL){\n\t\tfloodlock=0;\n\t\treturn NULL;\n }\n\t\n\tfseek(f,0,SEEK_END);\n\tint filesize=ftell(f);\n\tstruct floodingheader temp;\n\tfilesize=(filesize/(sizeof(struct floodingheader)));\n\tunsigned char *exchange;\n\tsize=sizeof(struct iphdr)+sizeof(struct ospfheader)+sizeof(struct exchangeheader)+(filesize)*sizeof(struct floodingheader);\n\texchange=(unsigned char *)malloc(size);\n\n//////////////////////////////////////ip header ////////////////////////////////////\n\t\n\tstruct iphdr *ip=(struct iphdr *)(exchange);\n\tip->ihl = 5;\n\tip->version = 4;\n\tip->tos = 0;\n\tip->id = htons(random());\n\tip->ttl = 0;\n\tip->protocol = usedprotocol;\n\tip->saddr = my;\n\tip->daddr = other;\n\tip->check = 0;\n\n/////////////////////////////////////////ospf header ////////////////////////////\n\n\tstruct ospfheader *ospf=(struct ospfheader *)(exchange+sizeof(struct iphdr));\n\tospf->version=2;\n\tospf->type=2; // Showing that it is Exchange packet\n\tospf->auth1=1234;\n\tospf->rid=routerid;\n\tospf->checksum=0;\n\tospf->atype=1;\n\n//////////////////////////////////////////hello header/////////////////////////////////\n\n\tstruct exchangeheader *ex=(struct exchangeheader *)(exchange+sizeof(struct iphdr)+sizeof(struct ospfheader));\n\tex->type=2;\n\tex->ddseq=globalddseq;\n\n//////////////////////////////////////////////////////////////////////////\n\n\tint pointer=sizeof(struct iphdr)+sizeof(struct ospfheader)+sizeof(struct exchangeheader);\n\tfseek(f,0,SEEK_SET);\n\tstruct floodingheader *flood;\n\tint i=0;\n\twhile(fread(&temp,sizeof(struct floodingheader),1,f))\n\t{ \n\t\tflood=(struct floodingheader *)(exchange+pointer);\n\t\tflood->rid1=temp.rid1;\t\n\t\tflood->n1=temp.n1;\n\t\tflood->mask1=temp.mask1;\n\t\tflood->rid2=temp.rid2;\n\t\tflood->n2=temp.n2;\n\t\tflood->mask2=temp.mask2;\n\t\tflood->status=temp.status;\n\t\tflood->version=temp.version;\n\t\tpointer+=sizeof(struct floodingheader);\n\t\ti++;\n\t\t\n\t}\n\tcout<<\"No of records in exchange database: \"<<i<<endl<<endl;\n\tfloodlock=0;\n\tfclose(f);\n\treturn exchange;\n}\n\nvoid exchangestart(unsigned int my,unsigned int other)\n{\n\tcout<<\"Exchangestart method called\\n\";\t\n\tint size=0;\n\tunsigned char *exchange=buildingexchangepacket(size,my,other);\t\t\n\tif(exchange==NULL) return;\n\tint sockfd=socket(AF_INET,SOCK_RAW,usedprotocol); \n\tif(sockfd < 0)\n\t{\n\t \tprintf(\"Socket Error\\n\");\n\t\treturn;\n\t}\n\tconst int on=1;\n\tif(setsockopt(sockfd,IPPROTO_IP,IP_HDRINCL,&on,sizeof(on))<0)\n\t{\n\t\tprintf(\"Error in setsocket\\n\");\t\t\t\t\n\t\treturn;\n\t}\t\n\n\t////////////////////////////// binding ///////////////////////////\n\tstruct sockaddr_in source,saddr;\n\n\tsource.sin_family=AF_INET;\n\tsource.sin_port=htons(1000);\n\tsource.sin_addr.s_addr=my;\n\tmemset(&(saddr.sin_zero), '\\0', 8);\n\tbind(sockfd,(struct sockaddr *)&source,sizeof(source));\n\n\tint saddr_size=sizeof(struct sockaddr);\n\tsaddr.sin_family=AF_INET;\n\tsaddr.sin_port=htons(1000);\n\tsaddr.sin_addr.s_addr=other;\n\tmemset(&(saddr.sin_zero), '\\0', 8);\n\t\n\tint data_size123=sendto(sockfd, exchange , size , 0 ,(struct sockaddr *)&saddr , saddr_size);\n\tif(data_size123 <0 )\n\t{\n\t\tprintf(\"send to error , failed to sent packetsn\");\n\t} else {\n\t\tprintf(\"EXCHANGE Packet 
Sent\\n\");\n\t}\n\tclose(sockfd);\n\treturn;\n}\n"
},
{
"alpha_fraction": 0.5367516279220581,
"alphanum_fraction": 0.5604623556137085,
"avg_line_length": 34.89361572265625,
"blob_id": "72e2632c93c716c0a1359cd34967512d8980fb1f",
"content_id": "0ec76de0ba29174b6db679b21b2df73ecc23a255",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 3374,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 94,
"path": "/projects/minimal_ospf/Project_Final/floodbroadcast.h",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "unsigned char *BuildFloodPacket(struct floodingheader *temp,int &size)\n{\n\tunsigned char *broadcast;\n\tsize=sizeof(struct iphdr)+sizeof(struct ospfheader)+sizeof(struct floodingheader);\n\tbroadcast=(unsigned char *)malloc(size);\n//////////////////////////////////////ip header ////////////////////////////////////\n\tstruct iphdr *ip=(struct iphdr *)(broadcast);\n\tip->ihl = 5;\n\tip->version = 4;\n\tip->tos = 0;\n\tip->id = htons(random());\n\tip->ttl = 0;\n\tip->protocol = usedprotocol;\n\tip->saddr = temp->n1;\n\tip->daddr = INADDR_BROADCAST;\n\t//ip->daddr =inet_addr(\"172.21.3.255\");\n\tip->check = 0;\n\n/////////////////////////////////////////ospf header ////////////////////////////\n\tstruct ospfheader *ospf=(struct ospfheader *)(broadcast+sizeof(struct iphdr));\n\tospf->version=2;\n\tospf->type=4; // Showing that it is a flood packet\n\tospf->auth1=1234;\n\tospf->rid=routerid;\n\tospf->checksum=0;\n\tospf->atype=1;\n//////////////////////////////////////////hello header/////////////////////////////////\n\tstruct floodingheader *flood=(struct floodingheader *)(broadcast+sizeof(struct iphdr)+sizeof(struct ospfheader));\n\tflood->rid1=temp->rid1;\t\n\tflood->n1=temp->n1;\n\tflood->mask1=temp->mask1;\n\tflood->rid2=temp->rid2;\n\tflood->n2=temp->n2;\n\tflood->mask2=temp->mask2;\n\tflood->status=temp->status;\n\tflood->version=temp->version;\n//////////////////////////////////////////////////////////////////////////\n\n\treturn broadcast;\n}\n\nvoid *broadcast(struct floodingheader *temp,unsigned int MyinterfaceIP)\n{\n\tstruct sockaddr_in source,saddr;\n\tint saddr_size,data_size64,size=0;\n\tunsigned char *broadcast=BuildFloodPacket(temp,size);\t\t\n\tstruct iphdr *ip=(struct iphdr *)(broadcast);\n\n\t////////////////////////////////broacdcasting////////////////////////////\t\t\n\tint sockfd=socket(AF_INET,SOCK_RAW,usedprotocol); \n\tif(sockfd < 0) {\n\t \tprintf(\"Socket Error\\n\");\t\n\t\treturn NULL;\n\t}\n\tconst int on=1;\n\tif(setsockopt(sockfd,IPPROTO_IP,IP_HDRINCL,&on,sizeof(on))<0) {\n\t\tprintf(\"Error in setsocket\\n\");\n\t\treturn NULL;\n\t}\n\t/////////////////////////////////\n\tint z;\n\tint so_broadcast;\n\tso_broadcast = 1;\n\tz = setsockopt(sockfd,SOL_SOCKET,SO_BROADCAST,&so_broadcast,sizeof so_broadcast);\n\tif ( z<0 ){\n\t\tperror(\"setsockopt broadcast error\");\n\t\t\n\t\treturn NULL;\n\t}\n\t/////////////////////////////// binding ///////////////////////////\n\tsource.sin_family=AF_INET;\n\tsource.sin_port=htons(1000);\n\tsource.sin_addr.s_addr=MyinterfaceIP;//inet_addr(\"172.19.5.231\");//INADDR_BROADCAST;\n\tmemset(&(saddr.sin_zero), '\\0', 8);\n\tbind(sockfd,(struct sockaddr *)&source,sizeof(source));\n\t\t\n\tip->saddr=MyinterfaceIP; ////ip source\t\t\t\n\tip->tot_len=size; //ip length\n\t\t\n\t/////////////////////////////broadcastin//////////////\t\t\t\n\tsaddr_size=sizeof(struct sockaddr);\n\tsaddr.sin_family=AF_INET;\n\tsaddr.sin_port=htons(1000);\n\tsaddr.sin_addr.s_addr=INADDR_BROADCAST;//inet_addr(\"172.19.5.231\");//INADDR_BROADCAST;\n\tmemset(&(saddr.sin_zero), '\\0', 8);\n\tdata_size64 = sendto(sockfd, broadcast , size , 0 ,(struct sockaddr *)&saddr , saddr_size);\n\tif(data_size64 <0 ) printf(\"sento error , failed to sent packetsn\");\n\telse printf(\"Packet Sent\\n\");\n\t\t\n\tclose(sockfd);\n\t//free(broadcast);\t\n\tprintf(\"Flooding Broadcasting Compeleted\\n\");\t\n\treturn NULL;\n}\n"
},
{
"alpha_fraction": 0.5384615659713745,
"alphanum_fraction": 0.5512820482254028,
"avg_line_length": 12,
"blob_id": "b2fb2c4959a6b1c9f0feca29650bfb6bd70a605a",
"content_id": "1753469df0082a0da1120a4e6c489653e45c29a5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 234,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 18,
"path": "/php/basic/13_headers.php",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "<?php\n header(\"location: 0_html.php\");\n?>\n\n<html>\n <head>\n <title> Header Redirection </title>\n </head>\n\n <body>\n <h1><center><u> Header Redirection </u></center></h1>\n\n <?php\n echo \"Infinte loop\";\n ?>\n\n </body>\n</html>\n"
},
{
"alpha_fraction": 0.7611940503120422,
"alphanum_fraction": 0.7611940503120422,
"avg_line_length": 15.75,
"blob_id": "4049558b12ff7b347e7c2bfc39b461c64cc9b07e",
"content_id": "a7f5ae35f989bbc41d63179bf839bcb7700dac0b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 67,
"license_type": "no_license",
"max_line_length": 22,
"num_lines": 4,
"path": "/projects/minimal_ospf/Project_Final/exchangeheader.h",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "struct exchangeheader{\n\tunsigned int type;\n\tunsigned int ddseq;\n};\n"
},
{
"alpha_fraction": 0.7117117047309875,
"alphanum_fraction": 0.7297297120094299,
"avg_line_length": 16,
"blob_id": "b81546e4e81866668ac6da695ad285d20ef2ebc4",
"content_id": "d0abd923b69c7df629e4cf7c69f1199a52622f0e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 222,
"license_type": "no_license",
"max_line_length": 25,
"num_lines": 13,
"path": "/projects/minimal_ospf/Project_Final/ospfheader.h",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "\n//// size is 24 byte\nstruct ospfheader\n{\n\tchar version;\n\tchar type;\n\tunsigned short len;\n\tunsigned int rid;\n\tunsigned int aid;\n\tunsigned short checksum;\n\tunsigned short atype;\n\tunsigned int auth1;\n\tunsigned int auth2;\n};\n"
},
{
"alpha_fraction": 0.6070038676261902,
"alphanum_fraction": 0.6070038676261902,
"avg_line_length": 17.428571701049805,
"blob_id": "c265e10523209cc6ffa0698e1a602e841d830046",
"content_id": "2f5648385ee33362d910d88ddd12b367f6459579",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 257,
"license_type": "permissive",
"max_line_length": 49,
"num_lines": 14,
"path": "/boilerplates/chrome_bp/src/pages/js/router.js",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "RQ.Router = Backbone.Router.extend({\n routes: {\n '': 'showRulesList',\n 'new': 'showRuleEditor'\n },\n\n showRulesList: function() {\n alert('Rules List');\n },\n\n showRuleEditor: function() {\n alert('Rules Editor will be displayed here');\n }\n});"
},
{
"alpha_fraction": 0.5119735598564148,
"alphanum_fraction": 0.5202311873435974,
"avg_line_length": 39.36666488647461,
"blob_id": "db3897fa206e25b7d75cb0191b46781057c97762",
"content_id": "8bc55fc1265cf48a709da641f286aaf3fcee1541",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1211,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 30,
"path": "/php/basic/3_string_functions.php",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "<html>\n <head>\n <title> String Functions in PHP </title>\n </head>\n\n <body>\n <h1><center><u>String Functions in PHP </u></center></h1>\n\n <!-- Using different scope for variables for no good reason -->\n <?php\n $first_str = \"sachin tendulkar\";\n $second_str = \" is crazy about CRICKET \";\n $third_str = \"$first_str\" . \"$second_str\";\n ?>\n\n <?php\n echo \"Initial First String:\" . \"$first_str\" . \"<hr>\";\n echo \"Initial Second String:\" . \"$second_str\" . \"<hr>\";\n\n echo \"1. Concatenation using period (.)---------: \" . \"$third_str\" . \"<hr>\";\n echo \"2. Lower case (strtolower)----------------: \" . strtolower($third_str) . \"<hr>\";\n echo \"3. Upper case (strtoupper)----------------: \" . strtoupper($third_str) . \"<hr>\";\n echo \"4. Words Capitalized (ucwords)------------: \" . ucwords($third_str) . \"<hr>\";\n echo \"5. First letter Capitalized (ucfirst)-----: \" . ucfirst($third_str) . \"<hr>\";\n echo \"6. String Length (strlen)-----------------: \" . strlen($third_str) . \"<hr>\";\n echo \"7. String trim for white spaces(trim)-----: \" . ($trimmed = trim($third_str)) . \"<hr>\";\n echo \"8. Length after trimming------------------: \" . strlen($trimmed) . \"<hr>\";\n ?>\n </body>\n</html>\n"
},
{
"alpha_fraction": 0.46666666865348816,
"alphanum_fraction": 0.4787878692150116,
"avg_line_length": 35.77777862548828,
"blob_id": "cc2bfa1fa543d5c0cbc953f1c55a234d150cee8b",
"content_id": "866021314deb1ada066c3b34c64e7908e2f51d42",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 330,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 9,
"path": "/shellscript/home/aliases",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\n# -----------------------------------------------------------\n# Add these lines in ~/.bashrc to expand the aliases\n# shopt -s expand_aliases\n# source ~/personal/repo/sandbox/shellscript/office/aliases\n# -----------------------------------------------------------\n\nalias python-server=\"python -m SimpleHTTPServer 7070\""
},
{
"alpha_fraction": 0.4736842215061188,
"alphanum_fraction": 0.4736842215061188,
"avg_line_length": 18.25,
"blob_id": "b0a3ef697d2a0362c3c02b89894a188b3cf2ad7a",
"content_id": "777d3b10af797660a63236fc8a47cb283a18f1cb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 76,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 4,
"path": "/shellscript/tabToSpace.sh",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "for i in *\ndo\n sed 's/\\t/ /g' \"${i}\" > temp.txt && mv temp.txt \"${i}\"\ndone"
},
{
"alpha_fraction": 0.75,
"alphanum_fraction": 0.75,
"avg_line_length": 14.5,
"blob_id": "bfc6551743a611e63bf130fb6b081a9e5efd55a7",
"content_id": "2f31d3b271247f6de0e50e69bfeadd51113f22c7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 124,
"license_type": "no_license",
"max_line_length": 24,
"num_lines": 8,
"path": "/projects/minimal_ospf/Project_Final/hellodatabase.h",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "struct hellodatabase\n{\n\tunsigned int rid;\n\tunsigned int interface;\n\tunsigned int netmask;\n\ttime_t time;\n\tint connection;\n};\n"
},
{
"alpha_fraction": 0.54666668176651,
"alphanum_fraction": 0.5600000023841858,
"avg_line_length": 17.75,
"blob_id": "65542089c3748c1f97876d3259ab4c030cf952fc",
"content_id": "807768e70eea6f818cda1d1624c637af0165668e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 75,
"license_type": "no_license",
"max_line_length": 40,
"num_lines": 4,
"path": "/php/basic/11_removecookie.php",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "<?php\n setcookie(\"userInfo\", \"\", time() - 1);\n echo \"Cookie Removed\";\n?>\n"
},
{
"alpha_fraction": 0.5797819495201111,
"alphanum_fraction": 0.6045589447021484,
"avg_line_length": 18.423076629638672,
"blob_id": "036910340083015670b076c8006bfcca6e3e8343",
"content_id": "cd30afe93bdd07e19f516531af47df0092bbe51d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1009,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 52,
"path": "/coding/codeforces/187_2a_seregaBottles.cpp",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "/**\n @Author: Sachin Jain\n @Program: #187 Div2 A: Serege and Bottles\n @Link: http://codeforces.com/contest/315/problem/A\n**/\n#include <iostream>\n#include <stdio.h>\n#include <string.h>\n#include <string>\n#include <vector>\n#include <map>\n#include <list>\n#include <sstream>\n\n//Define all shortcut (Macros here)\n#define F(i,n) for (int i=0; i<n; i++)\n#define FF(i,a,n,c) for(int i=a; i<n; i+=c)\n\nusing namespace std;\n\nint main() {\n int n;\n \n int carriedBottles[101];\n int possibleToOpenBottles[101];\n cin >> n;\n\n int a, b;\n FF(i, 1, n+1, 1) {\n cin >> a >> b;\n carriedBottles[i] = a;\n possibleToOpenBottles[i] = b;\n }\n\n int numUnopenedBottles = 0;\n FF (i, 1, n+1, 1) {\n bool canThisBottleBeOpened = false;\n FF(j, 1, n+1, 1) {\n if (i == j) continue;\n if (carriedBottles[i] == possibleToOpenBottles[j]) {\n canThisBottleBeOpened = true;\n break;\n }\n }\n\n if (!canThisBottleBeOpened) numUnopenedBottles++;\n }\n\n cout << numUnopenedBottles << endl;\n\n return 0;\n}"
},
{
"alpha_fraction": 0.7152777910232544,
"alphanum_fraction": 0.7152777910232544,
"avg_line_length": 27.799999237060547,
"blob_id": "2c60b3cb0a7e313b413176ecf8c926c0bfa1ae1b",
"content_id": "214587d787c576667018333fbb4ac34309397626",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 144,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 5,
"path": "/node/01_module.js",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "var myModule = function(moduleNumber) {\n console.log('Module:' + moduleNumber + ' initialized successfully!!');\n};\n\nmodule.exports = myModule;\n"
},
{
"alpha_fraction": 0.5633223652839661,
"alphanum_fraction": 0.5814144611358643,
"avg_line_length": 18.317461013793945,
"blob_id": "3fb7f382d82493c23ec3a2f694aa2780341befdc",
"content_id": "8791864002fcaf8e17386a49bebe85c95b126cdb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1216,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 63,
"path": "/coding/codeforces/187_2b_seregaArray.cpp",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "/**\n @Author: Sachin Jain\n @Program: Codeforce #187 Div2 - B Sereja and Array\n @Link: http://codeforces.com/contest/315/problem/B\n**/\n\n#include <iostream>\n#include <stdio.h>\n#include <string>\n#include <vector>\n#include <map>\n#include <list>\n#include <sstream>\n\n//Define all shortcut (Macros here)\n#define F(i,n) for (int i=0; i<n; i++)\n#define FF(i,a,n,c) for(int i=a; i<n; i+=c)\n\nusing namespace std;\n\nvoid assign(int a[], int offset) {\n int index, value;\n // cin >> index >> value;\n scanf(\"%d %d\", &index, &value);\n a[index] = value - offset;\n}\n\nint increment(int current_offset) {\n int offset;\n scanf(\"%d\", &offset);\n\n return (current_offset + offset);\n}\n\nvoid print(int a[], int offset) {\n int index;\n // cin >> index;\n scanf(\"%d\", &index);\n //cout<< a[index] << endl;\n printf(\"%d\\n\", a[index] + offset);\n}\n\nint main() {\n int a[100001];\n int arr_size, num_operations, item, op;\n int offset = 0;\n\n cin >> arr_size >> num_operations;\n FF(i,1, arr_size+1, 1) {\n //cin >> item;\n scanf(\"%d\", &item);\n a[i] = item;\n }\n\n F(i, num_operations) {\n cin >> op;\n if (op == 1) assign(a, offset);\n if (op == 2) offset = increment(offset);\n if (op == 3) print(a, offset);\n }\n\n return 0;\n}"
},
{
"alpha_fraction": 0.678918182849884,
"alphanum_fraction": 0.6796116232872009,
"avg_line_length": 21.200000762939453,
"blob_id": "c8ddaa0e8229fb5dffff5c84e964e13bd2bb8449",
"content_id": "ba31c3b07a8b7342be1e7912a1bd7b02d95979e7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 1442,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 65,
"path": "/javascript/testframework/js/main.js",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "var resultsElement;\n\nfunction assert(expressionValue, description) {\n var ul = resultsElement ? resultsElement : document.getElementById('results'),\n li = document.createElement('li'),\n status = expressionValue ? 'Passed' : 'Failed ';\n\n li.className = status;\n li.innerHTML = status + ': ' + description;\n\n ul.appendChild(li);\n return li;\n}\n\nfunction test(description, callback) {\n var results = document.getElementById('results-tests'),\n ul = document.createElement('ul'),\n descriptionLi = document.createElement('li');\n\n descriptionLi.className = 'test-description';\n descriptionLi.innerHTML = description;\n\n results.appendChild(descriptionLi);\n resultsElement = results.appendChild(ul);\n\n callback();\n}\n\nvar queue = [],\n isPaused = false;\n\nfunction testAysnc(description, callback) {\n queue.push(function() {\n var results = document.getElementById('results-async-tests'),\n ul = document.createElement('ul'),\n descriptionLi = document.createElement('li');\n\n descriptionLi.className = 'test-description';\n descriptionLi.innerHTML = description;\n\n results.appendChild(descriptionLi);\n resultsElement = results.appendChild(ul);\n callback();\n });\n runTest();\n}\n\nfunction pause() {\n isPaused = true;\n}\n\nfunction resume() {\n isPaused = false;\n runTest();\n}\n\nfunction runTest() {\n if (!isPaused && queue.length > 0) {\n queue.shift()();\n\n if (!isPaused) {\n resume();\n }\n }\n}"
},
{
"alpha_fraction": 0.5846036672592163,
"alphanum_fraction": 0.5952743887901306,
"avg_line_length": 23.314815521240234,
"blob_id": "59c1475cf4ddd7a249729601ebcd0a771f80dcd9",
"content_id": "a8015db9213bb06e3e755e5be3d9cb37b5df4ad9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 1312,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 54,
"path": "/php/basic/14_db_interaction.php",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "<?php\n/****** 5 Step Story ***********\n1. Create a connection\n2. Select the daspacespacease\n3. Fire the query\n4. Use query result\n5. Close the connection\n*******************************/\n\n// Step1. Create a connection\n$con = mysql_connect(\"localhost\", \"root\", \"root\");\nif (!$con) {\n die(\"Daspacespacease connection could not be esspacespacelished\" . mysql_error());\n}\n\n// Step2. Select the daspacespacease\n$db_select = mysql_select_db(\"widget_corp\", $con);\nif (!$db_select) {\n die(\"Daspacespacease could not be selected\" . mysql_error());\n}\n?>\n\n<html>\n <head>\n <title> Daspacespacease Interaction (MySQL + PHP) </title>\n </head>\n\n <body>\n <h1><center><u>Daspacespacease Interation (MySQL + PHP) </u></center></h1>\n\n <?php\n // Step3: Querying from Daspacespacease\n $result = mysql_query(\"SELECT * from subjects\", $con);\n if (!$result) {\n die(\"Error in firing query on daspacespacease\" . mysql_error());\n }\n\n // Step4: Using query result\n /* $row is an associative array as well So both types of indexes work on it\n Numeric indexes as well as Keys both work */\n while ($row = mysql_fetch_array($result)) {\n echo $row[\"id\"] . \" \" .\n $row[\"menu_name\"] . \" \" .\n $row[2] . \" \" .\n $row[3] . \"<hr>\";\n }\n ?>\n\n </body>\n</html>\n\n<?php\n mysql_close($con);\n?>"
},
{
"alpha_fraction": 0.6515892148017883,
"alphanum_fraction": 0.6625916957855225,
"avg_line_length": 21.054054260253906,
"blob_id": "49e00ec3f649b4a2cc68e573cfcbcb60a90673f8",
"content_id": "7bd626f30705a0ad1ac9697494a45bd152e7f62b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 818,
"license_type": "no_license",
"max_line_length": 139,
"num_lines": 37,
"path": "/projects/minimal_ospf/Project_Final/Print_2.cpp",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "#include <iostream>\n#include <stdio.h>\n#include<netinet/in.h>\n#include<errno.h>\n#include<netdb.h>\n#include<netinet/ip_icmp.h> \n#include<netinet/udp.h> \n#include<netinet/tcp.h> \n#include<netinet/ip.h> \n#include<sys/socket.h>\n#include<arpa/inet.h>\n#include<sys/ioctl.h>\n#include<sys/time.h>\n#include<sys/types.h>\n#include<unistd.h>\n#include<stdlib.h> \n#include<cstdlib>\n#include<time.h>\n#include<string.h>\n#include <pthread.h> \n#include \"floodingheader.h\"\n#include \"Global.h\"\n\nusing namespace std;\n\nint main()\n{\n\tFILE *fp;\n\tif(fp=fopen(\"FloodingDataBase.bin\",\"r+b\")) {\n\t} else return 0;\n\tfloodingheader temp;\n\t\n\twhile (fread(&temp,sizeof(temp),1,fp)) {\n\t\tcout<<temp.rid1<<\" \"<<temp.n1<<\" \"<<temp.mask1<<\" \"<<temp.rid2<<\" \"<<temp.n2<<\" \"<<temp.mask2<<\" \"<<temp.version<<\" \"<<temp.status<<endl;\n\t}\n\treturn 0;\n}\n\t\n"
},
{
"alpha_fraction": 0.6217948794364929,
"alphanum_fraction": 0.6217948794364929,
"avg_line_length": 21.428571701049805,
"blob_id": "c4987b45a50e66f789724d644c86a25c0c5e2b33",
"content_id": "62e4be0d946c23c1059d557e005edf9dcfd10f0b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 156,
"license_type": "permissive",
"max_line_length": 45,
"num_lines": 7,
"path": "/boilerplates/chrome_bp/src/pages/js/app.js",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "var RQ = {\n init: function(options) {\n // Create models, collections, views here\n this.router = new RQ.Router();\n Backbone.history.start();\n }\n};"
},
{
"alpha_fraction": 0.5370886921882629,
"alphanum_fraction": 0.5588399171829224,
"avg_line_length": 26.58461570739746,
"blob_id": "2235e36c2d169dc6bbccd91c9319cfb0e595de8c",
"content_id": "3382bae7186d604a296385e96e9babf55eb91f2a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1793,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 65,
"path": "/projects/minimal_ospf/Project_Final/ReceivefromHello.h",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "void ReceivefromHello(struct floodingheader *New)\n{\n\tFILE *fp;\n\tint flag=0;\n\tint flag2=0;\n\twhile (floodlock==1) {\n\t\tprintf(\"receieve from Hello sleeping\\n\"); sleep(2);\n\t}\t\n\tif(fp=fopen(FloodDb,\"r+b\")) {\n\t} else {\n\t\tprintf(\"ERROR in W+B\\n\");\n\t\tfp=fopen(FloodDb,\"w+b\");\n\t}\n\t\n\tfloodlock=1;\n\tcout<<\"-----------Link status changed so updating DataBase and flooding----------------\"<<endl;\n\tstruct floodingheader temp;\n\twhile (fread(&temp,sizeof(temp),1,fp)) {\n\t\tif (((temp.n1==New->n1) && (temp.n2==New->n2)) || ((temp.n1==New->n2) && (temp.n2==New->n1))) {\n\t\t\tfseek(fp,-1*sizeof(temp),SEEK_CUR);\n\t\t\tflag=1;\n\t\t\tint z=temp.status;\n\t\t\ttemp.version++;\n\t\t\ttemp.status=New->status;\n\t\t\tfwrite(&temp,sizeof(temp),1,fp);\n\t\t\tfor (int i=0;i<interfaces;i++) {\n\t\t\t\tif(inet_addr(interface[i])!=New->n1){\n\t\t\t\t\tbroadcast(&temp,inet_addr(interface[i]));\n\t\t\t\t\tsleep(2);\n\t\t\t\t}\n\t\t\t}\n\t\t\tcout<<\"-------------------Link state updated--------------- \"<<z<<\" \"<<New->status<<endl;\n\t\t\tif (New->status==1) {\n\t\t\t\tglobalddseq++;\n\t\t\t\tfloodlock=0;\n\t\t\t\tprintf(\"Entering in exchangestart FLag=1\\n\");\n\t\t\t\texchangestart(New->n1,New->n2);\n\t\t\t}\n\t\t\tfclose(fp);\n\t\t\tfloodlock=0;\n\t\t\treturn;\n\t\t}\n\t}\n\tif (flag==0) {\n\t\tfseek(fp,0,SEEK_END);\n\t\tNew->version=0;\n\t\tfwrite(New,sizeof(struct floodingheader),1,fp);\n\t\tprintf(\"***************************Written record in file***********************\\n\");\n\t\tfloodlock=0;\n\t\tfclose(fp);\n\t\tcout<<\"----------------Link has been added-------------------\"<<endl;\n\t\tfor (int i=0;i<interfaces;i++) {\n\t\t\tif(inet_addr(interface[i])!=New->n1){\n\t\t\t\tbroadcast(&temp,inet_addr(interface[i]));\n\t\t\t\tsleep(2);\n\t\t\t}\n\t\t}\n\t\tglobalddseq++;\n\t\tprintf(\"Entering in exchangestart Flag=0\\n\");\n\t\texchangestart(New->n1,New->n2);\n\t\treturn;\n\t}\t\n\tfloodlock=0;\n\tcout<<\"----------------------Returning from Receive fomr Hello\\n\"; \n}\n"
},
{
"alpha_fraction": 0.6324324607849121,
"alphanum_fraction": 0.637837827205658,
"avg_line_length": 18.473684310913086,
"blob_id": "b878e83473c537d75560ed5672ffe14fa656b4b5",
"content_id": "b2a5cc90f48100682e029b57ab2a213cfff24461",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 370,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 19,
"path": "/php/basic/12_startsession.php",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "<?php\n session_start();\n?>\n\n<html>\n <head>\n <title> Starting a session </title>\n </head>\n\n <body>\n <h1><center><u>Starting Session </u></center></h1>\n \n <p> Session must be started before sending back any HTML </p>\n\n <p>In chrome, use document.cookie in console to see the session Id.<br>\n It should come with PHPSESSID as cookie name</p>\n\n </body>\n</html>\n"
},
{
"alpha_fraction": 0.41107872128486633,
"alphanum_fraction": 0.4285714328289032,
"avg_line_length": 17.54054069519043,
"blob_id": "12578ed49cfe07544a42de32cec45dab03fe8083",
"content_id": "f78c42f49306183b4871c7b3a6614a97ed37971a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 686,
"license_type": "no_license",
"max_line_length": 62,
"num_lines": 37,
"path": "/php/basic/7_control.php",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "<html>\n <head>\n <title> Control Structures in PHP </title>\n </head>\n\n <body>\n <h1><center><u> Control Structures in PHP </u></center></h1>\n\n <?php\n $a = 20;\n $b = \"10\";\n $c = \"12\";\n $d = \"25\";\n \n /* 3 Examples of Type conversion */\n $b += 5; // PHP type cast $b from string to int \n settype($c, \"integer\");\n $d = (int) $d;\n \n echo \"Type of variables:\";\n echo gettype($a) . \" \" . \n gettype($b) . \" \" . \n gettype($c) . \" \" . \n gettype($d) . \"<hr>\";\n ?>\n\n <?php\n if ($a > $b) {\n echo \"A is greater than B\" . \"<hr>\";\n }\n\n if ($c != $d) {\n echo \"C is not equal to D\" . \"<hr>\";\n }\n ?>\n </body>\n</html>\n"
},
{
"alpha_fraction": 0.4917197525501251,
"alphanum_fraction": 0.5375795960426331,
"avg_line_length": 29.19230842590332,
"blob_id": "8e68cf88887f722c850d366fa5dbf002deb69871",
"content_id": "c86ac4aab31c68c67dd467b19bd5ad6f6a6ebb3f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 785,
"license_type": "no_license",
"max_line_length": 107,
"num_lines": 26,
"path": "/projects/minimal_ospf/Project_Final/Global.h",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "void *exchangeonreceive(void *arvc);\nchar FloodDb[30]=\"FloodingDataBase.bin\";\nint floodlock=0;\nint abcdef=0;//for debugging\nchar asd[]=\"Processing\";\nint interfaces=0;//number of interfaces;\nunsigned char buffer123[6553];\n\nchar *interface[20];\nchar *netmask[20];\n\nint networklink=0;\nchar *networkip[20];\nchar *networkmask[20];\n\nint data_size;\n//////////////////////////////////////////////////////////////////////////////\n///////////////////////////////////////////////////////////////////////////protocol used///////////////////\nint usedprotocol=89;\n\n///////////////////////////////hello global variable/////////////////////////////\nunsigned int routerid=inet_addr(\"172.21.1.124\");\nchar hellofilename[]=\"hellodatabase.bin\";\nint hellointerval=3;\nint deadinterval=11;\nint globalddseq=0;\n"
},
{
"alpha_fraction": 0.6594171524047852,
"alphanum_fraction": 0.6689864993095398,
"avg_line_length": 27.75,
"blob_id": "ebb6f0f1241bbf01c177c142334bd47e5b8609c2",
"content_id": "761034cb43e299c9209e8eec1d418e1db0c5d364",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2299,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 80,
"path": "/coding/codeforces/189_2b_easyPingPong.py",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "# Author: Sachin Jain\n# Link: http://codeforces.com/problemset/problem/320/B\n# Problem: DIV -189 B\n\ndef addNewInterval(connectivityMatrix, existingIntervalsX,\n existingIntervalsY, newIntervalX,\n newIntervalY):\n\n # Define reachabe path from newInterval to already present intervals and vice-versa and update connectivityMatrix\n newIntervalConnections = []\n i = 0\n while i < len(existingIntervalsX):\n if ((existingIntervalsX[i] < newIntervalX < existingIntervalsY[i]) or \n (existingIntervalsX[i] < newIntervalY < existingIntervalsY[i])):\n newIntervalConnections.append(True)\n else:\n newIntervalConnections.append(False)\n\n if ((newIntervalX < existingIntervalsX[i] < newIntervalY) or\n (newIntervalX < existingIntervalsY[i] < newIntervalY)):\n connectivityMatrix[i].append(True)\n else:\n connectivityMatrix[i].append(False)\n\n i += 1\n\n #Connection to itself (Always False)\n newIntervalConnections.append(False)\n connectivityMatrix.append(newIntervalConnections)\n\n existingIntervalsX.append(newIntervalX)\n existingIntervalsY.append(newIntervalY)\n\n# BFS traversal on matrix to update the path from source node to every other node\ndef updateConnections(matrix, source):\n bfsQueue = []\n visitedList = {}\n bfsQueue.append(source)\n\n queueIter = 0\n while queueIter < len(bfsQueue):\n i = bfsQueue[queueIter]\n j = 0\n while j < len(matrix[i]):\n if not j in visitedList and matrix[i][j] == True:\n bfsQueue.append(j)\n matrix[source][j] = True\n\n j += 1\n visitedList[i] = 1\n queueIter += 1\n#End of BFS Traversal method code\n\ndef main():\n numOperations = int(raw_input())\n connectivityMatrix = []\n existingIntervalsX = []\n existingIntervalsY = []\n\n while (numOperations > 0):\n operation, x, y = raw_input().split()\n operation = int(operation)\n x = int(x)\n y = int(y)\n\n if (operation == 1):\n addNewInterval(connectivityMatrix, existingIntervalsX, existingIntervalsY, x, y)\n elif (operation == 2):\n if connectivityMatrix[x-1][y-1] == False:\n updateConnections(connectivityMatrix, x-1)\n\n if connectivityMatrix[x-1][y-1] == True:\n print \"YES\"\n else:\n print \"NO\"\n \n numOperations -= 1\n\nif (__name__ == \"__main__\"):\n main()"
},
{
"alpha_fraction": 0.6800000071525574,
"alphanum_fraction": 0.6880000233650208,
"avg_line_length": 23.899999618530273,
"blob_id": "a231249a34dd77509aebde76869ffab886c61589",
"content_id": "6ddeba80aeed002acabaf8176be9f6650661484f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 250,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 10,
"path": "/node/nodeschool/stream-adventure/02_fs_readstream.js",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "var num_arguments = process.argv.length,\n fs = require('fs'),\n filename;\n\nif (num_arguments < 3) {\n console.error('Please provide a valid file name');\n} else {\n filename = process.argv[2];\n fs.createReadStream(filename).pipe(process.stdout);\n}\n\n"
},
{
"alpha_fraction": 0.5665445923805237,
"alphanum_fraction": 0.5795685648918152,
"avg_line_length": 23.885135650634766,
"blob_id": "5bd0fb7e80194d7d84cf5f46bb6bbf9a68d4d0c2",
"content_id": "76734af63b1e58a26e1e25154912b2d18e8a2a0f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 7371,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 296,
"path": "/projects/minimal_ospf/Project_Final/hello.h",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "#include \"helloheader.h\"\n#include \"hellodatabase.h\"\nint hellolock=0;\n\n\t\tvoid updatehellodatabase(struct hellodatabase *database,int flag,struct floodingheader *f1);\n\t\tvoid checkhellodatabase(unsigned char *buffer,struct iphdr *ip,struct ospfheader *ospf,struct helloheader *hello);\n\t\tvoid onReceiveHello(unsigned char *buffer,int size);\n\t\tvoid *broadcast(void *argv);\n\t\tint correctdestination(struct iphdr *ip);\n\t\tvoid print_ip_header(struct iphdr *iph);\n\t\tint checkinterface(unsigned int ip);\n\n\n\n#include \"hellobroadcast.h\"\n#include \"hellodead.h\"\nint checkinterface(unsigned int ip)\n{\n\tint answer=-1,ret;\t\n\tint flag=0;\n\t\n\t\n\t/*\t\n\tfor(int i=0;i<interfaces;i++)\n\t{\n\t\t\n\t\tunsigned int temp=inet_addr(interface[i]);\n\t\ttemp=temp^ip;\n\t\t//temp=~temp;\n\t\tif(flag==0){\n\t\t\tflag=1;\n\t\t\tanswer=temp;\n\t\t\tret=i;\n\t\t} else if(temp>answer){\n\t\t\tanswer=temp;\n\t\t\tret=i;\n\t\t}\n\t}\n\t*/\n\t\n\tstruct sockaddr_in source;\n\tmemset(&source, 0, sizeof(source));\n\tsource.sin_addr.s_addr =ip;\n\tchar *s1=inet_ntoa(source.sin_addr);\n\tchar *s=(char *)malloc(strlen(s1)+1);\n\ts[strlen(s1)]='\\0';\n\tfor(int i=0;i<strlen(s1);i++)\n\t{\n\t\ts[i]=s1[i];\n\t}\n\tfor(int i=0;i<interfaces;i++)\n\t{\n\t\t\n\t\tint counter=0;\n\t\tint j=0;\n\t\twhile(j<strlen(interface[i])&&j<strlen(s))\n\t\t{\n\t\t\tif(s[j]==interface[i][j])\n\t\t\t{\n\t\t\t\tcounter++;\n\t\t\t} else {\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tj++;\t\t\n\t\t}\t\n\t\tif(counter>answer)\n\t\t{\n\t\t\tanswer=counter;\n\t\t\tret=i;\n\t\t}\t\t\n\t}\n\tfree(s);\n\treturn ret;\n\t\n}\nvoid print_ip_header(struct iphdr *iph)\n{\n\tstruct sockaddr_in source,dest;\n\tunsigned short iphdrlen; \n\tiphdrlen =iph->ihl*4; \n\tmemset(&source, 0, sizeof(source));\n\tsource.sin_addr.s_addr = iph->saddr; \n\tmemset(&dest, 0, sizeof(dest));\n\tdest.sin_addr.s_addr = iph->daddr; \n\tprintf(\"IP Header\\n\");\n\tprintf(\" |-Source IP : %s\\n\",inet_ntoa(source.sin_addr));\n\tprintf(\" |-Destination IP : %s\\n\",inet_ntoa(dest.sin_addr));\n\t//printf(\" Total Length : %d\\n\",iph->check);\n}\nint correctdestination(struct iphdr *ip)\n{\n\tstruct sockaddr_in input,source;\n\tmemset(&input, 0, sizeof(source));\n\tsource.sin_addr.s_addr = ip->saddr;\n\tchar *s;\n\ts=inet_ntoa(source.sin_addr);\n\tfor(int i=0;i<interfaces;i++)\n\t{\n\t\tif(strcmp(interface[i],s)==0)\n\t\t{\n\t\t\treturn 0;\t\t\n\t\t}\n\t}\n\tprint_ip_header(ip);\n\treturn 1;\t\t\n}\nvoid updatehellodatabase(struct hellodatabase *database,int flag,struct floodingheader *f1)\n{\n\t\t\t\n\tFILE *f;\n\twhile(hellolock==1)\n\t{\n\t\tsleep(1);\n\t}\n\thellolock=1;\n\tf=fopen(hellofilename,\"r\");\n if(f==NULL){\n f=fopen(hellofilename,\"w\");\n }\n\tfclose(f);\n\tf=fopen(hellofilename,\"r+b\");\n\tstruct hellodatabase record;\n\tint check=0;\n\twhile(fread(&record,sizeof(hellodatabase),1,f))\n\t{\n\t\tif(record.rid==database->rid)\n\t\t{\n\t\t\tcheck=1;\t\n\t\t\tif(record.connection==1)\n\t\t\t{\n\t\t\t\tif(flag==1)\n\t\t\t\t{\n\t\t\t\t\tdatabase->connection=1;\n\t\t\t\t\tfseek(f,-sizeof(record),SEEK_CUR);\n\t\t\t\t\tfwrite(database,sizeof(struct hellodatabase),1,f);\n\t\t\t\t\tfclose(f);\n\t\t\t\t\t//free(database);\n\t\t\t\t\tbreak;\n\t\t\t\t} else {\n\t\t\t\t\tdatabase->connection=0;\n\t\t\t\t\tfseek(f,-sizeof(record),SEEK_CUR);\n fwrite(database,sizeof(struct hellodatabase),1,f);\n f1->status=0;\n\t\t\t\t\thellolock=0;\n\t\t\tprintf(\"********************Sachin method called 
1*****************\\n\");\n\t\t\t\t\tfclose(f);\n\t\t\t\t\t//free(database);\n ReceivefromHello(f1);\n/////////////////////////Sachin Delete Call///////////////////////////////////////////\n\t\t\t\t\tbreak;\n\t\t\t\t}\t\t\t\n\t\t\t} else {\n\t\t\t\tif(flag==1)\n\t\t\t\t{\n\t\t\t\t\tdatabase->connection=1;\n fseek(f,-sizeof(record),SEEK_CUR);\n fwrite(database,sizeof(struct hellodatabase),1,f);\n\t\t\t\t\tf1->status=1;\n\t\t\t\t\thellolock=0;\t\t\n\t\t\t\t\tfclose(f);\n\t\t\t\t\t//free(database);\n\t\t\tprintf(\"********************Sachin method called 2*****************\\n\");\t\t\t\n ReceivefromHello(f1);\n////////////////////////////////Sachin Link Found Call////////////////////////////////\n break;\n\t\t\t\t} else {\n\t\t\t\t\tdatabase->connection=0;\n fseek(f,-sizeof(record),SEEK_CUR);\n fwrite(database,sizeof(struct hellodatabase),1,f);\n\t\t\t\t\tfclose(f);\n\t\t\t\t\t//free(database);\n\t\t\t\t\tbreak;\n\t\t\t\t}\t\n\t\t\t}\n\t\t}\n\t}\n\tif(check==0)\n\t{\n\t\tif(flag==1)\n\t\t{\n\t\t\tdatabase->connection=1;\n fseek(f,0,SEEK_END);\n fwrite(database,sizeof(struct hellodatabase),1,f);\n f1->status=1;\n\t\t\thellolock=0;\n\t\t\tfclose(f);\n\t\t\t//free(database);\n\t\t\tprintf(\"********************Sachin method called 3*****************\\n\");\n ReceivefromHello(f1);\n////////////////////////////Sachin Link Found Call//////////////////////////////////\n \n\t\t} else {\n\t\t\tdatabase->connection=0;\n fseek(f,0,SEEK_END);\n fwrite(database,sizeof(struct hellodatabase),1,f);\n\t\t\tfclose(f);\n\t\t}\n\t} \n\thellolock=0;\n\treturn;\n}\nvoid checkhellodatabase(unsigned char *buffer,struct iphdr *ip,struct ospfheader *ospf,struct helloheader *hello)\n{\n\tint flag=0;\n\tint i=sizeof(struct iphdr)+sizeof(struct ospfheader)+sizeof(struct helloheader);\n\tstruct neighbour *neigh;\n\twhile(i<ip->tot_len)\n\t{\n\t\tneigh=(struct neighbour *)(buffer+i);\n\t\tif(neigh->neigh==routerid)\n\t\t{\n\t\t\tflag=1;\n\t\t\tbreak;\n\t\t}\n\t\ti+=sizeof(struct neighbour);\n\t}\n\tprintf(\"%d\\n\",flag);\n\tstruct sockaddr_in source;\n\tmemset(&source, 0, sizeof(source));\n\tsource.sin_addr.s_addr =ip->saddr;\n\t\n\tstruct floodingheader f1;\t\t\n\tint index=checkinterface(ip->saddr);\t\n\tcout<<inet_ntoa(source.sin_addr)<<\" \"<<interface[index]<<\" \"<<endl;\n\t\t\n\tf1.rid1=routerid;\t\t\n\tf1.n1=inet_addr(interface[index]);\n\tf1.mask1=inet_addr(netmask[index]);\t\n\tf1.n2=ip->saddr;\n\tf1.rid2=ospf->rid;\n\tf1.mask2=hello->netmask;\n\t\n\tstruct hellodatabase database;\n\tdatabase.rid=ospf->rid;\n\tdatabase.interface=ip->saddr;\n\tdatabase.netmask=hello->netmask;\n\tdatabase.time=time(NULL);\n\tfree(buffer);\n\tupdatehellodatabase(&database,flag,&f1);\n\t\n}\nvoid *onReceiveHello(void *argv)\n{\n\tunsigned char *buffer=(unsigned char *)malloc(sizeof(unsigned char)*data_size);\n\tint size=data_size;\n\tfor(int i=0;i<data_size;i++)\n\t{\n\t\tbuffer[i]=buffer123[i];\n\t}\n\tstruct iphdr *ip=(struct iphdr *)buffer;\t\n\tif(!correctdestination(ip)){ \n\t\t//printf(\"Own Packet Recieved\\n\");\n\t\t//free(buffer);\t\t\n\t\treturn NULL;\t\n\t}\n\tip->tot_len=size;\t\n\tif(ip->tot_len<(sizeof(struct iphdr)+sizeof(struct ospfheader))){\n\t\tprintf(\"Ospf header doesn't exist \\n\");\n\t\t//free(buffer);\n\t\treturn NULL;\n\t}\n\tstruct ospfheader *ospf=(struct ospfheader *)(buffer+sizeof(struct iphdr));\n\t\t\n\tif(ospf->auth1!=1234)\n\t{\n\t\tprintf(\"Authentication Error \\n\");\n\t\t//free(buffer);\n\t\treturn NULL;\n\t}\t\n\tif((int)ospf->type!=1)\n\t{\t\n\t\tprintf(\"Not a hello 
packet\\n\");\n\t\t//free(buffer);\n\t\treturn NULL;\n\t}\n\tif(ip->tot_len<=sizeof(struct iphdr)+sizeof(struct ospfheader)){\n\t\tprintf(\"No Hello header is there\\n\");\n\t\t//free(buffer);\n\t\treturn NULL;\n\t}\n\tstruct helloheader *hello=(struct helloheader *)(buffer+sizeof(struct iphdr)+sizeof(struct ospfheader));\n\t\n\tif(hello->hellointerval!=hellointerval)\n\t{\n\t\tprintf(\"hello interval dint matched\\n\");\n\t\t//free(buffer);\n\t\treturn NULL;\n\t}\n\tif(hello->deadinterval!=deadinterval){\n\t\tprintf(\"dead interval dint matched\\n\");\n\t\t//free(buffer);\n\t\treturn NULL;\n\t}\n\tcheckhellodatabase(buffer,ip,ospf,hello);\n\treturn NULL;\n}\n\n\t\t\t\n"
},
{
"alpha_fraction": 0.5474308133125305,
"alphanum_fraction": 0.5691699385643005,
"avg_line_length": 17.740739822387695,
"blob_id": "9cc0d1b0f7737e7f1bb746aa1288cb7b7dca496e",
"content_id": "e3a6d3dd2e231aa301e7ba47f505515a472dca29",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 506,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 27,
"path": "/coding/codeforces/196_2a_puzzles.py",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\n# Author: Sachin Jain\n# Link: http://codeforces.com/contest/337/problem/A\n# Problem: DIV -196 A\n\ndef main():\n\tn, m = map(int, input().split(' '))\n\ta = list(map(int, input().split(' ')))\n\n\t# for i,val in enumerate(a): a[i] = int(val)\n\t\n\ta.sort()\n\n\tfirst = 0\n\tlast = n-1\n\tminimum = a[last] - a[first]\n\n\twhile (last < len(a)):\n\t\tnew_min = a[last] - a[first]\n\t\tif (new_min < minimum):\tminimum = new_min\n\t\tfirst, last = first + 1, last + 1\n\n\tprint(minimum)\n\nif (__name__ == \"__main__\"):\n main()\n"
},
{
"alpha_fraction": 0.7689242959022522,
"alphanum_fraction": 0.775564432144165,
"avg_line_length": 25.85714340209961,
"blob_id": "26e261cd21a8d52ce08b6531b5a10752633faf7a",
"content_id": "b66571da940d59a4a5c116d3538df7fd70939da0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 753,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 28,
"path": "/node/Commands.md",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "List of NodeJs Commands\n\n# Initialize the project with package.json\n1. npm init\n\n# Adding a dependency to package.json\n2. npm install --save express\n\n--save: Records the version number\n\n# Installing express module as global module\n3. npm install -g express\n\n# Once express module is defined as global module, we can write\nexpress project_name\n\n# It will create a project_name directory with package.json with defaults initialized\n# Then do cd project_name and npm install (To get the dependencies)\n# To run the app\nnode app or node start\n\n# Removing a dependency. Remove entry from package.json and use\n4. npm prune\n\n# This will remove the folders from node_modules which our project is not using\n\n# To update the version of dependencies\n5. npm update\n\n"
},
{
"alpha_fraction": 0.6287878751754761,
"alphanum_fraction": 0.6287878751754761,
"avg_line_length": 32.25,
"blob_id": "8cc7c5d97f6207ba7eea736bf733a77f742a1535",
"content_id": "1e2b91dafe1b3399de3655b5a010ed0ce8812391",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 132,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 4,
"path": "/javascript/requirejs/sample_app/js/main.js",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "define(['./student', './teacher', './school'], function(student, teacher, school) {\n \tconsole.log('Main module has loaded');\n }\n);"
},
{
"alpha_fraction": 0.6703703999519348,
"alphanum_fraction": 0.6703703999519348,
"avg_line_length": 17,
"blob_id": "1c523f7065a1a4f58301214b8bf800af8b9ecbe5",
"content_id": "7483988c518d33f879e705e9706d1738fc9036d3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 270,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 15,
"path": "/node/app.js",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "/* Testing request module */\n\nvar request_module = require('./requestTest');\nrequest_module.init();\n\nvar cheerio_module = require('./cheerioTest');\ncheerio_module.init();\n\n\n/* Testing http module */\n\n\n/* var http_module = require('./httpTest');\n http_module.init();\n*/\n"
},
{
"alpha_fraction": 0.5463286638259888,
"alphanum_fraction": 0.5688374042510986,
"avg_line_length": 30.335617065429688,
"blob_id": "aca58890c3cc6dee410cd81201669baca00c2b21",
"content_id": "b0f8ce2bc48ee6ab39b2e31d0fb8ee6bd7fa4658",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 4576,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 146,
"path": "/projects/minimal_ospf/Project_Final/hellobroadcast.h",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "unsigned char *buildinghellopacket(int &size)\n{\n\twhile(hellolock==1)\n\t{\n\t\tprintf(\"Sleeping\");\n\t\tsleep(1);\n\t}\n\thellolock=1;\n\n\tFILE *f;\n\tf=fopen(hellofilename,\"r\");\n if(f==NULL){\n f=fopen(hellofilename,\"w\");\n }\n\tfclose(f);\n\tf=fopen(hellofilename,\"r+w\");\n\t\n\tfseek(f,0,SEEK_END);\n\tint filesize=ftell(f);\n\tstruct hellodatabase record;\n\tfilesize=(filesize/(sizeof(struct hellodatabase)));\n\tunsigned char *broadcast;\n\tsize=sizeof(struct iphdr)+sizeof(struct ospfheader)+sizeof(struct helloheader)+(filesize)*sizeof(struct neighbour);\n\tbroadcast=(unsigned char *)malloc(size);\n//////////////////////////////////////ip header ////////////////////////////////////\n\tstruct iphdr *ip=(struct iphdr *)(broadcast);\n\tip->ihl = 5;\n\tip->version = 4;\n\tip->tos = 0;\n\tip->id = htons(random());\n\tip->ttl = 0;\n\tip->protocol = usedprotocol;\n\tip->saddr = inet_addr(\"202.19.5.132\");\n\tip->daddr = INADDR_BROADCAST;\n\t//ip->daddr =inet_addr(\"172.21.3.255\");\n\tip->check = 0;\n\n/////////////////////////////////////////ospf header ////////////////////////////\n\tstruct ospfheader *ospf=(struct ospfheader *)(broadcast+sizeof(struct iphdr));\n\tospf->version=2;\n\tospf->type=1;\n\tospf->auth1=1234;\n\tospf->rid=routerid;\n\tospf->checksum=0;\n\tospf->atype=1;\n//////////////////////////////////////////hello header/////////////////////////////////\n\tstruct helloheader *hello=(struct helloheader *)(broadcast+sizeof(struct iphdr)+sizeof(struct ospfheader));\n\thello->type=1;\n\thello->hellointerval=hellointerval;\n\thello->deadinterval=deadinterval;\n\thello->option=0;\n\thello->priority=0;\n\thello->desig=0;\n\thello->backup=0;\n//////////////////////////////////////////////////////////////////////////\n\tint pointer=sizeof(struct iphdr)+sizeof(struct ospfheader)+sizeof(struct helloheader);\n\tfseek(f,0,SEEK_SET);\n\tstruct neighbour *n;\n\tint i=0;\n\twhile(fread(&record,sizeof(struct hellodatabase),1,f))\n\t{ \n\t\tn=(struct neighbour *)(broadcast+pointer);\n\t\tn->neigh=record.rid;\n\t\tpointer+=sizeof(struct neighbour);\n\t\t//printf(\"And %u %d\\n\\n\\n\\n\\n\",n->neigh,size);\n\t}\n\t//n=(struct neighbour *)(broadcast+pointer);\n\t//n->neigh=123123123;\n\thellolock=0;\n\t\n\treturn broadcast;\n}\nvoid *broadcast(void *argv)\n{\n\tstruct sockaddr_in source,saddr;\n\tint saddr_size,data_size,size=0;\n\twhile(1)\n\t{\n\t\tsleep(hellointerval);\n\t\tprintf(\"Hello Broadcasting Started\\n\");\n\t\tunsigned char *broadcast=buildinghellopacket(size);\t\t\n\t\tstruct iphdr *ip=(struct iphdr *)(broadcast);\n\t\tstruct helloheader *hello=(struct helloheader *)(broadcast+sizeof(struct iphdr)+sizeof(ospfheader));\n\t\n\t\tfor(int i=0;i<interfaces;i++)\n\t\t{\n\t\t\tint sockfd=socket(AF_INET,SOCK_RAW,usedprotocol); \n\t\t\tif(sockfd < 0)\n\t\t\t{\n\t\t \tprintf(\"Socket Error\\n\");\n\t\t\t\t\n\t\t\t\treturn NULL;\n\t\t\t}\n\t\t\tconst int on=1;\n\t\t\tif(setsockopt(sockfd,IPPROTO_IP,IP_HDRINCL,&on,sizeof(on))<0)\n\t\t\t{\n\t\t\t\tprintf(\"Error in setsocket\\n\");\t\t\t\t\n\t\t\t\treturn NULL;\n\t\t\t}\n\t\t\n\t\t/////////////////////////////////\n\t\n\t\n\t\t\tint z;\n\t\t\tint so_broadcast;\n\t\t\tso_broadcast = 1;\n\t\t\tz = setsockopt(sockfd,SOL_SOCKET,SO_BROADCAST,&so_broadcast,sizeof so_broadcast);\n\t\t\tif ( z<0 ){\n\t\t\t\tperror(\"setsockopt broadcast error\");\n\t\t\t\t\n\t\t\t\treturn NULL;\n\t\t\t}\n\t\t/////////////////////////////// binding 
///////////////////////////\n\t\t\tsource.sin_family=AF_INET;\n\t\t\tsource.sin_port=htons(1000);\n\t\t\tsource.sin_addr.s_addr=inet_addr(interface[i]);//inet_addr(\"172.19.5.231\");//INADDR_BROADCAST;\n\t\t\tmemset(&(saddr.sin_zero), '\\0', 8);\n\t\t\tbind(sockfd,(struct sockaddr *)&source,sizeof(source));\n\t\t\t\n\t\t\tip->saddr=inet_addr(interface[i]); ////ip source\t\t\t\n\t\t\tip->tot_len=size; //ip length\n\t\t\t\n\t\t\tprintf(\"Here %s\\n\",interface[i]);\n\t\t\t//hello->netmask=inet_addr(\"255.255.252.0\");//inet_addr(netmask[i]);\t\n\t\t\thello->netmask=inet_addr(netmask[i]);\t\t\n\t\t/////////////////////////////broadcastin//////////////\t\t\t\n\t\t\tsaddr_size=sizeof(struct sockaddr);\n\t\t\tsaddr.sin_family=AF_INET;\n\t\t\tsaddr.sin_port=htons(1000);\n\t\t\tsaddr.sin_addr.s_addr=INADDR_BROADCAST;//inet_addr(\"172.19.5.231\");//INADDR_BROADCAST;\n\t\t\tmemset(&(saddr.sin_zero), '\\0', 8);\n\t\t\t//printf(\"%s \\n\",inet_ntoa(ip->saddr));\n\t\t\tdata_size = sendto(sockfd, broadcast , size , 0 ,(struct sockaddr *)&saddr , saddr_size);\n\t \tif(data_size <0 )\n\t \t{\n\t \t printf(\"send to error , failed to sent packetsn\");\n\t \t} else {\n\t\t\t\tprintf(\"Packet Sent\\n\");\n\t\t\t}\n\t\t\tclose(sockfd);\n\t\t}\n\t\t//free(broadcast);\n\t\tprintf(\"Hello Broadcasting Compeleted\\n\");\t\t\n\t}\n\treturn NULL;\n}\n\n"
},
{
"alpha_fraction": 0.6335877776145935,
"alphanum_fraction": 0.6386768221855164,
"avg_line_length": 16.130434036254883,
"blob_id": "1441501ecef11bbe07ec251d51e325b86ddf5558",
"content_id": "d13e9468d8709cbb2265128f3373dda2c4c4f312",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 393,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 23,
"path": "/coding/codeforces/bp.cpp",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "/**\n @Author: Sachin Jain\n @Program: Boiler Plate\n**/\n#include <iostream>\n#include <stdio.h>\n#include <string.h>\n#include <string>\n#include <vector>\n#include <map>\n#include <list>\n#include <sstream>\n\n//Define all shortcut (Macros here)\n#define F(i,n) for (int i=0; i<n; i++)\n#define FF(i,a,n,c) for(int i=a; i<n; i+=c)\n\nusing namespace std;\n\nint main() {\n\tcout<<\"Hello world\"<<endl;\n return 0;\n}"
},
{
"alpha_fraction": 0.5852534770965576,
"alphanum_fraction": 0.6059907674789429,
"avg_line_length": 18.727272033691406,
"blob_id": "806d7f804854ff6c8d4ee74b20114e43476fc87a",
"content_id": "0813e9f51e2735e3f77d5cb36a41506086f5567d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 434,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 22,
"path": "/python/basic/02_loops.py",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n# Author: Sachin Jain\n# Loops in python\n\ndef main():\n\tprint('Printing Fibonacci series in Python:')\n\n\t#Print fibonacci series using while loop\n\ta, b, n = 0, 1, 0\n\twhile (n < 10):\n\t\tprint(a, end=' ')\n\t\ta, b, n = b, a+b, n+1\n\n\tprint('\\nPrinting Loops code in Python:')\n\n\t#Print this code using for in loop\n\tfh = open('02_loops.py')\n\tfor line in fh.readlines():\n\t\tprint(line, end='')\n\nif (__name__ == \"__main__\"):\n main()\n"
},
{
"alpha_fraction": 0.5251798629760742,
"alphanum_fraction": 0.5323740839958191,
"avg_line_length": 16.375,
"blob_id": "e7b9847ac68706777dcde37c6d8ec486c6e4aa0d",
"content_id": "7b16d7e1e3c8a831fe91e9161e969e4ae62d67e8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 278,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 16,
"path": "/php/basic/9_urlparams.php",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "<html>\n <head>\n <title> URL Paramters </title>\n </head>\n\n <body>\n <h1><center><u> URL paramters in PHP</u></center></h1>\n\n <?php\n echo \"URL Parameters are:\" . \"<br>\";\n print_r($_GET); // $_GET is the array of URL paramters\n echo \"<hr>\";\n ?>\n\n </body>\n</html>\n"
},
{
"alpha_fraction": 0.5342857241630554,
"alphanum_fraction": 0.5628571510314941,
"avg_line_length": 15.714285850524902,
"blob_id": "5e2f6c02e93be7f84194a7d5bf0c230c13212740",
"content_id": "1f74c7c2deae404f63cfc18cfd1faf250b14fe73",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 350,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 21,
"path": "/coding/codeforces/190_2a_ceilAndDance.py",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "# Author: Sachin Jain\n# Link: http://codeforces.com/problemset/problem/322/A\n\ndef main():\n num_boys, num_girls = raw_input().split()\n num_boys = int(num_boys)\n num_girls = int(num_girls) \n\n print(num_boys + num_girls - 1)\n\n i = 1\n while (i <= num_girls):\n print 1, i\n i += 1\n\n i = 2\n while (i <= num_boys):\n print i, 1\n i += 1\n\nmain()"
},
{
"alpha_fraction": 0.5744157433509827,
"alphanum_fraction": 0.599016010761261,
"avg_line_length": 21,
"blob_id": "2d47deddc7abea031a99dc6fd41eda01c8701fbe",
"content_id": "35d9a3a4a8e37dbd0f62192427b40a80b36c7eb4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 813,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 37,
"path": "/coding/codeforces/188_2b_stringsofpower.cpp",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "/**\n @Author: Sachin Jain\n @Program: 188_2B Strings Of Power\n @Link: http://codeforces.com/problemset/problem/318/B\n**/\n#include <iostream>\n#include <stdio.h>\n#include <string.h>\n#include <string>\n#include <vector>\n#include <map>\n#include <list>\n#include <sstream>\n\n//Define all shortcut (Macros here)\n#define F(i,n) for (int i=0; i<n; i++)\n#define FF(i,a,n,c) for(int i=a; i<n; i+=c)\n\nusing namespace std;\n\nint main() {\n string text;\n cin >> text;\n\n int num_heavy_occurences = 0;\n long long int result = 0;\n int TEXT_LEN = text.length();\n\n FF (i, 0, TEXT_LEN-4, 1) {\n string block5 = text.substr(i, 5);\n if (block5 == \"heavy\") { num_heavy_occurences++; i += 4; }\n else if (block5 == \"metal\") { result += num_heavy_occurences; i += 4; }\n }\n\n cout << result << endl;\n return 0;\n}"
},
{
"alpha_fraction": 0.6709021329879761,
"alphanum_fraction": 0.6861499547958374,
"avg_line_length": 27.14285659790039,
"blob_id": "4c99cfd8b3f0478ca451fd6e28aba3f5a209d70e",
"content_id": "9b4c232cfb482fc268c787b1d1f1b8b8c99d9dac",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 787,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 28,
"path": "/coding/codeforces/190_2b_ceilAndFlowers.py",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "# Author: Sachin Jain\n# Problem: http://codeforces.com/problemset/problem/322/B\n\ndef buildMixingBucketsFirst(num_red, num_green, num_blue):\n mixing_buckets = min(num_red, num_green, num_blue)\n num_red -= mixing_buckets\n num_green -= mixing_buckets\n num_blue -= mixing_buckets\n\n individialBuckets = num_red/3 + num_green/3 + num_blue/3\n remainingFlowers = num_red%3 + num_green%3 + num_blue%3\n\n if (remainingFlowers == 4 and mixing_buckets):\n individialBuckets += 2\n mixing_buckets -= 1\n\n return individialBuckets + mixing_buckets\n\ndef main():\n num_red, num_green, num_blue = raw_input().split()\n num_red = int(num_red)\n num_green = int(num_green)\n num_blue = int(num_blue)\n\n result = buildMixingBucketsFirst(num_red, num_green, num_blue)\n print(result)\n\nmain()"
},
{
"alpha_fraction": 0.5273109078407288,
"alphanum_fraction": 0.5315126180648804,
"avg_line_length": 18.040000915527344,
"blob_id": "e48b30a8db3f5ebe6fce389ff44245b796dd13dc",
"content_id": "be93939a5a60a7f0c1fa09018edf839ece0b573a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 476,
"license_type": "no_license",
"max_line_length": 62,
"num_lines": 25,
"path": "/php/basic/12_printsession.php",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "<html>\n <head>\n <title> Viewing Session Information </title>\n </head>\n\n <body>\n <h1><center><u> Print Session Information </u></center></h1>\n\n <!-- Set some data in Session (If not present already) -->\n <?php\n if (!isset($_SESSION['username'])) {\n $_SESSION['username'] = \"Britney\";\n } \n \n if (!isset($_SESSION['password'])) {\n $_SESSION['password'] = \"Spears\";\n }\n ?>\n\n <pre><?php\n print_r($_SESSION);\n ?></pre>\n\n </body>\n</html>\n"
},
{
"alpha_fraction": 0.5484949946403503,
"alphanum_fraction": 0.568561851978302,
"avg_line_length": 18.933332443237305,
"blob_id": "6563753a6192dc1a81f5fb923bb2544aad9d4500",
"content_id": "3d89b0d3656ed1faeae252b29c22b5ace97366a1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 299,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 15,
"path": "/python/basic/01_printformats.py",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\n# Author: Sachin Jain\n# List of all print formats\n\ndef main():\n\ta,b = 1,2\n\t# Works only in python3\n\tprint('a ({}) is less than b({})'.format(a,b))\n\t\n\tprint('A is small' if a < b else 'B is small')\n\tprint('A is even' if a%2 is 0 else 'A is odd')\n\nif (__name__ == \"__main__\"):\n main()\n"
},
{
"alpha_fraction": 0.4936170279979706,
"alphanum_fraction": 0.5319148898124695,
"avg_line_length": 13.6875,
"blob_id": "6062776370f5ccbc8199c9c5c2fe5193cf715281",
"content_id": "0eee875899442c0e6676fbda3d7d37869b0930d6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 235,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 16,
"path": "/php/basic/11_setcookie.php",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "<?php\n setcookie(\"userInfo\", 256, time() + (60*60*24));\n?>\n\n<html>\n <head>\n <title>\n Setting Cookie\n </title>\n </head>\n\n <body>\n <?php\n echo \"Remember to set cookies before any HTML ise served\";\n ?>\n </body>\n"
},
{
"alpha_fraction": 0.5135951638221741,
"alphanum_fraction": 0.5196374654769897,
"avg_line_length": 17.38888931274414,
"blob_id": "35b91ec0b086f905d0e344b692e482e1d23c571a",
"content_id": "e73ea411350b52911aa07aedaeb95e7673605a34",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "PHP",
"length_bytes": 331,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 18,
"path": "/php/basic/10_form.php",
"repo_name": "blunderboy/sandbox",
"src_encoding": "UTF-8",
"text": "<html>\n <head>\n <title> URL Paramters </title>\n </head>\n\n <body>\n <h1><center><u> FORM paramters in PHP</u></center></h1>\n\n <?php\n echo \"Form Data is:\" . \"<br>\";\n print_r($_POST); // $_GET is the array of URL paramters\n echo \"<hr>\";\n\n echo $_POST['username'] . \":\" . $_POST['password'];\n ?>\n\n </body>\n</html>\n"
}
] | 72 |
muratf-l/highperformancesocketserver | https://github.com/muratf-l/highperformancesocketserver | dcc3faeee12616153f796543a00103cbd5f1fd30 | e5b193aae4288d2c249279d7eb9fa9514e5d2de5 | 58c41437faff933c9f88f4e7ab8a5cfd1b78f2c9 | refs/heads/master | 2022-11-12T12:43:39.576537 | 2020-06-28T14:06:31 | 2020-06-28T14:06:31 | 275,597,463 | 0 | 0 | null | null | null | null | null
[
{
"alpha_fraction": 0.580918550491333,
"alphanum_fraction": 0.5820120573043823,
"avg_line_length": 25.712121963500977,
"blob_id": "daf7d1fd976068b2c5eb35e12169fcc0aa37a37d",
"content_id": "af8d62e69f4de602e402a69b0d753515f841a4e6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3658,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 132,
"path": "/GameBase.py",
"repo_name": "muratf-l/highperformancesocketserver",
"src_encoding": "UTF-8",
"text": "import collections\r\nimport json\r\nfrom abc import abstractmethod, ABC\r\nfrom asyncio import Lock\r\n\r\nfrom Client import Client\r\nfrom Enums import UserOnlineStatus, Action, GameStatus\r\nfrom GamePlayer import GamePlayer\r\nfrom Utils import getId\r\n\r\n\r\nclass GameBase(ABC):\r\n\r\n def __init__(self):\r\n self.lock = Lock()\r\n self.gameId = getId()\r\n self.gameCapacity: int = 0\r\n self.gamePlayers = collections.defaultdict(GamePlayer)\r\n self.gameStatus: GameStatus = GameStatus.Unknow\r\n\r\n @abstractmethod\r\n def GameStarted(self):\r\n pass\r\n\r\n @abstractmethod\r\n def GameClosed(self):\r\n pass\r\n\r\n @abstractmethod\r\n def PlayerJoined(self, player: GamePlayer):\r\n pass\r\n\r\n @abstractmethod\r\n def PlayerLeft(self, player: GamePlayer):\r\n pass\r\n\r\n def setStatus(self, status: GameStatus):\r\n\r\n if status == GameStatus.Running:\r\n self.GameStarted()\r\n\r\n elif status == GameStatus.Closing:\r\n self.GameClosed()\r\n self.gamePlayers.clear()\r\n status = GameStatus.Closed\r\n\r\n self.gameStatus = status\r\n\r\n async def PlayerAdd(self, client: Client):\r\n if self.gameStatus != GameStatus.Waiting:\r\n return\r\n\r\n async with self.lock:\r\n if not self.IsFull():\r\n player = GamePlayer(client, self.gameId, len(self.gamePlayers))\r\n\r\n await player.sendGameJoinMessage()\r\n\r\n self.gamePlayers[player.getClientToken()] = player\r\n\r\n await self.sendGamePlayerList()\r\n\r\n self.PlayerJoined(player)\r\n\r\n if self.IsFull() and self.gameStatus == GameStatus.Waiting:\r\n self.setStatus(GameStatus.Running)\r\n\r\n return\r\n\r\n async def PlayerRemove(self, client: Client):\r\n async with self.lock:\r\n if client.getClientToken() in self.gamePlayers.keys():\r\n player: GamePlayer = self.gamePlayers[client.getClientToken()]\r\n\r\n player.status = UserOnlineStatus.Offline\r\n\r\n client.setGame(None)\r\n\r\n await player.sendGameLeaveMessage()\r\n\r\n await self.sendGamePlayerList()\r\n\r\n self.PlayerLeft(player)\r\n\r\n if self.IsEmpty():\r\n self.setStatus(GameStatus.Closing)\r\n\r\n return\r\n\r\n def IsEmpty(self):\r\n if len(self.gamePlayers) == 0:\r\n return True\r\n\r\n count: int = 0\r\n\r\n for player in self.gamePlayers.values():\r\n if player.status == UserOnlineStatus.Offline:\r\n count += 1\r\n\r\n if count >= len(self.gamePlayers):\r\n return True\r\n\r\n return False\r\n\r\n def IsFull(self):\r\n if len(self.gamePlayers) >= self.gameCapacity:\r\n return True\r\n else:\r\n return False\r\n\r\n def IsPlayerInGame(self, playerId: str):\r\n if playerId in self.gamePlayers.keys():\r\n return True\r\n\r\n return False\r\n\r\n async def AttemptSetup(self, capacity: int):\r\n self.gameCapacity = capacity\r\n self.setStatus(GameStatus.Waiting)\r\n\r\n async def sendGamePlayerList(self):\r\n player_list = []\r\n\r\n for player in self.gamePlayers.values():\r\n player_list.append(player.getPlayerInfo())\r\n\r\n await self.BroadcastMessage({\"action\": Action.GameUserList.value, \"game\": self.gameId, \"data\": player_list})\r\n\r\n async def BroadcastMessage(self, msg: json):\r\n for player in self.gamePlayers.values():\r\n if player.status == UserOnlineStatus.Online:\r\n await player.sendMessage(msg)\r\n"
},
{
"alpha_fraction": 0.45851850509643555,
"alphanum_fraction": 0.4592592716217041,
"avg_line_length": 26.723403930664062,
"blob_id": "4d381a70fd4ea06e1b69ce67511cf577c9e64848",
"content_id": "164e3122796e6406cf5450db74b2f84f9523ec06",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1350,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 47,
"path": "/AppConfig.py",
"repo_name": "muratf-l/highperformancesocketserver",
"src_encoding": "UTF-8",
"text": "import json\r\nimport logging\r\n\r\n\r\nclass AppConfig:\r\n def __init__(self):\r\n self.config = {}\r\n self.isDebug = False\r\n self.mode = None\r\n self.log = logging.getLogger('app')\r\n\r\n def load(self, mode):\r\n self.mode = mode\r\n\r\n if mode == \"dev\":\r\n config_file = \"config/config.dev.json\"\r\n else:\r\n config_file = \"config/config.pro.json\"\r\n\r\n with open(config_file, 'r') as f:\r\n try:\r\n appConfig.config = json.load(f)\r\n\r\n ch = logging.StreamHandler()\r\n\r\n f = logging.Formatter('[L:%(lineno)d]# %(levelname)-8s [%(asctime)s] %(message)s',\r\n datefmt='%d-%m-%Y %H:%M:%S')\r\n\r\n if mode == \"dev\":\r\n self.isDebug = True\r\n appConfig.log.setLevel(logging.DEBUG)\r\n ch.setLevel(logging.DEBUG)\r\n else:\r\n self.isDebug = False\r\n appConfig.log.setLevel(logging.INFO)\r\n ch.setLevel(logging.INFO)\r\n\r\n ch.setFormatter(f)\r\n\r\n appConfig.log.addHandler(ch)\r\n\r\n except Exception as e:\r\n appConfig.config = {}\r\n appConfig.log.debug(\"Config Load Error: %s\" % e)\r\n\r\n\r\nappConfig = AppConfig()\r\n"
},
{
"alpha_fraction": 0.5555555820465088,
"alphanum_fraction": 0.5555555820465088,
"avg_line_length": 14.199999809265137,
"blob_id": "515b4175d3f868aa0e5bb4dd9ef87a1501b15707",
"content_id": "c7471052964a5db97d563e385d5dadab9bf04411",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 81,
"license_type": "no_license",
"max_line_length": 16,
"num_lines": 5,
"path": "/models/UserInfo.py",
"repo_name": "muratf-l/highperformancesocketserver",
"src_encoding": "UTF-8",
"text": "class UserInfo:\r\n token: str\r\n coin: int\r\n name: str\r\n picture: str\r\n"
},
{
"alpha_fraction": 0.6986951231956482,
"alphanum_fraction": 0.6986951231956482,
"avg_line_length": 30.423076629638672,
"blob_id": "98279743e5a141b59fbea2db92c14c58233a89a2",
"content_id": "b42bb4e9fbefc65dc25658b19d0366eccb36db95",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 843,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 26,
"path": "/models/User.py",
"repo_name": "muratf-l/highperformancesocketserver",
"src_encoding": "UTF-8",
"text": "from datetime import datetime\r\n\r\nfrom sqlalchemy import Column, String, Integer, DateTime\r\n\r\nfrom AppStartup import Base\r\n\r\n\r\nclass User(Base):\r\n __tablename__ = 'ZUsers'\r\n\r\n UserId = Column(Integer, primary_key=True)\r\n UserToken = Column(String)\r\n PlatformId = Column(String, nullable=True)\r\n NameFirst = Column(String)\r\n NameLast = Column(String, nullable=True)\r\n Email = Column(String, nullable=True)\r\n ProfileImageUrl = Column(String, nullable=True)\r\n Locale = Column(String, nullable=True)\r\n Password = Column(String, nullable=True)\r\n RegisterStatus = Column(Integer)\r\n OnlineStatus = Column(Integer)\r\n RegisterMethod = Column(Integer)\r\n\r\n created_at = Column(DateTime, default=datetime.utcnow)\r\n updated_at = Column(DateTime, nullable=True)\r\n deleted_at = Column(DateTime, nullable=True)\r\n"
},
{
"alpha_fraction": 0.553113579750061,
"alphanum_fraction": 0.5641025900840759,
"avg_line_length": 19,
"blob_id": "775b6b09370867b62dc31df008d6ec01b1aca4ff",
"content_id": "40782dd3e52fe41ae89240708dc2d72da583c3ec",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 273,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 13,
"path": "/setup.py",
"repo_name": "muratf-l/highperformancesocketserver",
"src_encoding": "UTF-8",
"text": "from setuptools import setup\r\n\r\nsetup(\r\n name='reflect_game_server',\r\n version='1.0',\r\n packages=[''],\r\n url='',\r\n license='',\r\n author='firat',\r\n author_email='',\r\n description='',\r\n install_requires=[\"websockets\", \"psycopg2\", \"sqlalchemy\"]\r\n)\r\n"
},
{
"alpha_fraction": 0.5955055952072144,
"alphanum_fraction": 0.5955055952072144,
"avg_line_length": 15.800000190734863,
"blob_id": "0005aeeca1062b85801137d4e10cfaea290e5c69",
"content_id": "c6986f536171b45bc3d2051ae5a49efcd89c2f04",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 89,
"license_type": "no_license",
"max_line_length": 21,
"num_lines": 5,
"path": "/models/GamePlayerInfo.py",
"repo_name": "muratf-l/highperformancesocketserver",
"src_encoding": "UTF-8",
"text": "class GamePlayerInfo:\r\n index: int\r\n picture: str\r\n name: str\r\n status: int\r\n"
},
{
"alpha_fraction": 0.656854510307312,
"alphanum_fraction": 0.656854510307312,
"avg_line_length": 29.289474487304688,
"blob_id": "b4c374e926d658b10c19638a2285d0530b075ac6",
"content_id": "dd150c0d3a2ce227f9a3c2898c8a20c9a8c134d0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1189,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 38,
"path": "/GamePlayer.py",
"repo_name": "muratf-l/highperformancesocketserver",
"src_encoding": "UTF-8",
"text": "import json\r\n\r\nfrom Client import Client\r\nfrom Enums import UserOnlineStatus, Action\r\nfrom models.GamePlayerInfo import GamePlayerInfo\r\n\r\n\r\nclass GamePlayer():\r\n\r\n def __init__(self, client: Client, game_id: str, index: int):\r\n self.index = index\r\n self.status: UserOnlineStatus = UserOnlineStatus.Online\r\n\r\n self.client = client\r\n self.client.setGame(game_id)\r\n\r\n async def sendGameJoinMessage(self):\r\n await self.client.sendMessage({\"action\": Action.GameJoin.value, \"game\": self.getGameId()})\r\n\r\n async def sendGameLeaveMessage(self):\r\n await self.client.sendMessage({\"action\": Action.GameLeave.value, \"game\": self.getGameId()})\r\n\r\n async def sendMessage(self, msg: json):\r\n await self.client.sendMessage(msg)\r\n\r\n def getPlayerInfo(self):\r\n info = GamePlayerInfo()\r\n info.index = self.index\r\n info.picture = self.client.userInfo.picture\r\n info.name = self.client.userInfo.name\r\n info.status = self.status.value\r\n return info.__dict__\r\n\r\n def getGameId(self):\r\n return self.client.getGameId()\r\n\r\n def getClientToken(self):\r\n return self.client.getClientToken()\r\n"
},
{
"alpha_fraction": 0.5490196347236633,
"alphanum_fraction": 0.5539215803146362,
"avg_line_length": 12.571428298950195,
"blob_id": "754971f49a0dd19ed9f973748732e42effa3aca6",
"content_id": "77d9e275729b128a9036c5c1c5daaea9caffe23d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 204,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 14,
"path": "/Utils.py",
"repo_name": "muratf-l/highperformancesocketserver",
"src_encoding": "UTF-8",
"text": "import uuid\r\n\r\n\r\ndef getId():\r\n return str(uuid.uuid4().hex)\r\n\r\n\r\ndef is_json_key_present(json, key):\r\n try:\r\n buf = json[key]\r\n except KeyError:\r\n return False\r\n\r\n return True\r\n"
},
{
"alpha_fraction": 0.6233766078948975,
"alphanum_fraction": 0.6233766078948975,
"avg_line_length": 16.117647171020508,
"blob_id": "e78b6a0e613f96fb70020051691c51764b4f0abc",
"content_id": "68d273f7e065a30fccd7312f95d02234622d21e3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 308,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 17,
"path": "/Game.py",
"repo_name": "muratf-l/highperformancesocketserver",
"src_encoding": "UTF-8",
"text": "from GameBase import GameBase\r\nfrom GamePlayer import GamePlayer\r\n\r\n\r\nclass Game(GameBase):\r\n\r\n def GameStarted(self):\r\n pass\r\n\r\n def GameClosed(self):\r\n pass\r\n\r\n def PlayerJoined(self, player: GamePlayer):\r\n pass\r\n\r\n def PlayerLeft(self, player: GamePlayer):\r\n pass\r\n"
},
{
"alpha_fraction": 0.5666104555130005,
"alphanum_fraction": 0.5666104555130005,
"avg_line_length": 22.70833396911621,
"blob_id": "125561c1df5e63197bf3346c95e410aaf6f8ca8a",
"content_id": "857cadd44e0f5b7f6428dba8c028efc27b6c4c54",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1186,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 48,
"path": "/AppServer.py",
"repo_name": "muratf-l/highperformancesocketserver",
"src_encoding": "UTF-8",
"text": "import collections\r\nimport json\r\n\r\nfrom AppConfig import appConfig\r\nfrom Client import Client\r\nfrom GameHost import GameHost\r\n\r\n\r\nclass AppServer:\r\n\r\n def __init__(self):\r\n self.CLIENTS = collections.defaultdict(Client)\r\n self.host: GameHost = GameHost()\r\n\r\n async def server_handler(self, ws, path):\r\n\r\n client = Client(ws)\r\n\r\n self.CLIENTS[client.connectionId] = ws\r\n\r\n await client.OnWsConnected()\r\n\r\n try:\r\n async for msg in ws:\r\n try:\r\n jsonData = json.loads(msg)\r\n\r\n await client.OnWsReceived(jsonData)\r\n\r\n except KeyError:\r\n appConfig.log.debug(\"Json from %s is not valid : %s\", client.connectionId, msg)\r\n\r\n except ValueError:\r\n appConfig.log.debug(\"Message from %s is not valid json string : %s\", client.connectionId, msg)\r\n\r\n finally:\r\n await client.OnWsDisconnected()\r\n\r\n await ws.close()\r\n\r\n self.CLIENTS.pop(client.connectionId)\r\n\r\n appConfig.log.debug('disconnect %s', client.connectionId)\r\n\r\n return ws\r\n\r\n\r\nappServer = AppServer()\r\n"
},
{
"alpha_fraction": 0.6151193380355835,
"alphanum_fraction": 0.6183024048805237,
"avg_line_length": 29.41666603088379,
"blob_id": "8e6decb05ba0a060660fb29d5247a655a0650b4f",
"content_id": "7b390b356fe706bebd8f6953acbb5714c4b78ef2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3770,
"license_type": "no_license",
"max_line_length": 119,
"num_lines": 120,
"path": "/Database.py",
"repo_name": "muratf-l/highperformancesocketserver",
"src_encoding": "UTF-8",
"text": "import json\r\n\r\nfrom sqlalchemy.orm import sessionmaker\r\n\r\nfrom Enums import UserRegisterStatus, UserRegisterMethod, UserOnlineStatus, ResponseCode\r\nfrom Utils import getId\r\nfrom models.UserInfo import UserInfo\r\n\r\n\r\nclass Database:\r\n\r\n def __init__(self):\r\n self.models = {}\r\n\r\n from models.User import User\r\n self.models[\"User\"] = User\r\n pass\r\n\r\n def createDatabase(self):\r\n from AppConfig import appConfig\r\n from sqlalchemy import create_engine\r\n self.engine = create_engine(appConfig.config['ConnectionString'], echo=appConfig.isDebug)\r\n\r\n from AppStartup import Base\r\n # Base.metadata.drop_all(self.engine)\r\n Base.metadata.create_all(self.engine)\r\n\r\n self.session = sessionmaker(bind=self.engine)\r\n\r\n async def registerUserFromFacebook(self, data: json, cb):\r\n from models.User import User\r\n\r\n session = self.session()\r\n\r\n query = session.query(User).filter(User.PlatformId == data[\"id\"]).limit(1).all()\r\n\r\n queryCount = len(query)\r\n\r\n if queryCount > 0:\r\n user = query[0]\r\n await self.getUserInfoFromRecord(user, cb)\r\n return\r\n\r\n user = User()\r\n user.UserToken = getId()\r\n user.PlatformId = data[\"id\"]\r\n user.NameFirst = data[\"name\"]\r\n user.ProfileImageUrl = data[\"photo\"]\r\n user.Locale = data[\"locale\"]\r\n user.PlatformMethod = data[\"platform\"]\r\n user.RegisterStatus = UserRegisterStatus.Registered.value\r\n user.OnlineStatus = UserOnlineStatus.Online.value\r\n user.RegisterMethod = UserRegisterMethod.Facebook.value\r\n\r\n session.add(user)\r\n session.commit()\r\n await self.getUserInfoFromRecord(user, cb)\r\n\r\n async def registerUserFromMail(self, data: json, cb):\r\n from models.User import User\r\n\r\n session = self.session()\r\n\r\n queryCount = session.query(User).filter(User.Email == data[\"mail\"]).limit(1).count()\r\n\r\n if queryCount > 0:\r\n await cb(ResponseCode.Error.value, None, \"already exists\")\r\n return\r\n\r\n user = User()\r\n user.UserToken = getId()\r\n user.NameFirst = data[\"name\"]\r\n user.Email = data[\"mail\"]\r\n user.Password = data[\"pass\"]\r\n user.RegisterStatus = UserRegisterStatus.Registered.value\r\n user.OnlineStatus = UserOnlineStatus.Online.value\r\n user.RegisterMethod = UserRegisterMethod.Mail.value\r\n\r\n session.add(user)\r\n session.commit()\r\n await self.getUserInfoFromRecord(user, cb)\r\n\r\n async def loginUserFromMailPassword(self, data: json, cb):\r\n from models.User import User\r\n\r\n session = self.session()\r\n\r\n query = session.query(User).filter(User.Email == data[\"mail\"] and User.Password == data[\"pass\"]).limit(1).all()\r\n\r\n queryCount = len(query)\r\n\r\n if queryCount > 0:\r\n user = query[0]\r\n await self.getUserInfoFromRecord(user, cb)\r\n return\r\n\r\n await cb(ResponseCode.Error.value, None, \"not found\")\r\n\r\n async def getUserInfoFromRecord(self, user, cb):\r\n userInfo = UserInfo()\r\n userInfo.token = user.UserToken\r\n userInfo.coin = 2500\r\n userInfo.name = user.NameFirst\r\n userInfo.picture = user.ProfileImageUrl\r\n\r\n await cb(ResponseCode.OK.value, userInfo, None)\r\n\r\n async def setUserOnlineStatus(self, userInfo: UserInfo, status: UserOnlineStatus):\r\n from models.User import User\r\n\r\n session = self.session()\r\n\r\n session.query(User).filter(User.UserToken == userInfo.token) \\\r\n .update({User.OnlineStatus: status.value},\r\n synchronize_session=False)\r\n\r\n session.commit()\r\n\r\n\r\ndatabase = Database()\r\n"
},
{
"alpha_fraction": 0.5672858953475952,
"alphanum_fraction": 0.5681921243667603,
"avg_line_length": 29.08450698852539,
"blob_id": "22720c8c486efa8d74a466bc6ba54f60e682225f",
"content_id": "c7786586b655234af350a8432773e70230283754",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2207,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 71,
"path": "/GameHost.py",
"repo_name": "muratf-l/highperformancesocketserver",
"src_encoding": "UTF-8",
"text": "import asyncio\r\nimport collections\r\nfrom asyncio import Lock\r\n\r\nfrom AppConfig import appConfig\r\nfrom Client import Client\r\nfrom Enums import GameStatus\r\nfrom Game import Game\r\n\r\n\r\nclass GameHost:\r\n def __init__(self):\r\n self.lock = Lock()\r\n self.runningGames = collections.defaultdict(Game)\r\n\r\n async def GameFindOrCreate(self, client: Client, capacity: int):\r\n async with self.lock:\r\n game = None\r\n\r\n for live_game in self.runningGames.values():\r\n if live_game.gameStatus == GameStatus.Waiting and \\\r\n not live_game.IsFull() and \\\r\n not live_game.IsPlayerInGame(client.getClientToken()):\r\n game = live_game\r\n break\r\n\r\n if game is None:\r\n game = await self.GameCreate(capacity)\r\n\r\n await game.PlayerAdd(client)\r\n\r\n # appConfig.log.debug(\"player add g:%s c:%s\", game.gameId, client.userInfo.token)\r\n\r\n return\r\n\r\n async def GameLeft(self, client: Client):\r\n async with self.lock:\r\n if client.getGameId() is None or \\\r\n client.userInfo is None or \\\r\n not client.getGameId() in self.runningGames.keys():\r\n client.setGame(None)\r\n return\r\n\r\n game = self.runningGames[client.getGameId()]\r\n\r\n await game.PlayerRemove(client)\r\n\r\n return\r\n\r\n async def GameCreate(self, capacity: int):\r\n game = Game()\r\n\r\n await game.AttemptSetup(capacity)\r\n\r\n self.runningGames[game.gameId] = game\r\n\r\n return game\r\n\r\n async def GameCheckLoop(self):\r\n gameControlTimeOut: int = appConfig.config[\"GameControlLoopSeconds\"]\r\n\r\n while True:\r\n closeGameCount: int = 0\r\n\r\n for key, live_game in list(self.runningGames.items()):\r\n if live_game.gameStatus == GameStatus.Closed:\r\n closeGameCount += 1\r\n self.runningGames.pop(key)\r\n\r\n appConfig.log.debug(\"online game:%i close game:%i\", len(self.runningGames), closeGameCount)\r\n await asyncio.sleep(gameControlTimeOut)\r\n"
},
{
"alpha_fraction": 0.5122807025909424,
"alphanum_fraction": 0.5701754093170166,
"avg_line_length": 12.615385055541992,
"blob_id": "976766a4e5607dba936ba9afc5487b18a2da6b75",
"content_id": "838550805cf10821139d8e7887ba85162b289b7d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 570,
"license_type": "no_license",
"max_line_length": 31,
"num_lines": 39,
"path": "/Enums.py",
"repo_name": "muratf-l/highperformancesocketserver",
"src_encoding": "UTF-8",
"text": "from enum import Enum\r\n\r\n\r\nclass Action(Enum):\r\n Error = 10\r\n UserInfo = 7\r\n ConnectionOK = 8\r\n GameJoin = 4\r\n GameLeave = 5\r\n GameUserList = 6\r\n\r\n\r\nclass UserRegisterStatus(Enum):\r\n Guest = 0\r\n Registered = 10\r\n\r\n\r\nclass UserOnlineStatus(Enum):\r\n Offline = 0\r\n Online = 10\r\n\r\n\r\nclass UserRegisterMethod(Enum):\r\n Unknow = 0\r\n Mail = 10\r\n Facebook = 20\r\n\r\n\r\nclass ResponseCode(Enum):\r\n OK = 200\r\n Error = 500\r\n\r\n\r\nclass GameStatus(Enum):\r\n Unknow = 0\r\n Waiting = 10\r\n Closing = 20\r\n Closed = 20\r\n Running = 30\r\n"
},
{
"alpha_fraction": 0.6188013553619385,
"alphanum_fraction": 0.6203224658966064,
"avg_line_length": 33.344085693359375,
"blob_id": "c5e853bf01251943352187dc08603c6b57f26977",
"content_id": "aed7e02129a4904d29cb1f853fe75f9e58ea2be0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3287,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 93,
"path": "/Client.py",
"repo_name": "muratf-l/highperformancesocketserver",
"src_encoding": "UTF-8",
"text": "import json\r\nfrom asyncio import Lock\r\n\r\nfrom websockets import WebSocketServerProtocol\r\n\r\nfrom AppConfig import appConfig\r\n\r\nfrom Enums import Action, ResponseCode, UserOnlineStatus\r\nfrom Utils import getId, is_json_key_present\r\nfrom models.UserInfo import UserInfo\r\n\r\n\r\nclass Client:\r\n\r\n def __init__(self, connection: WebSocketServerProtocol):\r\n self.connection: WebSocketServerProtocol = connection\r\n self.connectionId: str = getId()\r\n\r\n self.lock = Lock()\r\n self.gameId: str = None\r\n self.userInfo: UserInfo = None\r\n\r\n def setGame(self, gameId: str):\r\n self.gameId = gameId\r\n\r\n def getGameId(self):\r\n return self.gameId\r\n\r\n def getClientToken(self):\r\n return self.userInfo.token\r\n\r\n async def sendMessage(self, msg: json):\r\n if self.connection is None or self.connection.closed:\r\n return\r\n\r\n await self.connection.send(json.dumps(msg))\r\n\r\n async def OnWsConnected(self):\r\n await self.sendMessage({\"action\": Action.ConnectionOK.value})\r\n\r\n async def OnWsDisconnected(self):\r\n if not self.userInfo is None:\r\n from Database import database\r\n await database.setUserOnlineStatus(self.userInfo, UserOnlineStatus.Offline)\r\n\r\n from AppServer import appServer\r\n await appServer.host.GameLeft(self)\r\n\r\n async def OnWsReceived(self, message: json):\r\n\r\n try:\r\n action = int(message['action'])\r\n except:\r\n action = None\r\n\r\n if action is None:\r\n return\r\n\r\n async with self.lock:\r\n appConfig.log.debug('action sid:%s action:%s', self.connectionId, action)\r\n from Database import database\r\n\r\n if action == 1 and self.userInfo is None:\r\n if is_json_key_present(message, \"data\"):\r\n await database.registerUserFromFacebook(message[\"data\"], self.ActionUserInfoCallback)\r\n\r\n elif action == 2 and self.userInfo is None:\r\n if is_json_key_present(message, \"data\"):\r\n await database.registerUserFromMail(message[\"data\"], self.ActionUserInfoCallback)\r\n\r\n elif action == 3 and self.userInfo is None:\r\n if is_json_key_present(message, \"data\"):\r\n await database.loginUserFromMailPassword(message[\"data\"], self.ActionUserInfoCallback)\r\n\r\n elif action == 4 and not self.userInfo is None:\r\n if is_json_key_present(message, \"data\"):\r\n from AppServer import appServer\r\n await appServer.host.GameFindOrCreate(self, int(message[\"data\"][\"player\"]))\r\n\r\n elif action == 5 and not self.userInfo is None:\r\n from AppServer import appServer\r\n await appServer.host.GameLeft(self)\r\n\r\n async def ActionUserInfoCallback(self, code: int, userInfo, message: str):\r\n self.userInfo = userInfo\r\n\r\n if code != ResponseCode.OK.value:\r\n await self.sendMessage({\"action\": code})\r\n return\r\n\r\n from Database import database\r\n await database.setUserOnlineStatus(self.userInfo, UserOnlineStatus.Online)\r\n await self.sendMessage({\"action\": Action.UserInfo.value, \"user\": self.userInfo.__dict__})\r\n"
},
{
"alpha_fraction": 0.6457783579826355,
"alphanum_fraction": 0.6536939144134521,
"avg_line_length": 24.59649085998535,
"blob_id": "365b3667c304747f64b5d0d6465b806a2fbecea2",
"content_id": "864925b69c26040131940a9de26aca59b3423f80",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1516,
"license_type": "no_license",
"max_line_length": 119,
"num_lines": 57,
"path": "/AppStartup.py",
"repo_name": "muratf-l/highperformancesocketserver",
"src_encoding": "UTF-8",
"text": "import asyncio\r\nimport ssl\r\nimport sys\r\nimport logging\r\n\r\nimport websockets\r\nfrom sqlalchemy.ext.declarative import declarative_base\r\n\r\nfrom AppConfig import appConfig\r\nfrom AppServer import appServer\r\n\r\nBase = declarative_base()\r\n\r\nif __name__ == \"__main__\":\r\n args = sys.argv\r\n\r\n if len(args) < 2:\r\n logging.critical('Invalid Args (dev,pro)')\r\n sys.exit()\r\n\r\n appConfig.load(args[1])\r\n appConfig.log.debug('start server %s' % str(appConfig.config['Port']))\r\n\r\n from Database import database\r\n\r\n database.createDatabase()\r\n\r\n if sys.platform == 'win32':\r\n asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())\r\n\r\n event_loop = asyncio.get_event_loop()\r\n event_loop.set_debug(appConfig.isDebug)\r\n\r\n GameLoop = asyncio.get_event_loop().create_task(appServer.host.GameCheckLoop())\r\n\r\n if not appConfig.isDebug:\r\n ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)\r\n ssl_context.load_cert_chain('public.crt', 'private.key')\r\n start_server = websockets.serve(appServer.server_handler, '0.0.0.0', appConfig.config['Port'], ssl=ssl_context)\r\n else:\r\n start_server = websockets.serve(appServer.server_handler, '0.0.0.0', appConfig.config['Port'])\r\n\r\n event_loop.run_until_complete(start_server)\r\n\r\n try:\r\n event_loop.run_forever()\r\n\r\n except KeyboardInterrupt:\r\n pass\r\n\r\n finally:\r\n GameLoop.cancel()\r\n pass\r\n\r\n event_loop.close()\r\n\r\n appConfig.log.debug('Stop server')\r\n"
}
] | 15 |
Gwynhegg/VigenereCipher
|
https://github.com/Gwynhegg/VigenereCipher
|
1acd5f8f89cba025c041adf28fe9af6c172ad386
|
db2f9ca2886833ff4b41402424615e64cdf21028
|
500ab0024d951e1121afc39bcba9b2bc0178a5e8
|
refs/heads/master
| 2023-03-30T15:22:02.262516 | 2021-04-05T12:26:19 | 2021-04-05T12:26:19 | 354,809,904 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5173584818840027,
"alphanum_fraction": 0.5446226596832275,
"avg_line_length": 35.054420471191406,
"blob_id": "2e335cdb7273412a1afb789c68b1eaffca5c7516",
"content_id": "97c92eb1c228336f977bf24d256dddebbe7f0735",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 11167,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 294,
"path": "/main.py",
"repo_name": "Gwynhegg/VigenereCipher",
"src_encoding": "UTF-8",
"text": "from tkinter import *\nfrom tkinter.messagebox import *\nfrom collections import Counter\nimport pyperclip\n\n\n# -*- coding: utf-8 -*-\n\ndef construct_form():\n\n def copy(event):\n pyperclip.copy(text_exit.get(1.0, END))\n\n def paste(event):\n text_entry.insert(1.0, pyperclip.paste())\n\n def clear(event):\n text_entry.delete(1.0, END)\n entry.delete(0, END)\n\n alphabet_table = ['а', 'б', 'в', 'г', 'д', 'е', 'ж', 'з', 'и', 'й', 'к', 'л', 'м', 'н', 'о', 'п', 'р', 'с', 'т',\n 'у', 'ф', 'х', 'ц', 'ч', 'ш', 'щ', 'ъ', 'ы', 'ь', 'э', 'ю', 'я']\n\n frequencies = {'а': 0.062, 'б': 0.014, 'в': 0.038, 'г': 0.013, 'д': 0.025, 'е': 0.072, 'ж': 0.07, 'з': 0.016,\n 'и': 0.062, 'й': 0.010, 'к': 0.028,\n 'л': 0.035, 'м': 0.026, 'н': 0.053, 'о': 0.09, 'п': 0.023, 'р': 0.040, 'с': 0.045, 'т': 0.053,\n 'у': 0.021, 'ф': 0.002, 'х': 0.009,\n 'ц': 0.003, 'ч': 0.012, 'ш': 0.006, 'щ': 0.003, 'ъ': 0.014, 'ы': 0.016, 'ь': 0.014, 'э': 0.003,\n 'ю': 0.006, 'я': 0.018}\n\n visinere_table = []\n for i in range(32):\n visinere_table.append(alphabet_table[i:] + alphabet_table[:i])\n\n\n def check_correctness(text):\n if (text == '\\n' or text == \"\"):\n showerror(title=\"Ошибка\", message=\"Необходимые поля содержат пустые значения\")\n return False\n if [ch for ch in text if ch in '0123456789']:\n showerror(title=\"Ошибка\", message=\"Необходимые поля содержат цифры\")\n return False\n text = text.casefold()\n tab = str(alphabet_table)\n deltab = \",.!? -_)(*%№;:@#$^&[]\\r\\n\"\n text = text.translate(str.maketrans(tab, tab, deltab))\n text = text.replace('ё', 'е')\n print(text)\n if any(ch not in alphabet_table for ch in text):\n showerror(title=\"Ошибка\", message=\"Допустимы только буквы русского алфавита\")\n return False\n return text\n\n def format_text(text):\n step = 0\n result = \"\"\n iter = 0\n while (iter < len(text)):\n if (step < 5):\n result += text[iter]\n step += 1\n iter += 1\n else:\n result += \" \"\n step = 0\n return result\n\n def cypher(event):\n\n text_exit.delete(1.0, END)\n\n text = text_entry.get(1.0, END)\n key = entry.get()\n\n text = str(check_correctness(text))\n if text == \"False\":\n return\n key = str(check_correctness(key))\n if key == \"False\":\n return\n\n key *= int(len(text) / len(key)) + 1\n key = key[:len(text)]\n\n cyphered_message = \"\"\n\n for i in range(len(text)):\n cyphered_message += visinere_table[alphabet_table.index(key[i])][alphabet_table.index(text[i])]\n\n text_exit.insert(1.0, format_text(cyphered_message))\n\n def decypher(event):\n\n text_exit.delete(1.0, END)\n\n text = text_entry.get(1.0, END)\n key = entry.get()\n\n text = str(check_correctness(text))\n if text == \"False\":\n return\n key = str(check_correctness(key))\n if key == \"False\":\n return\n\n key *= int(len(text) / len(key)) + 1\n key = key[:len(text)]\n\n decyphered_message = \"\"\n\n for i in range(len(text)):\n decyphered_message += alphabet_table[visinere_table[alphabet_table.index(key[i])].index(text[i])]\n\n text_exit.insert(1.0, format_text(decyphered_message))\n\n def hack(event):\n\n text_exit.delete(1.0, END)\n entry.delete(0, END)\n \n text = text_entry.get(1.0, END)\n text = str(check_correctness(text))\n if text == \"False\":\n return\n ngram_dict = {}\n for length in range(2,5):\n ngram_dict = searchNgrams(length, ngram_dict, text)\n print(\"Обнаруженные N-граммы и их местоположения: \",ngram_dict)\n distance_dict = convert_to_distance(ngram_dict)\n print(\"Дистанция между позициями: \",distance_dict)\n gcd_dict = find_gcd(distance_dict)\n print(\"НОД расстояний 
между позициями внутри одной группы N-граммы: \",gcd_dict)\n gcd_array = find_array_gcd([item[0] for item in gcd_dict.values()])\n gcd_array = [a for a in gcd_array if a!=1]\n print(\"НОД расстояний между группами: \",gcd_array)\n\n gcd_repeats = Counter(gcd_array)\n\n max=gcd_repeats[3]\n key_length=3\n for i in range(3,10):\n print(\"Количество повторений НОДа {0} = {1} \".format(i,gcd_repeats[i]))\n if gcd_repeats[i]>max:\n max = gcd_repeats[i]\n key_length = i\n print(\"предполагаемая длина ключа: {0}\".format(key_length))\n\n shifts = []\n key = \"\"\n for i in range(key_length):\n shifts.append(text[i::key_length])\n print(\"Сдвиг номер {0} применим к буквам: {1}\".format(i+1,shifts[-1]))\n\n char_repeats = Counter(shifts[-1])\n for k in char_repeats:\n char_repeats[k]=char_repeats[k]/len(alphabet_table)\n\n print(\"Частота встречаемости букв: {0}\".format(char_repeats))\n minimal_shift=0\n minimal_interpolation = interpolation(shifts[-1])\n for k in range(1,len(alphabet_table)):\n new_shift = create_shift(shifts[-1],k)\n interpolation_coef = interpolation(new_shift)\n if (interpolation_coef<minimal_interpolation):\n minimal_interpolation = interpolation_coef\n minimal_shift=k\n fin_string = create_shift(shifts[-1],minimal_shift)\n print(\"Был произведен сдвиг влево на {0} позиций, новая строка: {1}\".format(minimal_shift, fin_string))\n alph_character_index = alphabet_table.index(fin_string[0])\n seek_character = shifts[-1][0]\n key_character = ''\n for iter in range(len(visinere_table)):\n if (visinere_table[iter][alph_character_index]==seek_character):\n key_character = alphabet_table[iter]\n break;\n print(\"{0}-ая буква ключа: {1}\".format(i+1,key_character))\n key+=key_character\n print(\"Найденный ключ: {0}\".format(key))\n entry.insert(0, key)\n decypher(event)\n\n def create_shift(str, shift):\n final_string=\"\"\n for iter in range(len(str)):\n final_string+=alphabet_table[(alphabet_table.index(str[iter])+shift)%len(alphabet_table)]\n return final_string\n\n def interpolation(str):\n interpolation_koef = 0\n char_repeats = Counter(str)\n for i in char_repeats:\n char_repeats[i] = char_repeats[i] / len(alphabet_table)\n for char in alphabet_table:\n try:\n interpolation_koef+=(frequencies[char]-char_repeats[char])**2\n except KeyError:\n interpolation_koef+=frequencies[char]**2\n\n return interpolation_koef\n\n def find_array_gcd(arr):\n gcd_array = []\n for i in range(len(arr)):\n for k in range(i+1, len(arr)):\n gcd_array.append(calc_gcd(arr[i],arr[k]))\n return gcd_array\n\n def find_gcd(dict):\n for key in dict.keys():\n while (len(dict[key])>1):\n dict[key][0] = calc_gcd(dict[key][1], dict[key][0])\n dict[key].pop(1)\n return dict\n\n def calc_gcd(a,b):\n if (b==0):\n if (a==0): return -1\n else: return a\n else:\n if (a>b): return calc_gcd(b, a%b)\n else: return calc_gcd(a, b%a)\n\n def convert_to_distance(dict):\n for key in dict.keys():\n for iter in range(len(dict[key])):\n dict[key][iter] = max(dict[key])-dict[key][iter]\n dict[key].remove(0)\n return dict\n\n def searchNgrams(n, arr, text):\n for i in range(len(text) - n):\n ngram = text[i:i + n]\n temp_arr = []\n flag = True\n start_index = 0\n while (flag):\n temp_arr.append(text.find(ngram, start_index))\n if (temp_arr[-1] == -1):\n temp_arr.pop()\n if (len(temp_arr) > 1):\n arr[ngram] = temp_arr\n flag = False\n start_index += temp_arr[-1] + n\n\n return arr\n\n root = Tk()\n root.geometry('1500x500')\n\n root.title(\"Шифр Вижинера\")\n\n frame_key = Frame(root)\n label_key = Label(frame_key, text=\"Введите 
ключ\")\n entry = Entry(frame_key, width=100)\n frame_key.pack(fill=BOTH)\n label_key.pack(expand=1, side=\"left\", fill=BOTH, padx=20, pady=10)\n entry.pack(expand=1, side=\"right\", fill=BOTH, pady=10, padx=20)\n\n frame_buttons = Frame(root)\n cypher_button = Button(frame_buttons, text=\"Зашифровать\", bg=\"lightgreen\")\n paste_button = Button(frame_buttons, text=\"Вставить из буфера\")\n decypher_button = Button(frame_buttons, text=\"Дешифровать\", bg=\"lightblue\")\n copy_button = Button(frame_buttons, text=\"Копировать в буфер\")\n hack_button = Button(frame_buttons, text=\"Взломать\", bg=\"lightyellow\")\n clear_button = Button(frame_buttons, text=\"Очистить\")\n frame_buttons.pack(fill=BOTH)\n cypher_button.pack(expand=1, side=\"left\", padx=20, pady=10, fill=BOTH)\n paste_button.pack(expand=1, side=\"left\", padx=20, pady=10, fill=BOTH)\n clear_button.pack(expand=1, side=\"left\", padx=20, pady=10, fill=BOTH)\n hack_button.pack(expand=1, side=\"left\", padx=230, pady=10, fill=BOTH)\n copy_button.pack(expand=1, side=\"left\", padx=20, pady=10, fill=BOTH)\n decypher_button.pack(expand=1, side=\"left\", padx=20, pady=10, fill=BOTH)\n\n frame_text = Frame(root)\n text_entry = Text(frame_text, bg=\"grey\", fg=\"white\", font={'Arial', 14})\n text_exit = Text(frame_text, bg=\"grey\", fg=\"white\", font={'Arial', 14}, wrap=WORD)\n text_entry.pack(side=\"left\", padx=10, expand=1)\n scroll_entry = Scrollbar(frame_text, command=text_entry.yview)\n scroll_entry.pack(side=\"left\")\n text_exit.pack(side=\"left\", padx=10, expand=1)\n scroll_exit = Scrollbar(frame_text, command=text_exit.yview)\n scroll_exit.pack(side=\"left\")\n frame_text.pack(fill=BOTH)\n\n hack_button.bind('<Button-1>', hack)\n decypher_button.bind('<Button-1>', decypher)\n cypher_button.bind('<Button-1>', cypher)\n copy_button.bind('<Button-1>', copy)\n paste_button.bind('<Button-1>', paste)\n clear_button.bind('<Button-1>', clear)\n root.mainloop()\n\n\nconstruct_form()\n"
}
] | 1 |
radylemes/python-520
|
https://github.com/radylemes/python-520
|
056594331351bfe2761862c259c55d9f07197b0d
|
e4fdbcc3266cc73cadbd7641b37429a0ce76e03e
|
a367e3ac29528ea584ee829c054dec5d495ee2e4
|
refs/heads/master
| 2020-04-22T18:55:43.695853 | 2019-02-23T01:31:28 | 2019-02-23T01:31:28 | 170,592,024 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5722543597221375,
"alphanum_fraction": 0.5780346989631653,
"avg_line_length": 16.100000381469727,
"blob_id": "9c6cf7f6402a91a94fdc0a1405d0f08ecebeb4aa",
"content_id": "67e66c97208a0aa452f28dae46eb17502aca9d90",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 173,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 10,
"path": "/ordenar.py",
"repo_name": "radylemes/python-520",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\ndef up(letras):\n return letras.upper()\n\nletras = ['a','Z', 'b', 'c', 'l', 'A']\nordenados = sorted(letras, key=up)\n\nfor i in ordenados:\n print(i)\n\n\n"
},
{
"alpha_fraction": 0.602478563785553,
"alphanum_fraction": 0.6186844706535339,
"avg_line_length": 29.852941513061523,
"blob_id": "f030af14beeebeb9cec87028a218bf62fa2fd80b",
"content_id": "7703588eb3b1338a0dd3ff099209d7b6e32353b6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1053,
"license_type": "no_license",
"max_line_length": 67,
"num_lines": 34,
"path": "/classes/animal.py",
"repo_name": "radylemes/python-520",
"src_encoding": "UTF-8",
"text": "#classe que define uma abstração de mamíferos\n\n\nclass Animal():\n\n#\tdef __init__(self, peso=0, idade=0, cor='', nome='', especie=''):\n#\tdef __init__(self, *args):\n#\t\tself.peso = args[0] if len(args) >= 1 else 0\n#\t\tself.idade = args[1] if len(args) >= 2 else 0\n#\t\tself.cor = args[2] if len(args) >= 3 else ''\n#\t\tself.nome = args[3] if len(args) >= 4 else ''\n#\t\tself.especie = args[4] if len(args) >= 5 else ''\n\n\tdef __init__(self, **kwargs):\n\t\tfor k in kwargs:\n\t\t\tself [k] = kwargs[k]\n\n#\t\tself.peso = kwargs['peso'] if 'peso' in kwargs else ''\n#\t\tself.idade = kwargs['idade'] if 'idade' in kwargs else ''\n#\t\tself.cor = kwargs['cor'] if 'cor' in kwargs else ''\n#\t\tself.nome = kwargs['nome'] if 'nome' in kwargs else ''\n#\t\tself.especie = kwargs['especie'] if 'especie' in kwargs else ''\n\n\tdef __str__(self):\n\t\treturn 'Você printou o gatinho {0}'.format(self.nome)\n\t\n\tdef __setitem__(self, key, value):\n\t\tsetattr(self, key, value)\n\ngatinho = Animal(nome='Manfred', cor='Rosa')\ngatinho2 = Animal(nome='Gatinha',cor='Pink')\n\nprint(gatinho)\nprint(gatinho2)\n"
},
{
"alpha_fraction": 0.5443723201751709,
"alphanum_fraction": 0.572510838508606,
"avg_line_length": 22.100000381469727,
"blob_id": "dfecaa4f607b34e538fbf894bb11dc0e3a1666bc",
"content_id": "3397e62f4927ba3d757c82c9f941053a6b98628b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 925,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 40,
"path": "/usuarios.py",
"repo_name": "radylemes/python-520",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n# git add .\n# git commit -m 'Exercicio final da aula 3'\n# git push origin master\n\n# Escrever os nomes e os usuários da seguinte forma:\n# hector................ [email protected]\n\n\n\nlista = []\nfor l in open('usuarios.csv'):\n nome, idade, email = l.split(',')\n lista.append({\"nome\" : nome.strip(),\"email\" : email.strip(),\"idade\" : int(idade.strip())})\n\ndef hprint():\n\treturn '{0: ^6} {1:.<20} {2:.<34}'.format('ID','NOME','EMAIL')\n\t\ndef fprint(n, m):\n\tn = n.zfill(2)\n\tprint('{2: ^6}--> {0:.<20} {1:.>34}'.format(m['nome'], m['email'], n))\n\nprint(hprint())\n\nfor i, u in enumerate(sorted(lista, key=lambda u : u[\"nome\"]), start=1):\n\tfprint(str(i), u)\n\nexit()\n\n\nfor u in sorted(lista, key=lambda i : i[\"nome\"]):\n print('{0:.<20} {1:.>35}'.format(u['nome'], u['email']))\n\nexit()\nordenados = []\nfor i in lista:\n# print(i['nome'])\n ordenados.append(i['nome'])\nfor n in sorted(ordenados):\n print(n)\n"
},
{
"alpha_fraction": 0.6405463814735413,
"alphanum_fraction": 0.6556434035301208,
"avg_line_length": 30.613636016845703,
"blob_id": "a29e502eb67dfe96c748c5fd4ab218ffc7ef30e5",
"content_id": "11f150159a97e3469247dcf53f32694cd4f7e890",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1412,
"license_type": "no_license",
"max_line_length": 414,
"num_lines": 44,
"path": "/aula1.py",
"repo_name": "radylemes/python-520",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\ntexto_grotesco = 'Por conseguinte, o novo modelo estrutural aqui preconizado nos obriga à análise das condições financeiras e administrativas exigidas. Ainda assim, existem dúvidas a respeito de como o surgimento do comércio virtual faz parte de um processo de gerenciamento das regras de conduta normativas. Neste sentido, a execução dos pontos do programa exige a precisão e a definição do fluxo de informações.'\n\nnomes = ['Hector', 'Guilherme', 'Joel', 'Flávio', 'Fabiano', 'Roger', 'Cícero', 'Hugo', 'Ayron', 'Leonel', 'Pedro', 'Lucas']\n\nif 'virtual' in texto_grotesco:\n print('Palavra \"virtual\" encontrada')\n\nprint(nomes[-4:])\nprint(len(texto_grotesco.split()))\n\nfor nome in nomes:\n # Mais elegante\n if nome[0] in 'FH':\n print(nome)\n # Segunda mais elegante\n if nome[0] == 'H' or nome[0] == 'F':\n print(nome)\n # A que funciona...\n if nome[0] == 'F':\n print(nome)\n elif nome[0] == 'H':\n print(nome)\n\n# Percorrer a lista \"nomes\" e exibir apenas os nomes \n# que começam com a letra F e H\n# for\n# if com [:] em cima do item \"do momento\"\n\nexit()\nn1 = int(input('Digite o primeiro número: '))\nn2 = int(input('Digite o segundo número: '))\n\nn3 = n1 + n2\n\n# Se o numero for igual a 50, escrever \"...\"\n\nif n3 > 100:\n print('Que número grandão...')\nelif n3 == 50:\n print('...')\nelse:\n print('Que número pequeno...')\n"
},
{
"alpha_fraction": 0.6399286985397339,
"alphanum_fraction": 0.6559714674949646,
"avg_line_length": 16.53125,
"blob_id": "79833f3a28e01d66a8034f9e9f3e4e8bee8ac05a",
"content_id": "4fb304656f5495108057daaec8190e978d0e3f6a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 561,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 32,
"path": "/modulos/modulos.py",
"repo_name": "radylemes/python-520",
"src_encoding": "UTF-8",
"text": "import time, random, datetime\nfrom subprocess import run, PIPE\nimport som\n\nprint(som.FREQUENCIA)\nsom.doppler()\n\nexit()\n#r = subprocess.run(['free', '-h'], stdout=subprocess.PIPE)\n\nr = run(['apt-get', 'install' , '-y', 'sl'], stdout=PIPE, stderr=PIPE)\n\nif r.returncode !=0:\n\tprint('Deu merda!!!!')\n\n\n#print(dir(r))\n#print(r.stdout.decode('utf-8'))\n\nexit()\nletras = ['A', 'B', 'C', 'D']\n\nprint(random.randint(100, 999))\n\ntime.spleep(1)\n\nprint(random.choice(letras))\n\nprint(datetime.datetime.now())\n\nhoje = datetime.datetime.now()\nprint(hoje.strftime('%d/%m/%Y'))\n"
},
{
"alpha_fraction": 0.6345864534378052,
"alphanum_fraction": 0.6451127529144287,
"avg_line_length": 32.29999923706055,
"blob_id": "1b4a2426f8ba4169be6212d3aa176159be335c0d",
"content_id": "948b4d2150391fc535c3030ab2e5e71e5e847a5e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 671,
"license_type": "no_license",
"max_line_length": 66,
"num_lines": 20,
"path": "/exercicios/exemplo.py",
"repo_name": "radylemes/python-520",
"src_encoding": "UTF-8",
"text": "def checkint(valor, nome):\n if not valor.isnumeric():\n print('Campo {0} inválido'.format(nome))\n exit()\n return valor\n\ndef checklen(valor, minimo, nome):\n if len(valor) < minimo:\n print('Campo {0} menor do que {1}'.format(nome, minimo))\n exit()\n return valor\n#!/usr/bin/python3\n\nfrom functions import checkint\n\nid = checkint(input('Digite o id do usuário: '), 'id')\nnome = checklen(input('Digite o nome do usuário: '), 10, 'nome')\nlogin = checklen(input('Digite o login do usuário: '), 6, 'login')\nidade = checkint(input('Digite a idade do usuário: '), 'idade')\nsetor = checkint(input('Digite o setor do usuário: '), 'setor')"
},
{
"alpha_fraction": 0.6061968803405762,
"alphanum_fraction": 0.6331834197044373,
"avg_line_length": 23.108434677124023,
"blob_id": "d21e87d773947ad117fb8310b8f15d4862b31eac",
"content_id": "6249684b15060f9dd47bbbfe34e622558ac9ff9c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2002,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 83,
"path": "/classes/humano.py",
"repo_name": "radylemes/python-520",
"src_encoding": "UTF-8",
"text": "# Criar uma clasee humano\n# Que receba nome, idade e cor\n# Criar um humano chamado Paramahansa Yoganda com 45 anos e negro\n# Criar um humano chamado Monja Coen com 50 anos e branca\n\nclass Humano():\n\t\n\tdef __init__(self, nome='', idade=0, cor=''):\n\t\tself.nome = nome\n\t\tself.idade = idade \n\t\tself.cor = cor\n\n\tdef envelhercer(self):\n\t\tself.idade += 1\n\n\tdef __str__(self):\n\t\treturn 'Você printou o nome do humano {0}'.format(self.nome)\n\nclass Homem(Humano):\n\t\n\tdef __init__(self, nome, idade, cor, veiculo):\n#\t\tsuper(Homem, self).__init__(nome, idade, cor) # chama o construtor da classe de quem herdou\n\t\tsuper().__init__(nome, idade, cor)\n\t\tself.veiculo = veiculo\t\t\n\n\tdef envelhercer(self):\n\t\tself.idade +=2\t\t\n\nhumano1 = Homem(nome='Paramahansa Yoganda', idade=45, cor='Negro', veiculo='Fan 125')\nhumano2 = Humano(nome='Monja Coen', idade=50, cor='Branca')\n\nfor i in range (0, 10):\n\tprint(humano1.nome, humano1.idade, humano1.cor, humano1.veiculo)\n\thumano1.envelhercer()\n\nfor i in range (0, 10):\n\tprint(humano2.nome, humano2.idade, humano2.cor)\n\thumano2.envelhercer()\n\n\nexit()\n\nclass Humano():\n\n def __init__(self, nome, idade, cor, peso):\n self.nome = nome\n self.idade = idade\n self.cor = cor\n self.peso = peso\n\n def envelhecer(self):\n self.idade += 1\n\nclass Homem(Humano):\n\n def __init__(self, nome, idade, cor, peso, veiculo):\n # Chama o construtor da classe de quem herdou\n #super(Homem, self).__init__(nome, idade, cor)\n super().__init__(nome, idade, cor, peso)\n self.veiculo = veiculo\n\n def envelhecer(self):\n self.idade += 2\n \nclass Mulher(Humano):\n\n def engravidar(self):\n self.peso += 200\n\nparamahansa = Homem('Paramahansa Yogananda', 45, 'Negro', 85, 'Fan 125cc')\ncoen = Mulher('Monja Coen', 50, 'Branca', 50)\nprint(coen.peso)\ncoen.engravidar()\nprint(coen.peso)\nexit()\n\nfor i in range(0, 10):\n print(coen.nome, coen.idade)\n coen.envelhecer()\n\nfor i in range(0, 10):\n print(paramahansa.nome, paramahansa.idade)\n paramahansa.envelhecer()\n"
},
{
"alpha_fraction": 0.45268139243125916,
"alphanum_fraction": 0.48304417729377747,
"avg_line_length": 37.984615325927734,
"blob_id": "f3f06c9ebf9667162fee758390cb08839cb96c10",
"content_id": "77c71dd64f164f2dc6ffbc1875afb1961288dbce",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2540,
"license_type": "no_license",
"max_line_length": 117,
"num_lines": 65,
"path": "/aula2.py",
"repo_name": "radylemes/python-520",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n#Exibir o nome do primeiro filho de quem tem filho\n\nusuarios = [{\"nome\": \"Hector\", \"idade\" : 27, \"email\" : \"[email protected]\"},\n {\"nome\": \"Leonel\", \"idade\" : 50,\"email\" : \"[email protected]\", 'filhos' : ['Ana', 'João', 'Antônio']},\n {\"nome\": \"Hugo\", \"idade\" : 36, \"email\" : \"[email protected]\"},\n {\"nome\": \"Guilherme Serafim\", \"idade\" : 27, \"email\" : \"[email protected]\"},\n {\"nome\": \"Joel\", \"idade\" : 28, \"email\" : \"[email protected]\"},\n {\"nome\": \"Ayron\", \"idade\" : 28, \"email\" : \"[email protected]\", 'filhos' : ['Belzebu', 'Belphegor', 'Baphomet']},\n {\"nome\": \"Lucas\", \"idade\" : 25, \"email\" : \"[email protected]\"},\n {\"nome\": \"Roger\", \"idade\" : 23, \"email\" : \"[email protected]\"},\n {\"nome\": \"Victor Lapetina\", \"idade\" : 23, \"email\" : \"[email protected]\"},\n {\"nome\": \"Cicero\", \"idade\" : 52, \"email\" : \"[email protected]\", 'filhos' : ['Nicko', 'Xiphorimphola']},\n {\"nome\": \"Daniel\", \"idade\" : 37, \"email\" : \"[email protected]\"},\n {\"nome\": \"Fabiano\", \"idade\" : 36, \"email\" : \"[email protected]\"},\n {\"nome\": \"Flavio\", \"idade\" : 43, \"email\" : \"[email protected]\", 'filhos' : ['Pedro', 'Paulo', 'Peterson']},\n {\"nome\": \"Guilherme Ayres\", \"idade\" : 23, \"email\" : \"[email protected]\"}]\n\nfor u in usuarios:\n if 'filhos' in u:\n print(u['filhos'][0])\nexit()\n\n# Escrever com print o meu email\n\nusuarios = [{\"nome\": \"Hector\", \"idade\" : 27, \"email\" : \"[email protected]\"},\n{\"nome\": \"Leonel\", \"idade\" : 50,\"email\" : \"[email protected]\"},\n{\"nome\": \"Hugo\", \"idade\" : 36, \"email\" : \"[email protected]\"},\n{\"nome\": \"Guilherme Serafim\", \"idade\" : 27, \"email\" : \"[email protected]\"},\n{\"nome\": \"Joel\", \"idade\" : 28, \"email\" : \"[email protected]\"},\n{\"nome\": \"Ayron\", \"idade\" : 28, \"email\" : \"[email protected]\"},\n{\"nome\": \"Lucas\", \"idade\" : 25, \"email\" : \"[email protected]\"},\n{\"nome\": \"Roger\", \"idade\" : 23, \"email\" : \"[email protected]\"},\n{\"nome\": \"Victor Lapetina\", \"idade\" : 23, \"email\" : \"[email protected]\"},\n{\"nome\": \"Cicero\", \"idade\" : 52, \"email\" : \"[email protected]\"},\n{\"nome\": \"Daniel\", \"idade\" : 37, \"email\" : \"[email protected]\"},\n{\"nome\": \"Fabiano\", \"idade\" : 36, \"email\" : \"[email protected]\"},\n{\"nome\": \"Flavio\", \"idade\" : 43, \"email\" : \"[email protected]\"},\n{\"nome\": \"Guilherme Ayres\", \"idade\" : 23, \"email\" : \"[email protected]\"}]\n\n\nprint(usuarios[0]['email'])\n\nfor u in usuarios:\n# print(u['email'])\n print('NOME: {0:.>20} Email: {1:.>30}'.format(u['nome'], u['email'])) \n\nexit()\nusuarios = [[1,2,'daniel'],[3,4,5,'andre'],[5,6,6,'Teste']]\n\nfor u in usuarios:\n for i in u:\n if type(i) == str:\n print(i.upper())\n\n# Exibir o último número de cada lista\n\nfor usuario in usuarios:\n print(usuario[-1])\n\n\nexit()\nletras = ['a','b','c','e','f']\nv1, *v2 = letras\nprint (v2)\n\n\n"
},
{
"alpha_fraction": 0.5645073652267456,
"alphanum_fraction": 0.5759390592575073,
"avg_line_length": 21.81366539001465,
"blob_id": "33c69d8c53648853fc317ea9526d77846f8da57c",
"content_id": "a6718728ddc182b7360a7a638d3d097a2607c2de",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3683,
"license_type": "no_license",
"max_line_length": 130,
"num_lines": 161,
"path": "/sistema/app.py",
"repo_name": "radylemes/python-520",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\nfrom pymongo import MongoClient\nimport re\nimport os\n\n\nwhile True:\n\ttry:\n\t\tos.system('clear')\n\t\tclient = MongoClient()\n\t\tdb = client.python\n\n\t\tmessagem_inicial = '''\n#################################\n# #\n# Sistema de Terminal 0.1 #\n# #\n#################################\n\n1] Buscar pelo nome\n2] Cadastrar\n3] Atualizar\n4] Deletar\n5] Listar Usuarios\n6] Sair\n\t\t'''\n\t\tprint(messagem_inicial)\n\t\topcao = int(input('Selecione uma opção: '))\n\n\t\tif opcao <=6 and opcao != 0:\n\t\t\tif opcao == 1:\n\t\t\t\tnome = input('Digite o nome: ')\n\t\t\t\tnome = re.compile(nome, re.IGNORECASE)\n\t\t\t\tcheck = db.usuarios.find({'nome': nome}).count()\n\t\t\t\tif check == 0:\n\t\t\t\t\tprint('Nome não localizado!!!!')\n\t\t\t\t\tinput('Digite enter para continuar')\n\t\t\t\telse:\n\t\t\t\t\tfor i in db.usuarios.find({'nome': nome}):\n\t\t\t\t\t\tprint('{0:.<26} {1:.<30} {2: ^3}'.format(i['nome'], i['email'], i['idade']))\n\t\t\t\t\tinput('Digite enter para continuar')\n\n\t\t\tif opcao == 2:\n\t\t\t\tnome = input('Digite o nome do usuario: ')\n\t\t\t\temail = input('Digite o email: ')\n\t\t\t\tidade = int(input('Digite a idade: '))\n\t\t\t\tsenha = input('Digite a senha: ')\n\t\t\t\tid_banco = db.usuarios.find({},{\"_id\" : 1}).sort([(\"_id\", -1)]).limit(1)\n\t\t\t\tfor i in id_banco:\n\t\t\t\t\tid_banco = (i['_id']) + 1\n\t\t\t\tdb.usuarios.insert({'_id' : id_banco, 'nome' : nome, 'email' : email, 'idade' : idade, 'senha' : senha })\n\t\t\t\tprint('Usuario cadastrado com sucesso!!!')\n\t\t\t\tinput('Digite enter para continuar')\n\n\t\t\tif opcao == 3:\n\t\t\t\tnome = input('Digite o nome do usuario para atualizar cadastro: ')\n\t\t\t\tnome = re.compile(nome, re.IGNORECASE)\n\t\t\t\tfor i in db.usuarios.find({'nome': nome}):\n\t\t\t\t\t\tid_banco = (i['_id'])\n\t\t\t\t\t\tnome = input('Digite o nome do usuario: ')\n\t\t\t\t\t\temail = input('Digite o email: ')\n\t\t\t\t\t\tidade = int(input('Digite a idade: '))\n\t\t\t\t\t\tsenha = input('Digite a senha: ')\n\t\t\t\t\t\tdb.usuarios.update({'_id' : id_banco},{'_id' : id_banco, 'nome' : nome, 'email' : email, 'idade' : idade, 'senha' : senha })\n\t\t\t\tprint('Atualizado com sucesso!!')\n\n\t\t\tif opcao == 4:\n\t\t\t\tfor i in db.usuarios.find():\n\t\t\t\t\tprint(i['nome'])\n\t\t\t\tnome = input('Digite o nome do usuario para exclusão: ')\n\t\t\t\tnome = re.compile(nome, re.IGNORECASE)\n\t\t\t\tfor i in db.usuarios.find({'nome': nome}):\n\t\t\t\t\tid_banco = (i['_id'])\n\t\t\t\t\tdb.usuarios.remove({'_id' : id_banco})\n\t\t\t\tprint('Usuario removido com sucesso!!!')\n\t\t\t\tinput('Digite enter para continuar')\n\t\t\t\n\t\t\tif opcao == 5:\n\t\t\t\tfor i in db.usuarios.find():\n\t\t\t\t\tprint('{0:.<26} {1:.<30} {2: ^3}'.format(i['nome'], i['email'], i['idade']))\n\t\t\t\tinput('Digite enter para continuar')\t\n\n\t\t\tif opcao == 6:\n\t\t\t\tprint('Até logo!!!!')\n\t\t\t\texit()\n\t\telse:\n\t\t\tprint('Opção invalida!!!')\n\t\t\tinput('Digite enter para continuar')\n\n\texcept Exception as e:\n\t\tprint(e)\n\t\tprint('Erro encontrado saindo!!!!')\n\t\tbreak\n\n\n\n\n\t\t\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nexit()\n\nwhile True:\n try:\n input('Digite um nome qualquer: ')\n except:\n print('\\nSaindo...')\n break\n\n\n\nexit()\nfrom modulos.mysql import dados\nfrom modulos.mysql import cb\n\nnome = input('Digite o nome do usuario: ')\nemail = input('Digite o email: ')\nsql_email = \"SELECT * FROM usuarios WHERE email = '{0}'\".format(email)\nresultado = 
dados.execute(sql_email)\nif resultado == 1:\n\tprint('Email ja cadastrado!!!')\n\texit()\nsexo = input('Digite o sexo: ')\nif 'Masculino' in sexo.title():\n\t\tsexo = 0\nelif 'Feminino' in sexo.title():\n\tsexo = 1\nelse:\n\tprint('Opção invalida', sexo)\n\texit()\ngravar = \"INSERT INTO usuarios (nome, email, sexo) VALUES (%s, %s,%s)\"\ndados.execute(gravar, (nome, email, sexo))\ncb.cnn.commit()\n\nsql = \"SELECT * FROM usuarios \"\ndados.execute(sql)\n\nfor i in dados:\n\tprint(i)\n\n"
},
{
"alpha_fraction": 0.584269642829895,
"alphanum_fraction": 0.5917602777481079,
"avg_line_length": 21.16666603088379,
"blob_id": "3d95084fe475f5a463e683a40e95f66dea036c09",
"content_id": "524bfa09ac29e3488f0ed7061cf965d25b88930f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 267,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 12,
"path": "/aula3.py",
"repo_name": "radylemes/python-520",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\n#arquivo = open('zen.txt').read().upper()\narquivo = open('zen.txt')\n#print(type(arquivo))\n#print(arquivo.readlines())\nfor linha in arquivo:\n if '-' not in linha[0]:\n# linha = linha.strip()\n# if linha != '-':\n print(linha, end='')\n#\n\n"
},
{
"alpha_fraction": 0.6872246861457825,
"alphanum_fraction": 0.6960352659225464,
"avg_line_length": 16.461538314819336,
"blob_id": "69f3b53ee1e32fdb527a8b5d25d19734c01d1f47",
"content_id": "3a6f120bf6a52c302d23bd612eda6a3806e84d58",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 232,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 13,
"path": "/README.md",
"repo_name": "radylemes/python-520",
"src_encoding": "UTF-8",
"text": "#Meu primeiro repositório\n\nEste **repositório** serve apenas para testes. Ainda bem que é gratuíto.\n\nPython Fundamentals\n-------------------\n\nPara executar estes scripts, faça da seguinte forma:\n\n\tpython3 aula1.py\n\nAulas\n-----\n"
},
{
"alpha_fraction": 0.5648000240325928,
"alphanum_fraction": 0.5935999751091003,
"avg_line_length": 15.837838172912598,
"blob_id": "c782195b47cf1743c7d390d26e5e1ddb1c525191",
"content_id": "c0b2c6c7aab7b6bd83e4ab459d688eebbaeecfca",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 631,
"license_type": "no_license",
"max_line_length": 62,
"num_lines": 37,
"path": "/funcoes.py",
"repo_name": "radylemes/python-520",
"src_encoding": "UTF-8",
"text": "#print, len, str, format, int, input, range, enumerate\na = int('10')\nb = str(10)\nc = range(0, 10)\n\ndef square(n):\n return n**2\n\ndef grande(param1):\n print(param1.upper())\n\ngrande('Hector vido')\ngrande('Lucas salles')\ngrande('Ayron Pedro')\n\nquadrado = square(10)\n\ndef par_impar(n):\n if n % 2 == 0:\n print ('par')\n else:\n print ('impar')\n\nfor i in range(0, 10)\n par_impar(i)\n\n\nletras = ['a', 'b', 'c', 'd', 'e']\n# Passo?\n# criar uma função que retorna a lista ou palavra ao contrario\n# chamar a função de \"reverse\"\n# printar a lista fora da função\n\ndef contrario(n):\n return n[::-1]\n\nprint(contrario(letras))\n\n\n"
},
{
"alpha_fraction": 0.497854083776474,
"alphanum_fraction": 0.54935622215271,
"avg_line_length": 27.375,
"blob_id": "689de3b8da92b7c912c11de408d4940f1ba4220a",
"content_id": "2a71ee5bfb436eaa7f6ffc194c8801fc07d457e1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 233,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 8,
"path": "/compreender.py",
"repo_name": "radylemes/python-520",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\nfor n in map(lambda i : i*2, [1,2,3,4]):\n print(n)\n\nduck_tails = ['Huguinho|1', 'Zezinho|2', 'Luizinho|3']\nfor d in [i.split('|')[0] for i in duck_tails]: # if 'Zezinho' not in i]:\n print(d[0], d[-1])\n\n\n\n\n\n\n"
},
{
"alpha_fraction": 0.6372092962265015,
"alphanum_fraction": 0.6511628031730652,
"avg_line_length": 25.875,
"blob_id": "6853cb2577d178d06e5cb40331d4940b30531788",
"content_id": "4e55ec058f84c03158d68eec737686284bff8db2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 215,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 8,
"path": "/1",
"repo_name": "radylemes/python-520",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n#Abrir o arquivo usuarios\n#separar os valores por \",\"\n#Escrever na tela o dicionario:\n# {\"nome\" : \"Hector\", \"idade\" : 27, \"email\" : \"[email protected]\"}\n\nfor linha in open('usuarios.csv'):\n print(lista)\n"
},
{
"alpha_fraction": 0.6499999761581421,
"alphanum_fraction": 0.699999988079071,
"avg_line_length": 14,
"blob_id": "813dd079d1821379875b5bf6d8728fe4aa9c8c1c",
"content_id": "4b01d34ec26448941053f7e23bcf6de2455346ee",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 60,
"license_type": "no_license",
"max_line_length": 22,
"num_lines": 4,
"path": "/modulos/som.py",
"repo_name": "radylemes/python-520",
"src_encoding": "UTF-8",
"text": "FREQUENCIA = '400Hz'\n\ndef doppler():\n\tprint('Biiiiiiiiiii')\n"
},
{
"alpha_fraction": 0.6790540814399719,
"alphanum_fraction": 0.7027027010917664,
"avg_line_length": 23.58333396911621,
"blob_id": "00a717059c809c025625bd9bb24a3874cfaba0e8",
"content_id": "9d8797c4bd2258585b37b092e65ce3390458356c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 296,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 12,
"path": "/sistema/modulos/mysql.py",
"repo_name": "radylemes/python-520",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\nfrom MySQLdb import connect\nfrom MySQLdb.cursors import DictCursor\n\nclass Consulta_Banco():\n\n\tdef __init__(self):\n\t\tself.cnn = connect(host='127.0.0.1', user='python', passwd='4linux123', db='python', cursorclass=DictCursor)\n\t\t\ncb = Consulta_Banco()\ndados = cb.cnn.cursor()\n\n"
},
{
"alpha_fraction": 0.6225961446762085,
"alphanum_fraction": 0.6274038553237915,
"avg_line_length": 20.947368621826172,
"blob_id": "0e5f53b24fa5d00ee6b49f9031490ba73ddd6310",
"content_id": "82769f63804f3b26e98bd9df2d17469d9c0f244d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 416,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 19,
"path": "/mongo.py",
"repo_name": "radylemes/python-520",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\nfrom pymongo import MongoClient\n\nclient = MongoClient()\ndb = client.python\n\nnome = input('Digite o nome do usuario: ')\nemail = input('Digite o email: ')\nidade = input('Digite a idade: ')\n\ndb.usuarios.insert({'_id': 6, 'nome' : nome, 'email' : email, 'idade' : idade })\n\nexit()\n\nfor i in db.usuarios.find():\n\tif 'filhos' in i:\n\t\tfor f in i['filhos']:\n\t\t\tif f['nome'] == 'Jose':\n\t\t\t\tprint(f['nome'])"
}
] | 17 |
ilkesert/Whatsappdan_mesaj
|
https://github.com/ilkesert/Whatsappdan_mesaj
|
dde37fdd4dbeb1a6b51b7062552fe9354094e94a
|
ba02661e6de0b6304fd5224dc6ec37abd3eadd32
|
885cfbb65261a979fd0a5bbd94d02f62f30faf45
|
refs/heads/main
| 2023-05-30T04:18:04.502554 | 2021-06-24T16:49:37 | 2021-06-24T16:49:37 | 379,992,834 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7615740895271301,
"alphanum_fraction": 0.7662037014961243,
"avg_line_length": 37.45454406738281,
"blob_id": "2acfe6f4226afba6e6714553cc3d35054c84635a",
"content_id": "126d61a409bf35121214fffa78206d78bfe835c3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 446,
"license_type": "no_license",
"max_line_length": 133,
"num_lines": 11,
"path": "/main.py",
"repo_name": "ilkesert/Whatsappdan_mesaj",
"src_encoding": "UTF-8",
"text": "import webbrowser\r\nimport time\r\nimport pyautogui\r\n\r\nkisi = input(str(\"Kişinin numarasını giriniz\"))\r\nmesaj = input(\"Yazmak istediğiniz mesajı giriniz\")\r\nwebbrowser.open('https://web.whatsapp.com/send?phone=' + kisi + '&text=' + mesaj)\r\ntime.sleep(30)\r\npyautogui.press('enter')\r\nprint(\"gönderildi\")\r\n#Eğer istenilirse ses alma modülü kurulup onunla beraber yazı yazmadan sesli halde kurulabilir. Yardım için iletişime geçebilirsiniz."
}
] | 1 |
Grygon/OnlineEnrollmentDocs
|
https://github.com/Grygon/OnlineEnrollmentDocs
|
027601f68527a00feef2e1aa1ecfe82b9beea733
|
5312adb4b5c8862376dfd8cafefd977ac8a6e0c7
|
e5ccb1095876a8efdeb4cad441261742787e54b6
|
refs/heads/master
| 2021-01-10T17:40:52.593347 | 2016-09-09T16:53:57 | 2016-09-09T16:53:57 | 54,496,247 | 0 | 0 | null | 2016-03-22T17:43:22 | 2016-03-29T16:04:47 | 2016-09-09T16:36:57 |
Python
|
[
{
"alpha_fraction": 0.800000011920929,
"alphanum_fraction": 0.800000011920929,
"avg_line_length": 53.375,
"blob_id": "afbacde69f0a6ca452d84145241742579eddee90",
"content_id": "6123e74dc6d1c8faa628fbde6a41a9b63fa337db",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 435,
"license_type": "no_license",
"max_line_length": 195,
"num_lines": 8,
"path": "/README.md",
"repo_name": "Grygon/OnlineEnrollmentDocs",
"src_encoding": "UTF-8",
"text": "# OnlineEnrollmentDocs\nPython script to handle enrollment documents\n\nStandalone version can be found under ./build folder. Entire interior folder is needed to function. To build run python setup.py build\n\nTo use, run OnlineEnrollmentCalc(.py or .exe) and select one or multiple enrollment files and an output file. Use standard file selection. Enrollment and drops files must be formatted using .csv.\n\nFor help, contact Jakob Rubin [email protected]\n"
},
{
"alpha_fraction": 0.474789023399353,
"alphanum_fraction": 0.48512372374534607,
"avg_line_length": 37.15006637573242,
"blob_id": "3237a543037b09412ff74b6a9356d0d1b08b7e30",
"content_id": "275dab1e13a771545af3cb242c332f8a4b9865f2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 27964,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 733,
"path": "/OnlineEnrollmentCalc.py",
"repo_name": "Grygon/OnlineEnrollmentDocs",
"src_encoding": "UTF-8",
"text": "import csv\n\n# ID'ed by their net IDs\n\n\nclass Student:\n\n def __init__(self, email, first, last):\n self.email = email\n self.first = first\n self.last = last\n self.progs = {}\n self.courses = []\n self.droppedCourses = []\n self.reasons = []\n\n def addCourse(self, course):\n if course not in self.courses:\n self.courses.append(course)\n\n def dropCourse(self, course, reason=\"\"):\n if course in self.courses:\n self.droppedCourses.append(course)\n self.reasons.append(reason)\n if course in self.courses:\n self.courses.pop(course)\n\n def addProg(self, program, term):\n if term not in self.progs:\n self.progs[term] = program\n\n\n# ID'ed by the course number\nclass Course:\n\n def __init__(self, cid, title, credits, term):\n self.cid = cid\n self.title = title\n self.credits = credits\n self.term = term\n self.current = False\n\nallStudents = {}\nallCourses = {}\nprograms = []\nvirtPrograms = []\n\n\ndef readFile(file):\n with open(file, newline='') as csvfile:\n\n reader = csv.reader(csvfile)\n next(reader)\n next(reader)\n for row in reader:\n # This whole section uses magic numbers based on the CSV\n courseData = row[10:13] + [row[14]] + [row[7]]\n # Uses Class num as identifier\n if courseData[0] not in allCourses:\n # Just trust that it works\n allCourses[courseData[0]] = \\\n Course(courseData[1], courseData[2],\n courseData[3], row[1])\n studentData = row[2:6]\n # Using campus ID for tracking\n if studentData[1] not in allStudents:\n name = studentData[0].partition(\",\")\n # Yes... let the hate flow through you\n allStudents[studentData[1]] = \\\n Student(studentData[0], name[0], name[2])\n\n # Sorts through whether the student is enrolled\n # or dropped the course\n if courseData[4] == \"ENRL\":\n allStudents[studentData[1]].addCourse(\n allCourses[courseData[0]])\n else:\n allStudents[studentData[1]].dropCourse(\n allCourses[courseData[0]], courseData[4])\n\n # Adds the program to the student\n allStudents[studentData[1]].addProg(\n studentData[2], row[1])\n\n # Removes students with no registered courses\n if len(allStudents[studentData[1]].courses) == 0:\n allStudents.pop(studentData[1])\n\n\n# Counts number of course enrollments per term. 
Each student\n# enrolled per course increases it by 1\n\n\ndef studentsPerTerm(students):\n termStudents = {}\n for key, student in students.items():\n for prog in student.progs:\n if prog not in termStudents:\n termStudents[prog] = {}\n termStudents[prog][key] = student\n return termStudents\n\n\n# Filters out students who are virt in the given term\ndef virtFilter(students, virt, term):\n virtStudents = {}\n for key, student in students.items():\n for t, prog in student.progs.items():\n if ('V' in prog) == virt and t == term:\n virtStudents[key] = student\n\n return virtStudents\n\n\n# Filters for students in the given program in the given term\ndef progFilter(students, program, term):\n filtered = {}\n for key, student in students.items():\n if term in student.progs and student.progs[term] == program:\n filtered[key] = student\n return filtered\n\n\n# Gets a dict of enrolled programs for the given term\ndef progGet(students):\n prog = {}\n for key, student in students.items():\n if student.progs[term] not in prog:\n prog[student.progs[term]] = {}\n prog[student.progs[term]][key] = student\n return prog\n\n\n# Returns a dict of terms and noobs for the respective term\n# TODO: Optimize for new progs\ndef newFilter(students):\n noobs = {}\n for key, student in students.items():\n firstTerm = min(student.courses, key=(lambda t: keyTerm(t.term))).term\n if firstTerm not in noobs:\n noobs[firstTerm] = {}\n noobs[firstTerm][key] = student\n return noobs\n\n\n# Returns a dict of students taking the number of classes in the term\ndef numClassFilter(students, num, term):\n takers = {}\n for key, student in students.items():\n numTermCourses = 0\n for course in student.courses:\n if course.term == term:\n numTermCourses += 1\n if numTermCourses is num:\n takers[key] = student\n\n return takers\n\n\n# Returns the intersection of two students\ndef overlap(firstStudents, secondStudents):\n overlappers = {}\n for key in firstStudents:\n if key in secondStudents:\n overlappers[key] = firstStudents[key]\n\n return overlappers\n\n\n# Finds if there is a student in the given term\ndef inTerm(students, term):\n for key, student in students:\n for pTerm in student.progs:\n if pTerm == term:\n return True\n return False\n\n\n# Finds if a student is in both terms\ndef inTerms(student, term1, term2):\n for term in student.progs:\n if term is term1 and term is term2:\n return True\n\n return False\n\n\ndef percentage(n1, n2):\n if n2 is 0:\n return \"N/A\"\n return str(round((n1 / n2) * 100, 2)) + \"%\"\n\n\noutFile = None\n\n\ndef registerFiles():\n global outFile\n print(\"Please enter an enrollment file:\")\n while True:\n try:\n read = input(\"---> \")\n if read is \"\" and len(allStudents) > 0:\n break\n readFile(read)\n print(\"\"\"File read, please enter another file or\n a blank line to finish reading\"\"\")\n except Exception as e:\n print(e)\n print(\n \"\"\"Invalid enrollment file, please enter a valid file\"\"\")\n\n print(\"Please enter file to write to:\")\n outFile = input(\"---> \")\n\n\ndef createPrograms():\n for key, student in allStudents.items():\n for term, prog in student.progs.items():\n if prog not in programs:\n if 'V' in prog:\n virtPrograms.append(prog)\n programs.append(prog)\n\n virtPrograms.sort()\n programs.sort()\n\n\ndef keyTerm(t):\n seasonComp = {\"Spring\": 1, \"Summer\": 2, \"Fall\": 3}\n return t[-4:] + str(seasonComp[t[:-5]])\n\n\n# To execute at runtime\ndef runTime():\n registerFiles()\n # testFiles()\n createPrograms()\n with open(outFile, 'w', newline='') as f:\n 
write = lambda w: csv.writer(f).writerow(w)\n\n termStudents = studentsPerTerm(allStudents)\n virtStudents = {}\n for term in termStudents:\n virtStudents[term] = virtFilter(allStudents, True, term)\n\n # Question 1: Students virt/not per term\n print(\"Question 1\")\n write([\"# and % of students on/off campus\"])\n numPercent = lambda x: [str(x), percentage(x, len(termStudents[term]))]\n write([\"\", \"# On\", \"% On\", \"# Off\", \"% Off\"])\n for term in sorted(termStudents, key=keyTerm):\n write([term] +\n numPercent(len(virtFilter(termStudents[term], False, term))) +\n numPercent(len(virtStudents[term])))\n\n write([\"\"])\n\n # Question 2: Students in program per term\n print(\"\\nQuestion 2\")\n write([\"# of students in each online program\"])\n write([\"\"] + virtPrograms + [\"Total\"])\n for term in sorted(termStudents, key=keyTerm):\n write([term] + [len(progFilter(termStudents[term], program, term))\n for program in virtPrograms] +\n [len(virtFilter(termStudents[term], True, term))])\n\n write([\"\"])\n\n # Question 3:\n print(\"\\nQuestion 3\")\n write([\"# of new students in each online program\"])\n write([\"\"] + virtPrograms + [\"Total\"])\n noobs = newFilter(allStudents)\n for term in sorted(termStudents, key=keyTerm):\n write([term] + [len(progFilter(noobs[term], program, term))\n for program in virtPrograms] +\n [len(virtFilter(noobs[term], True, term))])\n\n write([\"\"])\n\n # Question 4:\n print(\"\\nQuestion 4\")\n write([\"% of students in each online program taking 1 class\"])\n write([\"\"] + virtPrograms + [\"Total\"])\n for term in sorted(termStudents, key=keyTerm):\n write([term] +\n [percentage(len(numClassFilter(progFilter(termStudents[term], program, term), 1, term)),\n len(progFilter(termStudents[term], program, term)))\n for program in virtPrograms] +\n [percentage(len(numClassFilter(termStudents[term], 1, term)),\n len(termStudents[term]))])\n\n write([\"\"])\n\n # Question 5:\n print(\"\\nQuestion 5\")\n write([\"% of students in each online program taking 2 classes\"])\n write([\"\"] + virtPrograms + [\"Total\"])\n for term in sorted(termStudents, key=keyTerm):\n write([term] +\n [percentage(len(numClassFilter(progFilter(termStudents[term], program, term), 2, term)),\n len(progFilter(termStudents[term], program, term)))\n for program in virtPrograms] +\n [percentage(len(numClassFilter(termStudents[term], 2, term)),\n len(termStudents[term]))])\n\n write([\"\"])\n\n # Question 6:\n print(\"\\nQuestion 6a\")\n write([\"# of students in each online program taking 1 class\"])\n write([\"\"] + virtPrograms + [\"Total\"])\n for term in sorted(termStudents, key=keyTerm):\n write([term] +\n [len(numClassFilter(progFilter(termStudents[term], program, term), 1, term))\n for program in virtPrograms] +\n [len(numClassFilter(virtStudents[term], 1, term))])\n\n write([\"\"])\n\n # Question 6:\n print(\"\\nQuestion 6b\")\n write([\"% of students in each online program taking 1 class\"])\n write([\"\"] + virtPrograms + [\"Total\"])\n for term in sorted(termStudents, key=keyTerm):\n write([term] +\n [percentage(len(numClassFilter(progFilter(termStudents[term], program, term), 1, term)),\n len(progFilter(termStudents[term], program, term)))\n for program in virtPrograms] +\n [percentage(len(numClassFilter(virtStudents[term], 1, term)),\n len(virtStudents[term]))])\n\n write([\"\"])\n\n # Question 7:\n print(\"\\nQuestion 7a\")\n write([\"# of students in each online program taking 2 classes\"])\n write([\"\"] + virtPrograms + [\"Total\"])\n for term in 
sorted(termStudents, key=keyTerm):\n write([term] +\n [len(numClassFilter(progFilter(termStudents[term], program, term), 2, term))\n for program in virtPrograms] +\n [len(numClassFilter(virtStudents[term], 2, term))])\n\n write([\"\"])\n\n # Question 7:\n print(\"\\nQuestion 7b\")\n write([\"% of students in each online program taking 2 classes\"])\n write([\"\"] + virtPrograms + [\"Total\"])\n for term in sorted(termStudents, key=keyTerm):\n write([term] +\n [percentage(len(numClassFilter(progFilter(termStudents[term], program, term), 2, term)),\n len(progFilter(termStudents[term], program, term)))\n for program in virtPrograms] +\n [percentage(len(numClassFilter(virtStudents[term], 2, term)),\n len(virtStudents[term]))])\n\n write([\"\"])\n\n terms = sorted(termStudents, key=keyTerm)\n\n # Question 8:\n print(\"\\nQuestion 8a\")\n write(\n [\"# of students in each online program who took 1 last term, now 2 classes\"])\n write([\"\"] + virtPrograms + [\"Total\"])\n for term in sorted(termStudents, key=keyTerm):\n nowTerm = terms.index(term)\n if nowTerm == 0:\n continue\n write([term] +\n [len(overlap(\n # In theory I don't need an overlap here,\n # but I don't want to mess with it...\n # Students who took 1 last term\n numClassFilter(\n progFilter(\n virtStudents[terms[nowTerm - 1]], program, terms[nowTerm - 1]),\n 1, terms[nowTerm - 1]),\n # Students taking 2 now\n numClassFilter(\n progFilter(virtStudents[term], program, term),\n 2, terms[nowTerm])))\n for program in virtPrograms] +\n # Is this right? Depends what we're looking for\n [len(overlap(\n # Students who took 1 last term\n numClassFilter(\n virtStudents[terms[nowTerm - 1]],\n 1, terms[nowTerm - 1]),\n # Students taking 2 now\n numClassFilter(\n virtStudents[term],\n 2, terms[nowTerm])))])\n\n write([\"\"])\n\n # Question 8:\n print(\"\\nQuestion 8b\")\n write(\n [\"% of students in each online program who took 1 last term, now 2 classes\"])\n # Comparing % for num who took 1\n write([\"\"] + virtPrograms + [\"Total\"])\n for term in sorted(termStudents, key=keyTerm):\n nowTerm = terms.index(term)\n if nowTerm == 0:\n continue\n write([term] +\n [percentage(len(overlap(\n # In theory I don't need an overlap here,\n # but I don't want to mess with it...\n # Students who took 1 last term\n numClassFilter(\n progFilter(\n virtStudents[terms[nowTerm - 1]], program, terms[nowTerm - 1]),\n 1, terms[nowTerm - 1]),\n # Students taking 2 now\n numClassFilter(\n progFilter(virtStudents[term], program, term),\n 2, terms[nowTerm]))),\n len(numClassFilter(\n progFilter(\n virtStudents[terms[nowTerm - 1]], program, terms[nowTerm - 1]),\n 1, terms[nowTerm - 1])))\n for program in virtPrograms] +\n # Is this right? 
Depends what we're looking for\n [percentage(len(overlap(\n # Students who took 1 last term\n numClassFilter(\n virtStudents[terms[nowTerm - 1]],\n 1, terms[nowTerm - 1]),\n # Students taking 2 now\n numClassFilter(\n virtStudents[term],\n 2, terms[nowTerm]))),\n len(numClassFilter(\n virtStudents[terms[nowTerm - 1]],\n 1, terms[nowTerm - 1])))])\n\n write([\"\"])\n\n # Question 9:\n print(\"\\nQuestion 9a\")\n write(\n [\"# of students in each online program who took 1 last term, now 1 class\"])\n write([\"\"] + virtPrograms + [\"Total\"])\n for term in sorted(termStudents, key=keyTerm):\n nowTerm = terms.index(term)\n if nowTerm == 0:\n continue\n write([term] +\n [len(overlap(\n # Students who took 1 last term\n numClassFilter(\n progFilter(\n virtStudents[terms[nowTerm - 1]], program, terms[nowTerm - 1]),\n 1, terms[nowTerm - 1]),\n # Students taking 1 now\n numClassFilter(\n progFilter(virtStudents[term], program, term),\n 1, terms[nowTerm])))\n for program in virtPrograms] +\n # Is this right? Depends what we're looking for\n [len(overlap(\n # Students who took 1 last term\n numClassFilter(\n virtStudents[terms[nowTerm - 1]],\n 1, terms[nowTerm - 1]),\n # Students taking 1 now\n numClassFilter(\n virtStudents[term],\n 1, terms[nowTerm])))])\n\n write([\"\"])\n\n # Question 9:\n print(\"\\nQuestion 9b\")\n write(\n [\"% of students in each online program who took 1 last term, now 1 class\"])\n write([\"\"] + virtPrograms + [\"Total\"])\n for term in sorted(termStudents, key=keyTerm):\n nowTerm = terms.index(term)\n if nowTerm == 0:\n continue\n write([term] +\n [percentage(len(overlap(\n # Students who took 1 last term\n numClassFilter(\n progFilter(\n virtStudents[terms[nowTerm - 1]], program, terms[nowTerm - 1]),\n 1, terms[nowTerm - 1]),\n # Students taking 1 now\n numClassFilter(\n progFilter(virtStudents[term], program, term),\n 1, terms[nowTerm]))),\n len(numClassFilter(\n progFilter(virtStudents[term], program, term),\n 1, terms[nowTerm])))\n for program in virtPrograms] +\n # Is this right? Depends what we're looking for\n [percentage(len(overlap(\n # Students who took 1 last term\n numClassFilter(\n virtStudents[terms[nowTerm - 1]],\n 1, terms[nowTerm - 1]),\n # Students taking 1 now\n numClassFilter(\n virtStudents[term],\n 1, terms[nowTerm]))),\n len(numClassFilter(\n virtStudents[terms[nowTerm - 1]],\n 1, terms[nowTerm - 1])))])\n\n write([\"\"])\n\n # Question 10:\n print(\"\\nQuestion 10a\")\n write(\n [\"# of students in each online program who took 2 last term, now 2 classes\"])\n write([\"\"] + virtPrograms + [\"Total\"])\n for term in sorted(termStudents, key=keyTerm):\n nowTerm = terms.index(term)\n if nowTerm == 0:\n continue\n write([term] +\n [len(overlap(\n # Students who took 2 last term\n numClassFilter(\n progFilter(\n virtStudents[terms[nowTerm - 1]], program, terms[nowTerm - 1]),\n 2, terms[nowTerm - 1]),\n # Students taking 2 now\n numClassFilter(\n progFilter(virtStudents[term], program, term),\n 2, terms[nowTerm])))\n for program in virtPrograms] +\n # Is this right? 
Depends what we're looking for\n [len(overlap(\n # Students who took 2 last term\n numClassFilter(\n virtStudents[terms[nowTerm - 1]],\n 2, terms[nowTerm - 1]),\n # Students taking 2 now\n numClassFilter(\n virtStudents[term],\n 2, terms[nowTerm])))])\n\n write([\"\"])\n\n # Question 10:\n print(\"\\nQuestion 10b\")\n write(\n [\"% of students in each online program who took 2 last term, now 2 classes\"])\n write([\"\"] + virtPrograms + [\"Total\"])\n for term in sorted(termStudents, key=keyTerm):\n nowTerm = terms.index(term)\n if nowTerm == 0:\n continue\n write([term] +\n [percentage(len(overlap(\n # Students who took 2 last term\n numClassFilter(\n progFilter(\n virtStudents[terms[nowTerm - 1]], program, terms[nowTerm - 1]),\n 2, terms[nowTerm - 1]),\n # Students taking 2 now\n numClassFilter(\n progFilter(virtStudents[term], program, term),\n 2, terms[nowTerm]))),\n len(numClassFilter(progFilter(\n virtStudents[terms[nowTerm - 1]], program, terms[nowTerm - 1]),\n 2, terms[nowTerm - 1])))\n for program in virtPrograms] +\n # Is this right? Depends what we're looking for\n [percentage(len(overlap(\n # Students who took 2 last term\n numClassFilter(\n virtStudents[terms[nowTerm - 1]],\n 2, terms[nowTerm - 1]),\n # Students taking 2 now\n numClassFilter(\n virtStudents[term],\n 2, terms[nowTerm]))),\n len(numClassFilter(\n virtStudents[terms[nowTerm - 1]],\n 2, terms[nowTerm - 1])))])\n\n write([\"\"])\n\n # Question 11:\n print(\"\\nQuestion 11a\")\n write(\n [\"# of students in each online program who took 2 last term, now 1 class\"])\n write([\"\"] + virtPrograms + [\"Total\"])\n for term in sorted(termStudents, key=keyTerm):\n nowTerm = terms.index(term)\n if nowTerm == 0:\n continue\n write([term] +\n [len(overlap(\n # Students who took 2 last term\n numClassFilter(\n progFilter(\n virtStudents[terms[nowTerm - 1]], program, terms[nowTerm - 1]),\n 2, terms[nowTerm - 1]),\n # Students taking 1 now\n numClassFilter(\n progFilter(virtStudents[term], program, term),\n 1, terms[nowTerm])))\n for program in virtPrograms] +\n # Is this right? Depends what we're looking for\n [len(overlap(\n # Students who took 2 last term\n numClassFilter(\n virtStudents[terms[nowTerm - 1]],\n 2, terms[nowTerm - 1]),\n # Students taking 1 now\n numClassFilter(\n virtStudents[term],\n 1, terms[nowTerm])))])\n\n write([\"\"])\n\n # Question 11:\n print(\"\\nQuestion 11b\")\n write(\n [\"% of students in each online program who took 2 last term, now 1 class\"])\n write([\"\"] + virtPrograms + [\"Total\"])\n for term in sorted(termStudents, key=keyTerm):\n nowTerm = terms.index(term)\n if nowTerm == 0:\n continue\n write([term] +\n [percentage(len(overlap(\n # Students who took 2 last term\n numClassFilter(\n progFilter(\n virtStudents[terms[nowTerm - 1]], program, terms[nowTerm - 1]),\n 2, terms[nowTerm - 1]),\n # Students taking 1 now\n numClassFilter(\n progFilter(virtStudents[term], program, term),\n 1, terms[nowTerm]))),\n len(numClassFilter(\n progFilter(\n virtStudents[terms[nowTerm - 1]], program, terms[nowTerm - 1]),\n 2, terms[nowTerm - 1])))\n\n for program in virtPrograms] +\n # Is this right? 
Depends what we're looking for\n [percentage(len(overlap(\n # Students who took 2 last term\n numClassFilter(\n virtStudents[terms[nowTerm - 1]],\n 2, terms[nowTerm - 1]),\n # Students taking 1 now\n numClassFilter(\n virtStudents[term],\n 1, terms[nowTerm]))),\n len(numClassFilter(\n virtStudents[terms[nowTerm - 1]],\n 2, terms[nowTerm - 1])))])\n\n write([\"\"])\n\n # Question 12:\n print(\"\\nQuestion 12a\")\n write(\n [\"# of students in each online program who were in last and current semester\"])\n write([\"\"] + virtPrograms + [\"Total\"])\n for term in sorted(termStudents, key=keyTerm):\n terms = sorted(termStudents, key=keyTerm)\n nowTerm = terms.index(term)\n if nowTerm == 0:\n continue\n write([term] +\n [len(overlap(\n # Students in last term\n progFilter(\n virtStudents[terms[nowTerm - 1]], program, terms[nowTerm - 1]),\n # Students now\n progFilter(virtStudents[term], program, term)))\n for program in virtPrograms] +\n [len(overlap(\n # Students in last term\n virtStudents[terms[nowTerm - 1]],\n # Students now\n virtStudents[term]))])\n\n write([\"\"])\n\n # Question 12:\n print(\"\\nQuestion 12b\")\n write(\n [\"% of students in each online program in last semester also in current semester\"])\n write([\"\"] + virtPrograms + [\"Total\"])\n for term in sorted(termStudents, key=keyTerm):\n terms = sorted(termStudents, key=keyTerm)\n nowTerm = terms.index(term)\n if nowTerm == 0:\n continue\n write([term] +\n [percentage(len(overlap(\n # Students in last term\n progFilter(\n virtStudents[terms[nowTerm - 1]], program, terms[nowTerm - 1]),\n # Students now\n progFilter(virtStudents[term], program, term))),\n len(progFilter(\n virtStudents[terms[nowTerm - 1]], program, terms[nowTerm - 1])))\n for program in virtPrograms] +\n [percentage(len(overlap(\n # Students in last term\n virtStudents[terms[nowTerm - 1]],\n # Students now\n virtStudents[term])),\n len(virtStudents[terms[nowTerm - 1]]))])\n\n # TODO: Missing Spring EBVMS student...\n\n # Testing\n\n # for student in allStudents:\n # print(allStudents[student].first)\n\n # asdf = studentsPerTerm(allStudents)\n # for term in asdf:\n # print(term, asdf[term])\n\n # Leave this as the last call\nrunTime()\n"
},
{
"alpha_fraction": 0.7459016442298889,
"alphanum_fraction": 0.7540983557701111,
"avg_line_length": 29.5,
"blob_id": "f0b9d7cc34e721d6e24805d20cd0f342e2085c7d",
"content_id": "205a5e50f0a36166e0be87181d1704c53ad8ef06",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 244,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 8,
"path": "/setup.py",
"repo_name": "Grygon/OnlineEnrollmentDocs",
"src_encoding": "UTF-8",
"text": "import sys\nfrom cx_Freeze import setup, Executable\n\nsetup(\n name=\"Online Enrollment Statistic Calculator\",\n version=\"2.0\",\n description=\"Online Enrollment Statistic Calculator\",\n executables=[Executable(\"OnlineEnrollmentCalc.py\")])\n"
}
] | 3 |
harsh9200/pyteal | https://github.com/harsh9200/pyteal | 522399a9861610a86dfab501b48cd468121425bc | fdf003da4ec759900c5d96d86d2516ce8acfb9f7 | f53819eac14c4020ed8aecf522fb74d0be807dd0 | refs/heads/master | 2023-08-29T05:59:37.667740 | 2021-11-05T21:29:11 | 2021-11-05T21:29:11 | null | 0 | 0 | null | null | null | null | null
[
{
"alpha_fraction": 0.6055734753608704,
"alphanum_fraction": 0.6359949111938477,
"avg_line_length": 28.131290435791016,
"blob_id": "2a32f01cd4f8624279104bf715d7d6caff1cf026",
"content_id": "9effe218945cc5247f44cee21b5f8e17b3e90422",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 13313,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 457,
"path": "/pyteal/compiler/scratchslots_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\nfrom .scratchslots import collectScratchSlots, assignScratchSlotsToSubroutines\n\n\ndef test_collectScratchSlots():\n def sub1Impl():\n return None\n\n def sub2Impl(a1):\n return None\n\n def sub3Impl(a1, a2, a3):\n return None\n\n subroutine1 = SubroutineDefinition(sub1Impl, TealType.uint64)\n subroutine2 = SubroutineDefinition(sub2Impl, TealType.bytes)\n subroutine3 = SubroutineDefinition(sub3Impl, TealType.none)\n\n globalSlot1 = ScratchSlot()\n\n subroutine1Slot1 = ScratchSlot()\n subroutine1Slot2 = ScratchSlot()\n subroutine1Ops = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.store, subroutine1Slot1),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.store, subroutine1Slot2),\n TealOp(None, Op.load, globalSlot1),\n TealOp(None, Op.retsub),\n ]\n\n subroutine2Slot1 = ScratchSlot()\n subroutine2Ops = [\n TealOp(None, Op.byte, '\"value\"'),\n TealOp(None, Op.store, subroutine2Slot1),\n TealOp(None, Op.load, subroutine2Slot1),\n TealOp(None, Op.retsub),\n ]\n\n subroutine3Ops = [\n TealOp(None, Op.retsub),\n ]\n\n mainSlot1 = ScratchSlot()\n mainSlot2 = ScratchSlot()\n mainOps = [\n TealOp(None, Op.int, 7),\n TealOp(None, Op.store, globalSlot1),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.store, mainSlot1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.store, mainSlot2),\n TealOp(None, Op.load, mainSlot1),\n TealOp(None, Op.return_),\n ]\n\n subroutineMapping = {\n None: mainOps,\n subroutine1: subroutine1Ops,\n subroutine2: subroutine2Ops,\n subroutine3: subroutine3Ops,\n }\n\n expected = {\n None: {globalSlot1, mainSlot1, mainSlot2},\n subroutine1: {globalSlot1, subroutine1Slot1, subroutine1Slot2},\n subroutine2: {subroutine2Slot1},\n subroutine3: set(),\n }\n\n actual = collectScratchSlots(subroutineMapping)\n\n assert actual == expected\n\n\ndef test_assignScratchSlotsToSubroutines_no_requested_ids():\n def sub1Impl():\n return None\n\n def sub2Impl(a1):\n return None\n\n def sub3Impl(a1, a2, a3):\n return None\n\n subroutine1 = SubroutineDefinition(sub1Impl, TealType.uint64)\n subroutine2 = SubroutineDefinition(sub2Impl, TealType.bytes)\n subroutine3 = SubroutineDefinition(sub3Impl, TealType.none)\n\n globalSlot1 = ScratchSlot()\n\n subroutine1Slot1 = ScratchSlot()\n subroutine1Slot2 = ScratchSlot()\n subroutine1Ops = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.store, subroutine1Slot1),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.store, subroutine1Slot2),\n TealOp(None, Op.load, globalSlot1),\n TealOp(None, Op.retsub),\n ]\n\n subroutine2Slot1 = ScratchSlot()\n subroutine2Ops = [\n TealOp(None, Op.byte, '\"value\"'),\n TealOp(None, Op.store, subroutine2Slot1),\n TealOp(None, Op.load, subroutine2Slot1),\n TealOp(None, Op.retsub),\n ]\n\n subroutine3Ops = [\n TealOp(None, Op.retsub),\n ]\n\n mainSlot1 = ScratchSlot()\n mainSlot2 = ScratchSlot()\n mainOps = [\n TealOp(None, Op.int, 7),\n TealOp(None, Op.store, globalSlot1),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.store, mainSlot1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.store, mainSlot2),\n TealOp(None, Op.load, mainSlot1),\n TealOp(None, Op.return_),\n ]\n\n subroutineMapping = {\n None: mainOps,\n subroutine1: subroutine1Ops,\n subroutine2: subroutine2Ops,\n subroutine3: subroutine3Ops,\n }\n\n subroutineBlocks = {\n None: TealSimpleBlock(mainOps),\n subroutine1: TealSimpleBlock(subroutine1Ops),\n subroutine2: TealSimpleBlock(subroutine2Ops),\n subroutine3: TealSimpleBlock(subroutine3Ops),\n }\n\n expectedAssignments = {\n globalSlot1: 0,\n subroutine1Slot1: 1,\n 
subroutine1Slot2: 2,\n subroutine2Slot1: 3,\n mainSlot1: 4,\n mainSlot2: 5,\n }\n\n expected = {\n None: {expectedAssignments[mainSlot1], expectedAssignments[mainSlot2]},\n subroutine1: {\n expectedAssignments[subroutine1Slot1],\n expectedAssignments[subroutine1Slot2],\n },\n subroutine2: {expectedAssignments[subroutine2Slot1]},\n subroutine3: set(),\n }\n\n actual = assignScratchSlotsToSubroutines(subroutineMapping, subroutineBlocks)\n\n assert actual == expected\n\n assert subroutine1Ops == [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.store, expectedAssignments[subroutine1Slot1]),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.store, expectedAssignments[subroutine1Slot2]),\n TealOp(None, Op.load, expectedAssignments[globalSlot1]),\n TealOp(None, Op.retsub),\n ]\n\n assert subroutine2Ops == [\n TealOp(None, Op.byte, '\"value\"'),\n TealOp(None, Op.store, expectedAssignments[subroutine2Slot1]),\n TealOp(None, Op.load, expectedAssignments[subroutine2Slot1]),\n TealOp(None, Op.retsub),\n ]\n\n assert subroutine3Ops == [\n TealOp(None, Op.retsub),\n ]\n\n assert mainOps == [\n TealOp(None, Op.int, 7),\n TealOp(None, Op.store, expectedAssignments[globalSlot1]),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.store, expectedAssignments[mainSlot1]),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.store, expectedAssignments[mainSlot2]),\n TealOp(None, Op.load, expectedAssignments[mainSlot1]),\n TealOp(None, Op.return_),\n ]\n\n\ndef test_assignScratchSlotsToSubroutines_with_requested_ids():\n def sub1Impl():\n return None\n\n def sub2Impl(a1):\n return None\n\n def sub3Impl(a1, a2, a3):\n return None\n\n subroutine1 = SubroutineDefinition(sub1Impl, TealType.uint64)\n subroutine2 = SubroutineDefinition(sub2Impl, TealType.bytes)\n subroutine3 = SubroutineDefinition(sub3Impl, TealType.none)\n\n globalSlot1 = ScratchSlot(requestedSlotId=8)\n\n subroutine1Slot1 = ScratchSlot()\n subroutine1Slot2 = ScratchSlot(requestedSlotId=5)\n subroutine1Ops = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.store, subroutine1Slot1),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.store, subroutine1Slot2),\n TealOp(None, Op.load, globalSlot1),\n TealOp(None, Op.retsub),\n ]\n\n subroutine2Slot1 = ScratchSlot()\n subroutine2Ops = [\n TealOp(None, Op.byte, '\"value\"'),\n TealOp(None, Op.store, subroutine2Slot1),\n TealOp(None, Op.load, subroutine2Slot1),\n TealOp(None, Op.retsub),\n ]\n\n subroutine3Ops = [\n TealOp(None, Op.retsub),\n ]\n\n mainSlot1 = ScratchSlot()\n mainSlot2 = ScratchSlot(requestedSlotId=100)\n mainOps = [\n TealOp(None, Op.int, 7),\n TealOp(None, Op.store, globalSlot1),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.store, mainSlot1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.store, mainSlot2),\n TealOp(None, Op.load, mainSlot1),\n TealOp(None, Op.return_),\n ]\n\n subroutineMapping = {\n None: mainOps,\n subroutine1: subroutine1Ops,\n subroutine2: subroutine2Ops,\n subroutine3: subroutine3Ops,\n }\n\n subroutineBlocks = {\n None: TealSimpleBlock(mainOps),\n subroutine1: TealSimpleBlock(subroutine1Ops),\n subroutine2: TealSimpleBlock(subroutine2Ops),\n subroutine3: TealSimpleBlock(subroutine3Ops),\n }\n\n expectedAssignments = {\n globalSlot1: 8,\n subroutine1Slot1: 0,\n subroutine1Slot2: 5,\n subroutine2Slot1: 1,\n mainSlot1: 2,\n mainSlot2: 100,\n }\n\n expected = {\n None: {expectedAssignments[mainSlot1], expectedAssignments[mainSlot2]},\n subroutine1: {\n expectedAssignments[subroutine1Slot1],\n expectedAssignments[subroutine1Slot2],\n },\n subroutine2: {expectedAssignments[subroutine2Slot1]},\n 
subroutine3: set(),\n }\n\n actual = assignScratchSlotsToSubroutines(subroutineMapping, subroutineBlocks)\n\n assert actual == expected\n\n assert subroutine1Ops == [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.store, expectedAssignments[subroutine1Slot1]),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.store, expectedAssignments[subroutine1Slot2]),\n TealOp(None, Op.load, expectedAssignments[globalSlot1]),\n TealOp(None, Op.retsub),\n ]\n\n assert subroutine2Ops == [\n TealOp(None, Op.byte, '\"value\"'),\n TealOp(None, Op.store, expectedAssignments[subroutine2Slot1]),\n TealOp(None, Op.load, expectedAssignments[subroutine2Slot1]),\n TealOp(None, Op.retsub),\n ]\n\n assert subroutine3Ops == [\n TealOp(None, Op.retsub),\n ]\n\n assert mainOps == [\n TealOp(None, Op.int, 7),\n TealOp(None, Op.store, expectedAssignments[globalSlot1]),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.store, expectedAssignments[mainSlot1]),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.store, expectedAssignments[mainSlot2]),\n TealOp(None, Op.load, expectedAssignments[mainSlot1]),\n TealOp(None, Op.return_),\n ]\n\n\ndef test_assignScratchSlotsToSubroutines_invalid_requested_id():\n def sub1Impl():\n return None\n\n def sub2Impl(a1):\n return None\n\n def sub3Impl(a1, a2, a3):\n return None\n\n subroutine1 = SubroutineDefinition(sub1Impl, TealType.uint64)\n subroutine2 = SubroutineDefinition(sub2Impl, TealType.bytes)\n subroutine3 = SubroutineDefinition(sub3Impl, TealType.none)\n\n globalSlot1 = ScratchSlot(requestedSlotId=8)\n\n subroutine1Slot1 = ScratchSlot()\n subroutine1Slot2 = ScratchSlot(requestedSlotId=5)\n subroutine1Ops = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.store, subroutine1Slot1),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.store, subroutine1Slot2),\n TealOp(None, Op.load, globalSlot1),\n TealOp(None, Op.retsub),\n ]\n\n subroutine2Slot1 = ScratchSlot(requestedSlotId=100)\n subroutine2Ops = [\n TealOp(None, Op.byte, '\"value\"'),\n TealOp(None, Op.store, subroutine2Slot1),\n TealOp(None, Op.load, subroutine2Slot1),\n TealOp(None, Op.retsub),\n ]\n\n subroutine3Ops = [\n TealOp(None, Op.retsub),\n ]\n\n mainSlot1 = ScratchSlot()\n mainSlot2 = ScratchSlot(requestedSlotId=100)\n mainOps = [\n TealOp(None, Op.int, 7),\n TealOp(None, Op.store, globalSlot1),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.store, mainSlot1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.store, mainSlot2),\n TealOp(None, Op.load, mainSlot1),\n TealOp(None, Op.return_),\n ]\n\n subroutineMapping = {\n None: mainOps,\n subroutine1: subroutine1Ops,\n subroutine2: subroutine2Ops,\n subroutine3: subroutine3Ops,\n }\n\n subroutineBlocks = {\n None: TealSimpleBlock(mainOps),\n subroutine1: TealSimpleBlock(subroutine1Ops),\n subroutine2: TealSimpleBlock(subroutine2Ops),\n subroutine3: TealSimpleBlock(subroutine3Ops),\n }\n\n # mainSlot2 and subroutine2Slot1 request the same ID, 100\n with pytest.raises(TealInternalError):\n actual = assignScratchSlotsToSubroutines(subroutineMapping, subroutineBlocks)\n\n\ndef test_assignScratchSlotsToSubroutines_slot_used_before_assignment():\n def sub1Impl():\n return None\n\n def sub2Impl(a1):\n return None\n\n def sub3Impl(a1, a2, a3):\n return None\n\n subroutine1 = SubroutineDefinition(sub1Impl, TealType.uint64)\n subroutine2 = SubroutineDefinition(sub2Impl, TealType.bytes)\n subroutine3 = SubroutineDefinition(sub3Impl, TealType.none)\n\n globalSlot1 = ScratchSlot()\n\n subroutine1Slot1 = ScratchSlot()\n subroutine1Slot2 = ScratchSlot()\n subroutine1Ops = [\n TealOp(None, Op.int, 1),\n 
TealOp(None, Op.store, subroutine1Slot1),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.store, subroutine1Slot2),\n TealOp(None, Op.load, globalSlot1),\n TealOp(None, Op.retsub),\n ]\n\n subroutine2Slot1 = ScratchSlot()\n subroutine2Ops = [\n TealOp(None, Op.byte, '\"value\"'),\n TealOp(None, Op.store, subroutine2Slot1),\n TealOp(None, Op.load, subroutine2Slot1),\n TealOp(None, Op.retsub),\n ]\n\n subroutine3Ops = [\n TealOp(None, Op.retsub),\n ]\n\n mainSlot1 = ScratchSlot()\n mainSlot2 = ScratchSlot()\n mainOps = [\n TealOp(None, Op.int, 7),\n TealOp(None, Op.store, globalSlot1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.store, mainSlot2),\n TealOp(None, Op.load, mainSlot1),\n TealOp(None, Op.return_),\n ]\n\n subroutineMapping = {\n None: mainOps,\n subroutine1: subroutine1Ops,\n subroutine2: subroutine2Ops,\n subroutine3: subroutine3Ops,\n }\n\n subroutineBlocks = {\n None: TealSimpleBlock(mainOps),\n subroutine1: TealSimpleBlock(subroutine1Ops),\n subroutine2: TealSimpleBlock(subroutine2Ops),\n subroutine3: TealSimpleBlock(subroutine3Ops),\n }\n\n with pytest.raises(TealInternalError):\n assignScratchSlotsToSubroutines(subroutineMapping, subroutineBlocks)\n"
},
{
"alpha_fraction": 0.5873655676841736,
"alphanum_fraction": 0.588813066482544,
"avg_line_length": 41.421051025390625,
"blob_id": "35160eca6d360202b79cdd533ed6af89a92df967",
"content_id": "c980c17732262f42a58ecc5f23bbbfcd98da8a3a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9672,
"license_type": "permissive",
"max_line_length": 88,
"num_lines": 228,
"path": "/pyteal/ast/txn_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from typing import Dict, Callable\n\nimport pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import Expr, TxnField, TxnObject, TxnArray, CompileOptions\n\nfieldToMethod: Dict[TxnField, Callable[[TxnObject], Expr]] = {\n TxnField.sender: lambda txn: txn.sender(),\n TxnField.fee: lambda txn: txn.fee(),\n TxnField.first_valid: lambda txn: txn.first_valid(),\n TxnField.last_valid: lambda txn: txn.last_valid(),\n TxnField.note: lambda txn: txn.note(),\n TxnField.lease: lambda txn: txn.lease(),\n TxnField.receiver: lambda txn: txn.receiver(),\n TxnField.amount: lambda txn: txn.amount(),\n TxnField.close_remainder_to: lambda txn: txn.close_remainder_to(),\n TxnField.vote_pk: lambda txn: txn.vote_pk(),\n TxnField.selection_pk: lambda txn: txn.selection_pk(),\n TxnField.vote_first: lambda txn: txn.vote_first(),\n TxnField.vote_last: lambda txn: txn.vote_last(),\n TxnField.vote_key_dilution: lambda txn: txn.vote_key_dilution(),\n TxnField.type: lambda txn: txn.type(),\n TxnField.type_enum: lambda txn: txn.type_enum(),\n TxnField.xfer_asset: lambda txn: txn.xfer_asset(),\n TxnField.asset_amount: lambda txn: txn.asset_amount(),\n TxnField.asset_sender: lambda txn: txn.asset_sender(),\n TxnField.asset_receiver: lambda txn: txn.asset_receiver(),\n TxnField.asset_close_to: lambda txn: txn.asset_close_to(),\n TxnField.group_index: lambda txn: txn.group_index(),\n TxnField.tx_id: lambda txn: txn.tx_id(),\n TxnField.application_id: lambda txn: txn.application_id(),\n TxnField.on_completion: lambda txn: txn.on_completion(),\n TxnField.approval_program: lambda txn: txn.approval_program(),\n TxnField.clear_state_program: lambda txn: txn.clear_state_program(),\n TxnField.rekey_to: lambda txn: txn.rekey_to(),\n TxnField.config_asset: lambda txn: txn.config_asset(),\n TxnField.config_asset_total: lambda txn: txn.config_asset_total(),\n TxnField.config_asset_decimals: lambda txn: txn.config_asset_decimals(),\n TxnField.config_asset_default_frozen: lambda txn: txn.config_asset_default_frozen(),\n TxnField.config_asset_unit_name: lambda txn: txn.config_asset_unit_name(),\n TxnField.config_asset_name: lambda txn: txn.config_asset_name(),\n TxnField.config_asset_url: lambda txn: txn.config_asset_url(),\n TxnField.config_asset_metadata_hash: lambda txn: txn.config_asset_metadata_hash(),\n TxnField.config_asset_manager: lambda txn: txn.config_asset_manager(),\n TxnField.config_asset_reserve: lambda txn: txn.config_asset_reserve(),\n TxnField.config_asset_freeze: lambda txn: txn.config_asset_freeze(),\n TxnField.config_asset_clawback: lambda txn: txn.config_asset_clawback(),\n TxnField.freeze_asset: lambda txn: txn.freeze_asset(),\n TxnField.freeze_asset_account: lambda txn: txn.freeze_asset_account(),\n TxnField.freeze_asset_frozen: lambda txn: txn.freeze_asset_frozen(),\n TxnField.global_num_uints: lambda txn: txn.global_num_uints(),\n TxnField.global_num_byte_slices: lambda txn: txn.global_num_byte_slices(),\n TxnField.local_num_uints: lambda txn: txn.local_num_uints(),\n TxnField.local_num_byte_slices: lambda txn: txn.local_num_byte_slices(),\n TxnField.extra_program_pages: lambda txn: txn.extra_program_pages(),\n TxnField.nonparticipation: lambda txn: txn.nonparticipation(),\n TxnField.created_asset_id: lambda txn: txn.created_asset_id(),\n TxnField.created_application_id: lambda txn: txn.created_application_id(),\n}\n\narrayFieldToProperty: Dict[TxnField, Callable[[TxnObject], TxnArray]] = {\n TxnField.application_args: lambda txn: 
txn.application_args,\n TxnField.accounts: lambda txn: txn.accounts,\n TxnField.assets: lambda txn: txn.assets,\n TxnField.applications: lambda txn: txn.applications,\n TxnField.logs: lambda txn: txn.logs,\n}\n\narrayFieldToLengthField: Dict[TxnField, TxnField] = {\n TxnField.application_args: TxnField.num_app_args,\n TxnField.accounts: TxnField.num_accounts,\n TxnField.assets: TxnField.num_assets,\n TxnField.applications: TxnField.num_applications,\n TxnField.logs: TxnField.num_logs,\n}\n\n\ndef test_txn_fields():\n dynamicGtxnArg = Int(0)\n\n txnObjects = [\n (Txn, Op.txn, Op.txna, Op.txnas, [], []),\n *[\n (Gtxn[i], Op.gtxn, Op.gtxna, Op.gtxnas, [i], [])\n for i in range(MAX_GROUP_SIZE)\n ],\n (\n Gtxn[dynamicGtxnArg],\n Op.gtxns,\n Op.gtxnsa,\n Op.gtxnsas,\n [],\n [TealOp(dynamicGtxnArg, Op.int, 0)],\n ),\n (InnerTxn, Op.itxn, Op.itxna, None, [], []),\n ]\n\n for (\n txnObject,\n op,\n staticArrayOp,\n dynamicArrayOp,\n immediateArgsPrefix,\n irPrefix,\n ) in txnObjects:\n for field in TxnField:\n if field.is_array:\n array = arrayFieldToProperty[field](txnObject)\n lengthExpr = array.length()\n\n lengthFieldName = arrayFieldToLengthField[field].arg_name\n immediateArgs = immediateArgsPrefix + [lengthFieldName]\n expected = TealSimpleBlock(\n irPrefix + [TealOp(lengthExpr, op, *immediateArgs)]\n )\n expected.addIncoming()\n expected = TealBlock.NormalizeBlocks(expected)\n\n version = max(op.min_version, field.min_version)\n\n actual, _ = lengthExpr.__teal__(CompileOptions(version=version))\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert (\n actual == expected\n ), \"{}: array length for field {} does not match expected\".format(\n op, field\n )\n\n if version > 2:\n with pytest.raises(TealInputError):\n lengthExpr.__teal__(CompileOptions(version=version - 1))\n\n for i in range(32): # just an arbitrary large int\n elementExpr = array[i]\n\n immediateArgs = immediateArgsPrefix + [field.arg_name, i]\n expected = TealSimpleBlock(\n irPrefix + [TealOp(elementExpr, staticArrayOp, *immediateArgs)]\n )\n expected.addIncoming()\n expected = TealBlock.NormalizeBlocks(expected)\n\n version = max(staticArrayOp.min_version, field.min_version)\n\n actual, _ = elementExpr.__teal__(CompileOptions(version=version))\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert (\n actual == expected\n ), \"{}: static array field {} does not match expected\".format(\n staticArrayOp, field\n )\n\n if version > 2:\n with pytest.raises(TealInputError):\n elementExpr.__teal__(CompileOptions(version=version - 1))\n\n if dynamicArrayOp is not None:\n dynamicIndex = Int(2)\n dynamicElementExpr = array[dynamicIndex]\n\n immediateArgs = immediateArgsPrefix + [field.arg_name]\n expected = TealSimpleBlock(\n irPrefix\n + [\n TealOp(dynamicIndex, Op.int, 2),\n TealOp(dynamicElementExpr, dynamicArrayOp, *immediateArgs),\n ]\n )\n expected.addIncoming()\n expected = TealBlock.NormalizeBlocks(expected)\n\n version = max(dynamicArrayOp.min_version, field.min_version)\n\n actual, _ = dynamicElementExpr.__teal__(\n CompileOptions(version=version)\n )\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert (\n actual == expected\n ), \"{}: dynamic array field {} does not match expected\".format(\n dynamicArrayOp, field\n )\n\n if version > 2:\n with pytest.raises(TealInputError):\n dynamicElementExpr.__teal__(\n CompileOptions(version=version - 1)\n )\n\n continue\n\n if field in arrayFieldToLengthField.values():\n # ignore length fields since they are 
checked with their arrays\n continue\n\n if field == TxnField.first_valid_time:\n # ignore first_valid_time since it is not exposed on TxnObject yet\n continue\n\n expr = fieldToMethod[field](txnObject)\n\n immediateArgs = immediateArgsPrefix + [field.arg_name]\n expected = TealSimpleBlock(irPrefix + [TealOp(expr, op, *immediateArgs)])\n expected.addIncoming()\n expected = TealBlock.NormalizeBlocks(expected)\n\n version = max(op.min_version, field.min_version)\n\n actual, _ = expr.__teal__(CompileOptions(version=version))\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected, \"{}: field {} does not match expected\".format(\n op, field\n )\n\n if version > 2:\n with pytest.raises(TealInputError):\n expr.__teal__(CompileOptions(version=version - 1))\n"
},
{
"alpha_fraction": 0.7156862616539001,
"alphanum_fraction": 0.7156862616539001,
"avg_line_length": 13.571428298950195,
"blob_id": "9c1e20a54886727b82020b69da21df3d2ecad6e1",
"content_id": "4d9be25af9780ac9ae05d0cb371387488f09a6e7",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "INI",
"length_bytes": 102,
"license_type": "permissive",
"max_line_length": 29,
"num_lines": 7,
"path": "/mypy.ini",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "[mypy]\n\n[mypy-pytest.*]\nignore_missing_imports = True\n\n[mypy-algosdk.*]\nignore_missing_imports = True\n"
},
{
"alpha_fraction": 0.6371124982833862,
"alphanum_fraction": 0.6462172269821167,
"avg_line_length": 28.761289596557617,
"blob_id": "c75ff4bb8864844cf36f8f76dac93d220d9b3437",
"content_id": "282c8d12353d1b0b313efa8d6876fa6cbc53fbcb",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4613,
"license_type": "permissive",
"max_line_length": 99,
"num_lines": 155,
"path": "/tests/compile_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import os\nimport pytest\n\nfrom pyteal import *\n\n\ndef test_basic_bank():\n from examples.signature.basic import bank_for_account\n\n program = bank_for_account(\n \"ZZAF5ARA4MEC5PVDOP64JM5O5MQST63Q2KOY2FLYFLXXD3PFSNJJBYAFZM\"\n )\n\n target_path = os.path.join(\n os.path.dirname(__file__), \"../examples/signature/basic.teal\"\n )\n with open(target_path, \"r\") as target_file:\n target = \"\".join(target_file.readlines()).strip()\n assert compileTeal(program, mode=Mode.Signature, version=3) == target\n\n\ndef test_atomic_swap():\n from examples.signature.atomic_swap import htlc\n\n program = htlc()\n\n target_path = os.path.join(\n os.path.dirname(__file__), \"../examples/signature/atomic_swap.teal\"\n )\n with open(target_path, \"r\") as target_file:\n target = \"\".join(target_file.readlines()).strip()\n assert compileTeal(program, mode=Mode.Signature, version=2) == target\n\n\ndef test_periodic_payment():\n from examples.signature.periodic_payment import periodic_payment\n\n program = periodic_payment()\n\n target_path = os.path.join(\n os.path.dirname(__file__), \"../examples/signature/periodic_payment.teal\"\n )\n with open(target_path, \"r\") as target_file:\n target = \"\".join(target_file.readlines()).strip()\n assert compileTeal(program, mode=Mode.Signature, version=2) == target\n\n\ndef test_split():\n from examples.signature.split import split\n\n program = split()\n\n target_path = os.path.join(\n os.path.dirname(__file__), \"../examples/signature/split.teal\"\n )\n with open(target_path, \"r\") as target_file:\n target = \"\".join(target_file.readlines()).strip()\n assert compileTeal(program, mode=Mode.Signature, version=2) == target\n\n\ndef test_dutch_auction():\n from examples.signature.dutch_auction import dutch_auction\n\n program = dutch_auction()\n\n target_path = os.path.join(\n os.path.dirname(__file__), \"../examples/signature/dutch_auction.teal\"\n )\n with open(target_path, \"r\") as target_file:\n target = \"\".join(target_file.readlines()).strip()\n assert compileTeal(program, mode=Mode.Signature, version=2) == target\n\n\ndef test_recurring_swap():\n from examples.signature.recurring_swap import recurring_swap\n\n program = recurring_swap()\n\n target_path = os.path.join(\n os.path.dirname(__file__), \"../examples/signature/recurring_swap.teal\"\n )\n with open(target_path, \"r\") as target_file:\n target = \"\".join(target_file.readlines()).strip()\n assert compileTeal(program, mode=Mode.Signature, version=2) == target\n\n\ndef test_asset():\n from examples.application.asset import approval_program, clear_state_program\n\n approval = approval_program()\n clear_state = clear_state_program()\n\n # only checking for successful compilation for now\n compileTeal(approval, mode=Mode.Application, version=2)\n compileTeal(clear_state, mode=Mode.Application, version=2)\n\n\ndef test_security_token():\n from examples.application.security_token import (\n approval_program,\n clear_state_program,\n )\n\n approval = approval_program()\n clear_state = clear_state_program()\n\n # only checking for successful compilation for now\n compileTeal(approval, mode=Mode.Application, version=2)\n compileTeal(clear_state, mode=Mode.Application, version=2)\n\n\ndef test_vote():\n from examples.application.vote import approval_program, clear_state_program\n\n approval = approval_program()\n clear_state = clear_state_program()\n\n # only checking for successful compilation for now\n compileTeal(approval, mode=Mode.Application, version=2)\n compileTeal(clear_state, 
mode=Mode.Application, version=2)\n\n\ndef test_cond():\n cond1 = Txn.fee() < Int(2000)\n cond2 = Txn.amount() > Int(5000)\n cond3 = Txn.receiver() == Txn.sender()\n core = Cond(\n [Global.group_size() == Int(2), cond1],\n [Global.group_size() == Int(3), cond2],\n [Global.group_size() == Int(4), cond3],\n )\n compileTeal(core, mode=Mode.Signature, version=2)\n\n\[email protected](2)\ndef test_many_ifs():\n \"\"\"\n Test with many If statements to trigger potential corner cases in code generation.\n Previous versions of PyTeal took an exponential time to generate the TEAL code for this PyTEAL.\n \"\"\"\n\n sv = ScratchVar(TealType.uint64)\n s = Seq(\n [\n If(\n Int(3 * i) == Int(3 * i),\n sv.store(Int(3 * i + 1)),\n sv.store(Int(3 * i + 2)),\n )\n for i in range(30)\n ]\n + [Return(sv.load())]\n )\n\n compileTeal(s, mode=Mode.Signature, version=2)\n"
},
{
"alpha_fraction": 0.73221755027771,
"alphanum_fraction": 0.73221755027771,
"avg_line_length": 20.727272033691406,
"blob_id": "152600e4f1cbffa124ad9bda6ddb6e8da9324eee",
"content_id": "8107e4c30f1533d6a85f9572da8bc1a770f01f4a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 478,
"license_type": "permissive",
"max_line_length": 54,
"num_lines": 22,
"path": "/pyteal/ir/__init__.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from .ops import Op, Mode\n\nfrom .tealcomponent import TealComponent\nfrom .tealop import TealOp\nfrom .teallabel import TealLabel\nfrom .tealblock import TealBlock\nfrom .tealsimpleblock import TealSimpleBlock\nfrom .tealconditionalblock import TealConditionalBlock\n\nfrom .labelref import LabelReference\n\n__all__ = [\n \"Op\",\n \"Mode\",\n \"TealComponent\",\n \"TealOp\",\n \"TealLabel\",\n \"TealBlock\",\n \"TealSimpleBlock\",\n \"TealConditionalBlock\",\n \"LabelReference\",\n]\n"
},
{
"alpha_fraction": 0.6323529481887817,
"alphanum_fraction": 0.6323529481887817,
"avg_line_length": 21.66666603088379,
"blob_id": "64334fdf2f31628c709d7f031fce901dc6047a58",
"content_id": "aec4e51ca027754554479f7daaacb68dc29b965b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 272,
"license_type": "permissive",
"max_line_length": 54,
"num_lines": 12,
"path": "/pyteal/ast/err_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n\ndef test_err():\n expr = Err()\n assert expr.type_of() == TealType.none\n assert expr.has_return()\n expected = TealSimpleBlock([TealOp(expr, Op.err)])\n actual, _ = expr.__teal__(CompileOptions())\n assert actual == expected\n"
},
{
"alpha_fraction": 0.6253916621208191,
"alphanum_fraction": 0.642060399055481,
"avg_line_length": 28.883895874023438,
"blob_id": "278ba7f412bbe6a125f1762e3c58609c46341793",
"content_id": "d174731f8358b58527035887a51dcdc7bb6ad232",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7979,
"license_type": "permissive",
"max_line_length": 80,
"num_lines": 267,
"path": "/pyteal/ast/if_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\noptions = CompileOptions()\n\n\ndef test_if_has_return():\n exprWithReturn = If(Int(1), Return(Int(1)), Return(Int(0)))\n assert exprWithReturn.has_return()\n\n exprWithoutReturn = If(Int(1), Int(1), Int(0))\n assert not exprWithoutReturn.has_return()\n\n exprSemiReturn = If(\n Int(1), Return(Int(1)), App.globalPut(Bytes(\"key\"), Bytes(\"value\"))\n )\n assert not exprSemiReturn.has_return()\n\n\ndef test_if_int():\n args = [Int(0), Int(1), Int(2)]\n expr = If(args[0], args[1], args[2])\n assert expr.type_of() == TealType.uint64\n\n expected, _ = args[0].__teal__(options)\n thenBlock, _ = args[1].__teal__(options)\n elseBlock, _ = args[2].__teal__(options)\n expectedBranch = TealConditionalBlock([])\n expectedBranch.setTrueBlock(thenBlock)\n expectedBranch.setFalseBlock(elseBlock)\n expected.setNextBlock(expectedBranch)\n end = TealSimpleBlock([])\n thenBlock.setNextBlock(end)\n elseBlock.setNextBlock(end)\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_if_bytes():\n args = [Int(1), Txn.sender(), Txn.receiver()]\n expr = If(args[0], args[1], args[2])\n assert expr.type_of() == TealType.bytes\n\n expected, _ = args[0].__teal__(options)\n thenBlock, _ = args[1].__teal__(options)\n elseBlock, _ = args[2].__teal__(options)\n expectedBranch = TealConditionalBlock([])\n expectedBranch.setTrueBlock(thenBlock)\n expectedBranch.setFalseBlock(elseBlock)\n expected.setNextBlock(expectedBranch)\n end = TealSimpleBlock([])\n thenBlock.setNextBlock(end)\n elseBlock.setNextBlock(end)\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_if_none():\n args = [Int(0), Pop(Txn.sender()), Pop(Txn.receiver())]\n expr = If(args[0], args[1], args[2])\n assert expr.type_of() == TealType.none\n\n expected, _ = args[0].__teal__(options)\n thenBlockStart, thenBlockEnd = args[1].__teal__(options)\n elseBlockStart, elseBlockEnd = args[2].__teal__(options)\n expectedBranch = TealConditionalBlock([])\n expectedBranch.setTrueBlock(thenBlockStart)\n expectedBranch.setFalseBlock(elseBlockStart)\n expected.setNextBlock(expectedBranch)\n end = TealSimpleBlock([])\n thenBlockEnd.setNextBlock(end)\n elseBlockEnd.setNextBlock(end)\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_if_single():\n args = [Int(1), Pop(Int(1))]\n expr = If(args[0], args[1])\n assert expr.type_of() == TealType.none\n\n expected, _ = args[0].__teal__(options)\n thenBlockStart, thenBlockEnd = args[1].__teal__(options)\n end = TealSimpleBlock([])\n expectedBranch = TealConditionalBlock([])\n expectedBranch.setTrueBlock(thenBlockStart)\n expectedBranch.setFalseBlock(end)\n expected.setNextBlock(expectedBranch)\n thenBlockEnd.setNextBlock(end)\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_if_invalid():\n with pytest.raises(TealTypeError):\n If(Int(0), Txn.amount(), Txn.sender())\n\n with pytest.raises(TealTypeError):\n If(Txn.sender(), Int(1), Int(0))\n\n with pytest.raises(TealTypeError):\n If(Int(0), Txn.sender())\n\n with pytest.raises(TealTypeError):\n If(Int(0), Int(2))\n\n with pytest.raises(TealCompileError):\n expr = If(Int(0))\n expr.__teal__(options)\n\n\ndef test_if_alt_int():\n args = [Int(0), Int(1), Int(2)]\n expr = If(args[0]).Then(args[1]).Else(args[2])\n assert expr.type_of() == TealType.uint64\n\n expected, _ = args[0].__teal__(options)\n thenBlock, _ = 
args[1].__teal__(options)\n elseBlock, _ = args[2].__teal__(options)\n expectedBranch = TealConditionalBlock([])\n expectedBranch.setTrueBlock(thenBlock)\n expectedBranch.setFalseBlock(elseBlock)\n expected.setNextBlock(expectedBranch)\n end = TealSimpleBlock([])\n thenBlock.setNextBlock(end)\n elseBlock.setNextBlock(end)\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_if_alt_bytes():\n args = [Int(1), Txn.sender(), Txn.receiver()]\n expr = If(args[0]).Then(args[1]).Else(args[2])\n assert expr.type_of() == TealType.bytes\n\n expected, _ = args[0].__teal__(options)\n thenBlock, _ = args[1].__teal__(options)\n elseBlock, _ = args[2].__teal__(options)\n expectedBranch = TealConditionalBlock([])\n expectedBranch.setTrueBlock(thenBlock)\n expectedBranch.setFalseBlock(elseBlock)\n expected.setNextBlock(expectedBranch)\n end = TealSimpleBlock([])\n thenBlock.setNextBlock(end)\n elseBlock.setNextBlock(end)\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_if_alt_none():\n args = [Int(0), Pop(Txn.sender()), Pop(Txn.receiver())]\n expr = If(args[0]).Then(args[1]).Else(args[2])\n assert expr.type_of() == TealType.none\n\n expected, _ = args[0].__teal__(options)\n thenBlockStart, thenBlockEnd = args[1].__teal__(options)\n elseBlockStart, elseBlockEnd = args[2].__teal__(options)\n expectedBranch = TealConditionalBlock([])\n expectedBranch.setTrueBlock(thenBlockStart)\n expectedBranch.setFalseBlock(elseBlockStart)\n expected.setNextBlock(expectedBranch)\n end = TealSimpleBlock([])\n thenBlockEnd.setNextBlock(end)\n elseBlockEnd.setNextBlock(end)\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_elseif_syntax():\n args = [Int(0), Int(1), Int(2), Int(3), Int(4)]\n expr = If(args[0]).Then(args[1]).ElseIf(args[2]).Then(args[3]).Else(args[4])\n assert expr.type_of() == TealType.uint64\n\n elseExpr = If(args[2]).Then(args[3]).Else(args[4])\n expected, _ = args[0].__teal__(options)\n thenBlock, _ = args[1].__teal__(options)\n elseStart, elseEnd = elseExpr.__teal__(options)\n expectedBranch = TealConditionalBlock([])\n expectedBranch.setTrueBlock(thenBlock)\n expectedBranch.setFalseBlock(elseStart)\n expected.setNextBlock(expectedBranch)\n end = TealSimpleBlock([])\n thenBlock.setNextBlock(end)\n elseEnd.setNextBlock(end)\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_elseif_multiple():\n args = [Int(0), Int(1), Int(2), Int(3), Int(4), Int(5), Int(6)]\n expr = (\n If(args[0])\n .Then(args[1])\n .ElseIf(args[2])\n .Then(args[3])\n .ElseIf(args[4])\n .Then(args[5])\n .Else(args[6])\n )\n assert expr.type_of() == TealType.uint64\n\n elseIfExpr = If(args[2], args[3], If(args[4], args[5], args[6]))\n expected, _ = args[0].__teal__(options)\n thenBlock, _ = args[1].__teal__(options)\n elseStart, elseEnd = elseIfExpr.__teal__(options)\n expectedBranch = TealConditionalBlock([])\n expectedBranch.setTrueBlock(thenBlock)\n expectedBranch.setFalseBlock(elseStart)\n expected.setNextBlock(expectedBranch)\n end = TealSimpleBlock([])\n thenBlock.setNextBlock(end)\n elseEnd.setNextBlock(end)\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_if_invalid_alt_syntax():\n with pytest.raises(TealCompileError):\n expr = If(Int(0)).ElseIf(Int(1))\n expr.__teal__(options)\n\n with pytest.raises(TealCompileError):\n expr = If(Int(0)).ElseIf(Int(1)).Then(Int(2))\n expr.__teal__(options)\n\n with pytest.raises(TealCompileError):\n expr = 
If(Int(0)).Then(Int(1)).ElseIf(Int(2))\n expr.__teal__(options)\n\n with pytest.raises(TealCompileError):\n expr = If(Int(0)).Then(Int(1)).ElseIf(Int(2))\n expr.__teal__(options)\n\n with pytest.raises(TealCompileError):\n expr = If(Int(0)).Else(Int(1))\n expr.__teal__(options)\n\n with pytest.raises(TealInputError):\n expr = If(Int(0)).Else(Int(1)).Then(Int(2))\n\n with pytest.raises(TealInputError):\n expr = If(Int(0)).Else(Int(1)).Else(Int(2))\n\n with pytest.raises(TealInputError):\n expr = If(Int(0), Pop(Int(1))).Else(Int(2))\n"
},
{
"alpha_fraction": 0.7837837934494019,
"alphanum_fraction": 0.8108108043670654,
"avg_line_length": 11.333333015441895,
"blob_id": "86f68ea2273ab0eb8fb9defc53a6485dc9b60249",
"content_id": "e9296768ce844542167da26184a2e2e791e88772",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 37,
"license_type": "permissive",
"max_line_length": 16,
"num_lines": 3,
"path": "/docs/requirements.txt",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "m2r\nsphinx_rtd_theme\npy-algorand-sdk\n"
},
{
"alpha_fraction": 0.6783216595649719,
"alphanum_fraction": 0.7013986110687256,
"avg_line_length": 48.31034469604492,
"blob_id": "5e9869ee657d89acb87c0b55fd6764977b33117d",
"content_id": "d9d742e17f69399da41522d0040e9bfa6cb4bda1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "reStructuredText",
"length_bytes": 1430,
"license_type": "permissive",
"max_line_length": 123,
"num_lines": 29,
"path": "/docs/versions.rst",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": ".. _versions:\n\nTEAL Versions\n=============\n\nEach version of PyTeal compiles contracts for a specific version of TEAL. Newer versions of TEAL\nintroduce new opcodes and transaction fields, so PyTeal must be updated to support these new\nfeatures. Below is a table which shows the relationship between TEAL and PyTeal versions.\n\n============ ==============\nTEAL Version PyTeal Version\n============ ==============\n1 <= 0.5.4\n2 >= 0.6.0\n3 >= 0.7.0\n4 >= 0.8.0\n============ ==============\n\nIn order to support TEAL v2, PyTeal v0.6.0 breaks backward compatibility with v0.5.4. PyTeal\nprograms written for PyTeal version 0.5.4 and below will not compile properly and most likely will\ndisplay an error of the form :code:`AttributeError: * object has no attribute 'teal'`.\n\n**WARNING:** before updating PyTeal to a version with generates TEAL v2 contracts and fixing the\nprograms to use the global function :any:`compileTeal` rather the class method :code:`.teal()`, make\nsure your program abides by the TEAL safety guidelines `<https://developer.algorand.org/docs/reference/teal/guidelines/>`_.\nChanging a v1 TEAL program to a v2 TEAL program without any code changes is insecure because v2\nTEAL programs allow rekeying. Specifically, you must add a check that the :code:`RekeyTo` property\nof any transaction is set to the zero address when updating an older PyTeal program from v0.5.4 and\nbelow.\n"
},
{
"alpha_fraction": 0.6628614664077759,
"alphanum_fraction": 0.6788432002067566,
"avg_line_length": 22.052631378173828,
"blob_id": "073c0b59d5b4af709a575b5096f60ed548330b4c",
"content_id": "d42f57de28192f4d98194ffa1ef04fb14bff6c5c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1314,
"license_type": "permissive",
"max_line_length": 80,
"num_lines": 57,
"path": "/pyteal/ast/gaid_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import MAX_GROUP_SIZE, CompileOptions\n\nteal3Options = CompileOptions(version=3)\nteal4Options = CompileOptions(version=4)\n\n\ndef test_gaid_teal_3():\n with pytest.raises(TealInputError):\n GeneratedID(0).__teal__(teal3Options)\n\n\ndef test_gaid():\n expr = GeneratedID(0)\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(expr, Op.gaid, 0)])\n\n actual, _ = expr.__teal__(teal4Options)\n\n assert actual == expected\n\n\ndef test_gaid_invalid():\n with pytest.raises(TealInputError):\n GeneratedID(-1)\n\n with pytest.raises(TealInputError):\n GeneratedID(MAX_GROUP_SIZE)\n\n\ndef test_gaid_dynamic_teal_3():\n with pytest.raises(TealInputError):\n GeneratedID(Int(0)).__teal__(teal3Options)\n\n\ndef test_gaid_dynamic():\n arg = Int(0)\n expr = GeneratedID(arg)\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(arg, Op.int, 0), TealOp(expr, Op.gaids)])\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_gaid_dynamic_invalid():\n with pytest.raises(TealTypeError):\n GeneratedID(Bytes(\"index\"))\n"
},
{
"alpha_fraction": 0.6393035054206848,
"alphanum_fraction": 0.646766185760498,
"avg_line_length": 19.100000381469727,
"blob_id": "3ef8c1108cdb9ccc1fff133bff00b03f874e74ca",
"content_id": "16809091889748fcc7b04b32ded185bc0d89ec2a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 402,
"license_type": "permissive",
"max_line_length": 41,
"num_lines": 20,
"path": "/pyteal/ast/gtxn_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n\ndef test_gtxn_invalid():\n with pytest.raises(TealInputError):\n Gtxn[-1].fee()\n\n with pytest.raises(TealInputError):\n Gtxn[MAX_GROUP_SIZE + 1].sender()\n\n with pytest.raises(TealTypeError):\n Gtxn[Pop(Int(0))].sender()\n\n with pytest.raises(TealTypeError):\n Gtxn[Bytes(\"index\")].sender()\n\n\n# txn_test.py performs additional testing\n"
},
{
"alpha_fraction": 0.48670682311058044,
"alphanum_fraction": 0.5284557342529297,
"avg_line_length": 30.011272430419922,
"blob_id": "8aed559a6880c4b56b96b785519dc3e2c13fcf27",
"content_id": "12d97b328882731a6b23c9f2fc9f5e3b7d20d547",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 19258,
"license_type": "permissive",
"max_line_length": 117,
"num_lines": 621,
"path": "/pyteal/compiler/constants_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from .. import *\n\nfrom .constants import (\n extractIntValue,\n extractBytesValue,\n extractAddrValue,\n createConstantBlocks,\n)\n\n\ndef test_extractIntValue():\n tests = [\n (TealOp(None, Op.int, 0), 0),\n (TealOp(None, Op.int, 5), 5),\n (TealOp(None, Op.int, \"pay\"), 1),\n (TealOp(None, Op.int, \"NoOp\"), 0),\n (TealOp(None, Op.int, \"UpdateApplication\"), 4),\n (TealOp(None, Op.int, \"TMPL_NAME\"), \"TMPL_NAME\"),\n ]\n\n for op, expected in tests:\n actual = extractIntValue(op)\n assert actual == expected\n\n\ndef test_extractBytesValue():\n tests = [\n (TealOp(None, Op.byte, '\"\"'), b\"\"),\n (TealOp(None, Op.byte, '\"test\"'), b\"test\"),\n (TealOp(None, Op.byte, '\"\\\\t\\\\n\\\\\\\\\\\\\"\"'), b'\\t\\n\\\\\"'),\n (TealOp(None, Op.byte, \"0x\"), b\"\"),\n (TealOp(None, Op.byte, \"0x00\"), b\"\\x00\"),\n (TealOp(None, Op.byte, \"0xFF00\"), b\"\\xff\\x00\"),\n (TealOp(None, Op.byte, \"0xff00\"), b\"\\xff\\x00\"),\n (TealOp(None, Op.byte, \"base32()\"), b\"\"),\n (TealOp(None, Op.byte, \"base32(ORSXG5A)\"), b\"test\"),\n (TealOp(None, Op.byte, \"base32(ORSXG5A=)\"), b\"test\"),\n (TealOp(None, Op.byte, \"base64()\"), b\"\"),\n (TealOp(None, Op.byte, \"base64(dGVzdA==)\"), b\"test\"),\n (TealOp(None, Op.byte, \"TMPL_NAME\"), \"TMPL_NAME\"),\n ]\n\n for op, expected in tests:\n actual = extractBytesValue(op)\n assert actual == expected\n\n\ndef test_extractAddrValue():\n tests = [\n (\n TealOp(\n None,\n Op.byte,\n \"WSJHNPJ6YCLX5K4GUMQ4ISPK3ABMS3AL3F6CSVQTCUI5F4I65PWEMCWT3M\",\n ),\n b\"\\xb4\\x92v\\xbd>\\xc0\\x97~\\xab\\x86\\xa3!\\xc4I\\xea\\xd8\\x02\\xc9l\\x0b\\xd9|)V\\x13\\x15\\x11\\xd2\\xf1\\x1e\\xeb\\xec\",\n ),\n (TealOp(None, Op.addr, \"TMPL_NAME\"), \"TMPL_NAME\"),\n ]\n\n for op, expected in tests:\n actual = extractAddrValue(op)\n assert actual == expected\n\n\ndef test_createConstantBlocks_empty():\n ops = []\n\n expected = ops[:]\n\n actual = createConstantBlocks(ops)\n assert actual == expected\n\n\ndef test_createConstantBlocks_no_consts():\n ops = [\n TealOp(None, Op.txn, \"Sender\"),\n TealOp(None, Op.txn, \"Receiver\"),\n TealOp(None, Op.eq),\n ]\n\n expected = ops[:]\n\n actual = createConstantBlocks(ops)\n assert actual == expected\n\n\ndef test_createConstantBlocks_pushint():\n ops = [\n TealOp(None, Op.int, 0),\n TealOp(None, Op.int, \"OptIn\"),\n TealOp(None, Op.add),\n ]\n\n expected = [\n TealOp(None, Op.pushint, 0, \"//\", 0),\n TealOp(None, Op.pushint, 1, \"//\", \"OptIn\"),\n TealOp(None, Op.add),\n ]\n\n actual = createConstantBlocks(ops)\n assert actual == expected\n\n\ndef test_createConstantBlocks_intblock_single():\n ops = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, \"OptIn\"),\n TealOp(None, Op.add),\n ]\n\n expected = [\n TealOp(None, Op.intcblock, 1),\n TealOp(None, Op.intc_0, \"//\", 1),\n TealOp(None, Op.intc_0, \"//\", \"OptIn\"),\n TealOp(None, Op.add),\n ]\n\n actual = createConstantBlocks(ops)\n assert actual == expected\n\n\ndef test_createConstantBlocks_intblock_multiple():\n ops = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, \"OptIn\"),\n TealOp(None, Op.add),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, \"keyreg\"),\n TealOp(None, Op.add),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.int, \"ClearState\"),\n TealOp(None, Op.add),\n ]\n\n expected = [\n TealOp(None, Op.intcblock, 1, 2, 3),\n TealOp(None, Op.intc_0, \"//\", 1),\n TealOp(None, Op.intc_0, \"//\", \"OptIn\"),\n TealOp(None, Op.add),\n TealOp(None, Op.intc_1, \"//\", 2),\n TealOp(None, Op.intc_1, \"//\", \"keyreg\"),\n TealOp(None, Op.add),\n 
TealOp(None, Op.intc_2, \"//\", 3),\n TealOp(None, Op.intc_2, \"//\", \"ClearState\"),\n TealOp(None, Op.add),\n ]\n\n actual = createConstantBlocks(ops)\n assert actual == expected\n\n\ndef test_createConstantBlocks_intblock_pushint():\n ops = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, \"OptIn\"),\n TealOp(None, Op.add),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.add),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.int, \"ClearState\"),\n TealOp(None, Op.add),\n ]\n\n expected = [\n TealOp(None, Op.intcblock, 3, 1),\n TealOp(None, Op.intc_1, \"//\", 1),\n TealOp(None, Op.intc_1, \"//\", \"OptIn\"),\n TealOp(None, Op.add),\n TealOp(None, Op.pushint, 2, \"//\", 2),\n TealOp(None, Op.intc_0, \"//\", 3),\n TealOp(None, Op.add),\n TealOp(None, Op.intc_0, \"//\", 3),\n TealOp(None, Op.intc_0, \"//\", \"ClearState\"),\n TealOp(None, Op.add),\n ]\n\n actual = createConstantBlocks(ops)\n assert actual == expected\n\n\ndef test_createConstantBlocks_pushbytes():\n ops = [\n TealOp(None, Op.byte, \"0x0102\"),\n TealOp(None, Op.byte, \"0x0103\"),\n TealOp(None, Op.concat),\n ]\n\n expected = [\n TealOp(None, Op.pushbytes, \"0x0102\", \"//\", \"0x0102\"),\n TealOp(None, Op.pushbytes, \"0x0103\", \"//\", \"0x0103\"),\n TealOp(None, Op.concat),\n ]\n\n actual = createConstantBlocks(ops)\n assert actual == expected\n\n\ndef test_createConstantBlocks_byteblock_single():\n ops = [\n TealOp(None, Op.byte, \"0x0102\"),\n TealOp(None, Op.byte, \"base64(AQI=)\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.byte, \"base32(AEBA====)\"),\n TealOp(None, Op.concat),\n ]\n\n expected = [\n TealOp(None, Op.bytecblock, \"0x0102\"),\n TealOp(None, Op.bytec_0, \"//\", \"0x0102\"),\n TealOp(None, Op.bytec_0, \"//\", \"base64(AQI=)\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.bytec_0, \"//\", \"base32(AEBA====)\"),\n TealOp(None, Op.concat),\n ]\n\n actual = createConstantBlocks(ops)\n assert actual == expected\n\n\ndef test_createConstantBlocks_byteblock_multiple():\n ops = [\n TealOp(None, Op.byte, \"0x0102\"),\n TealOp(None, Op.byte, \"base64(AQI=)\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.byte, \"base32(AEBA====)\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.byte, '\"test\"'),\n TealOp(None, Op.concat),\n TealOp(None, Op.byte, \"base32(ORSXG5A=)\"),\n TealOp(None, Op.concat),\n TealOp(\n None,\n Op.byte,\n \"0xb49276bd3ec0977eab86a321c449ead802c96c0bd97c2956131511d2f11eebec\",\n ),\n TealOp(None, Op.concat),\n TealOp(\n None, Op.addr, \"WSJHNPJ6YCLX5K4GUMQ4ISPK3ABMS3AL3F6CSVQTCUI5F4I65PWEMCWT3M\"\n ),\n TealOp(None, Op.concat),\n ]\n\n expected = [\n TealOp(\n None,\n Op.bytecblock,\n \"0x0102\",\n \"0x74657374\",\n \"0xb49276bd3ec0977eab86a321c449ead802c96c0bd97c2956131511d2f11eebec\",\n ),\n TealOp(None, Op.bytec_0, \"//\", \"0x0102\"),\n TealOp(None, Op.bytec_0, \"//\", \"base64(AQI=)\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.bytec_0, \"//\", \"base32(AEBA====)\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.bytec_1, \"//\", '\"test\"'),\n TealOp(None, Op.concat),\n TealOp(None, Op.bytec_1, \"//\", \"base32(ORSXG5A=)\"),\n TealOp(None, Op.concat),\n TealOp(\n None,\n Op.bytec_2,\n \"//\",\n \"0xb49276bd3ec0977eab86a321c449ead802c96c0bd97c2956131511d2f11eebec\",\n ),\n TealOp(None, Op.concat),\n TealOp(\n None,\n Op.bytec_2,\n \"//\",\n \"WSJHNPJ6YCLX5K4GUMQ4ISPK3ABMS3AL3F6CSVQTCUI5F4I65PWEMCWT3M\",\n ),\n TealOp(None, Op.concat),\n ]\n\n actual = createConstantBlocks(ops)\n assert actual == expected\n\n\ndef test_createConstantBlocks_byteblock_pushbytes():\n 
ops = [\n TealOp(None, Op.byte, \"0x0102\"),\n TealOp(None, Op.byte, \"base64(AQI=)\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.byte, \"base32(AEBA====)\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.byte, '\"test\"'),\n TealOp(None, Op.concat),\n TealOp(None, Op.byte, \"base32(ORSXG5A=)\"),\n TealOp(None, Op.concat),\n TealOp(\n None, Op.addr, \"WSJHNPJ6YCLX5K4GUMQ4ISPK3ABMS3AL3F6CSVQTCUI5F4I65PWEMCWT3M\"\n ),\n TealOp(None, Op.concat),\n ]\n\n expected = [\n TealOp(None, Op.bytecblock, \"0x0102\", \"0x74657374\"),\n TealOp(None, Op.bytec_0, \"//\", \"0x0102\"),\n TealOp(None, Op.bytec_0, \"//\", \"base64(AQI=)\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.bytec_0, \"//\", \"base32(AEBA====)\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.bytec_1, \"//\", '\"test\"'),\n TealOp(None, Op.concat),\n TealOp(None, Op.bytec_1, \"//\", \"base32(ORSXG5A=)\"),\n TealOp(None, Op.concat),\n TealOp(\n None,\n Op.pushbytes,\n \"0xb49276bd3ec0977eab86a321c449ead802c96c0bd97c2956131511d2f11eebec\",\n \"//\",\n \"WSJHNPJ6YCLX5K4GUMQ4ISPK3ABMS3AL3F6CSVQTCUI5F4I65PWEMCWT3M\",\n ),\n TealOp(None, Op.concat),\n ]\n\n actual = createConstantBlocks(ops)\n assert actual == expected\n\n\ndef test_createConstantBlocks_all():\n ops = [\n TealOp(None, Op.byte, \"0x0102\"),\n TealOp(None, Op.byte, \"base64(AQI=)\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.byte, \"base32(AEBA====)\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.byte, '\"test\"'),\n TealOp(None, Op.concat),\n TealOp(None, Op.byte, \"base32(ORSXG5A=)\"),\n TealOp(None, Op.concat),\n TealOp(\n None, Op.addr, \"WSJHNPJ6YCLX5K4GUMQ4ISPK3ABMS3AL3F6CSVQTCUI5F4I65PWEMCWT3M\"\n ),\n TealOp(None, Op.concat),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, \"OptIn\"),\n TealOp(None, Op.add),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.add),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.int, \"ClearState\"),\n TealOp(None, Op.add),\n ]\n\n expected = [\n TealOp(None, Op.intcblock, 3, 1),\n TealOp(None, Op.bytecblock, \"0x0102\", \"0x74657374\"),\n TealOp(None, Op.bytec_0, \"//\", \"0x0102\"),\n TealOp(None, Op.bytec_0, \"//\", \"base64(AQI=)\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.bytec_0, \"//\", \"base32(AEBA====)\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.bytec_1, \"//\", '\"test\"'),\n TealOp(None, Op.concat),\n TealOp(None, Op.bytec_1, \"//\", \"base32(ORSXG5A=)\"),\n TealOp(None, Op.concat),\n TealOp(\n None,\n Op.pushbytes,\n \"0xb49276bd3ec0977eab86a321c449ead802c96c0bd97c2956131511d2f11eebec\",\n \"//\",\n \"WSJHNPJ6YCLX5K4GUMQ4ISPK3ABMS3AL3F6CSVQTCUI5F4I65PWEMCWT3M\",\n ),\n TealOp(None, Op.concat),\n TealOp(None, Op.intc_1, \"//\", 1),\n TealOp(None, Op.intc_1, \"//\", \"OptIn\"),\n TealOp(None, Op.add),\n TealOp(None, Op.pushint, 2, \"//\", 2),\n TealOp(None, Op.intc_0, \"//\", 3),\n TealOp(None, Op.add),\n TealOp(None, Op.intc_0, \"//\", 3),\n TealOp(None, Op.intc_0, \"//\", \"ClearState\"),\n TealOp(None, Op.add),\n ]\n\n actual = createConstantBlocks(ops)\n assert actual == expected\n\n\ndef test_createConstantBlocks_tmpl_int():\n ops = [\n TealOp(None, Op.int, \"TMPL_INT_1\"),\n TealOp(None, Op.int, \"TMPL_INT_1\"),\n TealOp(None, Op.eq),\n TealOp(None, Op.int, \"TMPL_INT_2\"),\n TealOp(None, Op.add),\n ]\n\n expected = [\n TealOp(None, Op.intcblock, \"TMPL_INT_1\"),\n TealOp(None, Op.intc_0, \"//\", \"TMPL_INT_1\"),\n TealOp(None, Op.intc_0, \"//\", \"TMPL_INT_1\"),\n TealOp(None, Op.eq),\n TealOp(None, Op.pushint, \"TMPL_INT_2\", \"//\", \"TMPL_INT_2\"),\n TealOp(None, Op.add),\n ]\n\n actual = createConstantBlocks(ops)\n assert actual == expected\n\n\ndef test_createConstantBlocks_tmpl_int_mixed():\n ops = [\n TealOp(None, Op.int, \"TMPL_INT_1\"),\n TealOp(None, Op.int, \"TMPL_INT_1\"),\n TealOp(None, Op.eq),\n TealOp(None, Op.int, \"TMPL_INT_2\"),\n TealOp(None, Op.add),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.add),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.add),\n ]\n\n expected = [\n TealOp(None, Op.intcblock, \"TMPL_INT_1\", 0),\n TealOp(None, Op.intc_0, \"//\", \"TMPL_INT_1\"),\n TealOp(None, Op.intc_0, \"//\", \"TMPL_INT_1\"),\n TealOp(None, Op.eq),\n TealOp(None, Op.pushint, \"TMPL_INT_2\", \"//\", \"TMPL_INT_2\"),\n TealOp(None, Op.add),\n TealOp(None, Op.intc_1, \"//\", 0),\n TealOp(None, Op.intc_1, \"//\", 0),\n TealOp(None, Op.add),\n TealOp(None, Op.pushint, 1, \"//\", 1),\n TealOp(None, Op.add),\n ]\n\n actual = createConstantBlocks(ops)\n assert actual == expected\n\n\ndef test_createConstantBlocks_tmpl_bytes():\n ops = [\n TealOp(None, Op.byte, \"TMPL_BYTES_1\"),\n TealOp(None, Op.byte, \"TMPL_BYTES_1\"),\n TealOp(None, Op.eq),\n TealOp(None, Op.byte, \"TMPL_BYTES_2\"),\n TealOp(None, Op.concat),\n ]\n\n expected = [\n TealOp(None, Op.bytecblock, \"TMPL_BYTES_1\"),\n TealOp(None, Op.bytec_0, \"//\", \"TMPL_BYTES_1\"),\n TealOp(None, Op.bytec_0, \"//\", \"TMPL_BYTES_1\"),\n TealOp(None, Op.eq),\n TealOp(None, Op.pushbytes, \"TMPL_BYTES_2\", \"//\", \"TMPL_BYTES_2\"),\n TealOp(None, Op.concat),\n ]\n\n actual = createConstantBlocks(ops)\n assert actual == expected\n\n\ndef test_createConstantBlocks_tmpl_bytes_mixed():\n ops = [\n TealOp(None, Op.byte, \"TMPL_BYTES_1\"),\n TealOp(None, Op.byte, \"TMPL_BYTES_1\"),\n TealOp(None, Op.eq),\n TealOp(None, Op.byte, \"TMPL_BYTES_2\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.byte, \"0x00\"),\n TealOp(None, Op.byte, \"0x00\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.byte, \"0x01\"),\n TealOp(None, Op.concat),\n ]\n\n expected = [\n TealOp(None, Op.bytecblock, \"TMPL_BYTES_1\", \"0x00\"),\n TealOp(None, Op.bytec_0, \"//\", \"TMPL_BYTES_1\"),\n TealOp(None, Op.bytec_0, \"//\", \"TMPL_BYTES_1\"),\n TealOp(None, Op.eq),\n TealOp(None, Op.pushbytes, \"TMPL_BYTES_2\", \"//\", \"TMPL_BYTES_2\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.bytec_1, \"//\", \"0x00\"),\n TealOp(None, Op.bytec_1, \"//\", \"0x00\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.pushbytes, \"0x01\", \"//\", \"0x01\"),\n TealOp(None, Op.concat),\n ]\n\n actual = createConstantBlocks(ops)\n assert actual == expected\n\n\ndef test_createConstantBlocks_tmpl_all():\n ops = [\n TealOp(None, Op.byte, \"TMPL_BYTES_1\"),\n TealOp(None, Op.byte, \"TMPL_BYTES_1\"),\n TealOp(None, Op.eq),\n TealOp(None, Op.byte, \"TMPL_BYTES_2\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.byte, \"0x00\"),\n TealOp(None, Op.byte, \"0x00\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.byte, \"0x01\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.len),\n TealOp(None, Op.int, \"TMPL_INT_1\"),\n TealOp(None, Op.int, \"TMPL_INT_1\"),\n TealOp(None, Op.eq),\n TealOp(None, Op.int, \"TMPL_INT_2\"),\n TealOp(None, Op.add),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.add),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.add),\n TealOp(None, Op.eq),\n ]\n\n expected = [\n TealOp(None, Op.intcblock, \"TMPL_INT_1\", 0),\n TealOp(None, Op.bytecblock, \"TMPL_BYTES_1\", \"0x00\"),\n TealOp(None, Op.bytec_0, \"//\", \"TMPL_BYTES_1\"),\n TealOp(None, Op.bytec_0, \"//\", \"TMPL_BYTES_1\"),\n TealOp(None, Op.eq),\n TealOp(None, Op.pushbytes, \"TMPL_BYTES_2\", \"//\", \"TMPL_BYTES_2\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.bytec_1, \"//\", \"0x00\"),\n TealOp(None, Op.bytec_1, \"//\", \"0x00\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.pushbytes, \"0x01\", \"//\", \"0x01\"),\n TealOp(None, Op.concat),\n TealOp(None, Op.len),\n TealOp(None, Op.intc_0, \"//\", \"TMPL_INT_1\"),\n TealOp(None, Op.intc_0, \"//\", \"TMPL_INT_1\"),\n TealOp(None, Op.eq),\n TealOp(None, Op.pushint, \"TMPL_INT_2\", \"//\", \"TMPL_INT_2\"),\n TealOp(None, Op.add),\n TealOp(None, Op.intc_1, \"//\", 0),\n TealOp(None, Op.intc_1, \"//\", 0),\n TealOp(None, Op.add),\n TealOp(None, Op.pushint, 1, \"//\", 1),\n TealOp(None, Op.add),\n TealOp(None, Op.eq),\n ]\n\n actual = createConstantBlocks(ops)\n assert actual == expected\n\n\ndef test_createConstantBlocks_intc():\n \"\"\"Test scenario where there are more than 4 constants in the intcblock.\n If the 4th constant can't fit in one varuint byte (more than 2**7) it\n should be referenced with the Op.intc 4 command.\n \"\"\"\n\n ops = [\n TealOp(None, Op.int, 0),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.int, 2 ** 7),\n TealOp(None, Op.int, 2 ** 7),\n ]\n\n expected = [\n TealOp(None, Op.intcblock, 0, 1, 2, 3, 2 ** 7),\n TealOp(None, Op.intc_0, \"//\", 0),\n TealOp(None, Op.intc_0, \"//\", 0),\n TealOp(None, Op.intc_1, \"//\", 1),\n TealOp(None, Op.intc_1, \"//\", 1),\n TealOp(None, Op.intc_2, \"//\", 2),\n TealOp(None, Op.intc_2, \"//\", 2),\n TealOp(None, Op.intc_3, \"//\", 3),\n TealOp(None, Op.intc_3, \"//\", 3),\n TealOp(None, Op.intc, 4, \"//\", 2 ** 7),\n TealOp(None, Op.intc, 4, \"//\", 2 ** 7),\n ]\n\n actual = createConstantBlocks(ops)\n assert actual == expected\n\n\ndef test_createConstantBlocks_small_constant():\n \"\"\"If a constant cannot be referenced using the intc_[0..3] commands\n and it can be stored in one varuint byte then Op.pushint is used.\n \"\"\"\n\n for cur in range(4, 2 ** 7):\n ops = [\n TealOp(None, Op.int, 0),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.int, cur),\n TealOp(None, Op.int, cur),\n ]\n\n expected = [\n TealOp(None, Op.intcblock, 0, 1, 2, 3),\n TealOp(None, Op.intc_0, \"//\", 0),\n TealOp(None, Op.intc_0, \"//\", 0),\n TealOp(None, Op.intc_1, \"//\", 1),\n TealOp(None, Op.intc_1, \"//\", 1),\n TealOp(None, Op.intc_2, \"//\", 2),\n TealOp(None, Op.intc_2, \"//\", 2),\n TealOp(None, Op.intc_3, \"//\", 3),\n TealOp(None, Op.intc_3, \"//\", 3),\n TealOp(None, Op.pushint, cur, \"//\", cur),\n TealOp(None, Op.pushint, cur, \"//\", cur),\n ]\n\n actual = createConstantBlocks(ops)\n assert actual == expected\n"
},
{
"alpha_fraction": 0.5960952043533325,
"alphanum_fraction": 0.6009762287139893,
"avg_line_length": 31.13725471496582,
"blob_id": "436a19c6ba20f7840599a3d601ed7620bc44cd04",
"content_id": "f2a4a996542dff15d44036eb818b43dbf6b57f9e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1639,
"license_type": "permissive",
"max_line_length": 96,
"num_lines": 51,
"path": "/pyteal/ast/scratchvar.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from ..types import TealType, require_type\nfrom .expr import Expr\nfrom .scratch import ScratchSlot, ScratchLoad\n\n\nclass ScratchVar:\n \"\"\"\n Interface around Scratch space, similiar to get/put local/global state\n\n Example:\n .. code-block:: python\n\n myvar = ScratchVar(TealType.uint64)\n Seq([\n myvar.store(Int(5)),\n Assert(myvar.load() == Int(5))\n ])\n \"\"\"\n\n def __init__(self, type: TealType = TealType.anytype, slotId: int = None):\n \"\"\"Create a new ScratchVar with an optional type.\n\n Args:\n type (optional): The type that this variable can hold. An error will be thrown if an\n expression with an incompatiable type is stored in this variable. Defaults to\n TealType.anytype.\n slotId (optional): A scratch slot id that the compiler must store the value.\n This id may be a Python int in the range [0-256).\n \"\"\"\n self.slot = ScratchSlot(requestedSlotId=slotId)\n self.type = type\n\n def storage_type(self) -> TealType:\n \"\"\"Get the type of expressions that can be stored in this ScratchVar.\"\"\"\n return self.type\n\n def store(self, value: Expr) -> Expr:\n \"\"\"Store value in Scratch Space\n\n Args:\n value: The value to store. Must conform to this ScratchVar's type.\n \"\"\"\n require_type(value.type_of(), self.type)\n return self.slot.store(value)\n\n def load(self) -> ScratchLoad:\n \"\"\"Load value from Scratch Space\"\"\"\n return self.slot.load(self.type)\n\n\nScratchVar.__module__ = \"pyteal\"\n"
},
{
"alpha_fraction": 0.5664848685264587,
"alphanum_fraction": 0.6065450310707092,
"avg_line_length": 26.549222946166992,
"blob_id": "968e5d61cd8e37e812c6f85afca8822fd73c24b6",
"content_id": "f5f7c16f7b7c9c9349a99d9ef7f2ae60c0a86184",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5317,
"license_type": "permissive",
"max_line_length": 107,
"num_lines": 193,
"path": "/pyteal/ast/nonce_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\noptions = CompileOptions()\n\n\ndef test_nonce_has_return():\n exprWithReturn = Nonce(\n \"base32\",\n \"7Z5PWO2C6LFNQFGHWKSK5H47IQP5OJW2M3HA2QPXTY3WTNP5NU2MHBW27M\",\n Return(Int(1)),\n )\n assert exprWithReturn.has_return()\n\n exprWithoutReturn = Nonce(\n \"base32\", \"7Z5PWO2C6LFNQFGHWKSK5H47IQP5OJW2M3HA2QPXTY3WTNP5NU2MHBW27M\", Int(1)\n )\n assert not exprWithoutReturn.has_return()\n\n\ndef test_nonce_base32():\n arg = Int(1)\n expr = Nonce(\n \"base32\", \"7Z5PWO2C6LFNQFGHWKSK5H47IQP5OJW2M3HA2QPXTY3WTNP5NU2MHBW27M\", arg\n )\n assert expr.type_of() == TealType.uint64\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n # copying expr from actual.ops[0] and actual.ops[1] because they can't be determined from outside code.\n expected = TealSimpleBlock(\n [\n TealOp(\n actual.ops[0].expr,\n Op.byte,\n \"base32(7Z5PWO2C6LFNQFGHWKSK5H47IQP5OJW2M3HA2QPXTY3WTNP5NU2MHBW27M)\",\n ),\n TealOp(actual.ops[1].expr, Op.pop),\n TealOp(arg, Op.int, 1),\n ]\n )\n\n assert actual == expected\n\n\ndef test_nonce_base32_empty():\n arg = Int(1)\n expr = Nonce(\"base32\", \"\", arg)\n assert expr.type_of() == TealType.uint64\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n # copying expr from actual.ops[0] and actual.ops[1] because they can't be determined from outside code.\n expected = TealSimpleBlock(\n [\n TealOp(actual.ops[0].expr, Op.byte, \"base32()\"),\n TealOp(actual.ops[1].expr, Op.pop),\n TealOp(arg, Op.int, 1),\n ]\n )\n\n assert actual == expected\n\n\ndef test_nonce_base64():\n arg = Txn.sender()\n expr = Nonce(\"base64\", \"Zm9vYmE=\", arg)\n assert expr.type_of() == TealType.bytes\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n # copying expr from actual.ops[0] and actual.ops[1] because they can't be determined from outside code.\n expected = TealSimpleBlock(\n [\n TealOp(actual.ops[0].expr, Op.byte, \"base64(Zm9vYmE=)\"),\n TealOp(actual.ops[1].expr, Op.pop),\n TealOp(arg, Op.txn, \"Sender\"),\n ]\n )\n\n assert actual == expected\n\n\ndef test_nonce_base64_empty():\n arg = Int(1)\n expr = Nonce(\"base64\", \"\", arg)\n assert expr.type_of() == TealType.uint64\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n # copying expr from actual.ops[0] and actual.ops[1] because they can't be determined from outside code.\n expected = TealSimpleBlock(\n [\n TealOp(actual.ops[0].expr, Op.byte, \"base64()\"),\n TealOp(actual.ops[1].expr, Op.pop),\n TealOp(arg, Op.int, 1),\n ]\n )\n\n assert actual == expected\n\n\ndef test_nonce_base16():\n arg = Int(1)\n expr = Nonce(\"base16\", \"A21212EF\", arg)\n assert expr.type_of() == TealType.uint64\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n # copying expr from actual.ops[0] and actual.ops[1] because they can't be determined from outside code.\n expected = TealSimpleBlock(\n [\n TealOp(actual.ops[0].expr, Op.byte, \"0xA21212EF\"),\n TealOp(actual.ops[1].expr, Op.pop),\n TealOp(arg, Op.int, 1),\n ]\n )\n\n assert actual == expected\n\n\ndef test_nonce_base16_prefix():\n arg = Int(1)\n expr = Nonce(\"base16\", \"0xA21212EF\", arg)\n assert expr.type_of() == TealType.uint64\n\n actual, _ = expr.__teal__(options)\n 
actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n # copying expr from actual.ops[0] and actual.ops[1] because they can't be determined from outside code.\n expected = TealSimpleBlock(\n [\n TealOp(actual.ops[0].expr, Op.byte, \"0xA21212EF\"),\n TealOp(actual.ops[1].expr, Op.pop),\n TealOp(arg, Op.int, 1),\n ]\n )\n\n assert actual == expected\n\n\ndef test_nonce_base16_empty():\n arg = Int(6)\n expr = Nonce(\"base16\", \"\", arg)\n assert expr.type_of() == TealType.uint64\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n # copying expr from actual.ops[0] and actual.ops[1] because they can't be determined from outside code.\n expected = TealSimpleBlock(\n [\n TealOp(actual.ops[0].expr, Op.byte, \"0x\"),\n TealOp(actual.ops[1].expr, Op.pop),\n TealOp(arg, Op.int, 6),\n ]\n )\n\n assert actual == expected\n\n\ndef test_nonce_invalid():\n with pytest.raises(TealInputError):\n Nonce(\"base23\", \"\", Int(1))\n\n with pytest.raises(TealInputError):\n Nonce(\"base32\", \"Zm9vYmE=\", Int(1))\n\n with pytest.raises(TealInputError):\n Nonce(\"base64\", \"?????\", Int(1))\n\n with pytest.raises(TealInputError):\n Nonce(\n \"base16\",\n \"7Z5PWO2C6LFNQFGHWKSK5H47IQP5OJW2M3HA2QPXTY3WTNP5NU2MHBW27M\",\n Int(1),\n )\n"
},
{
"alpha_fraction": 0.6524999737739563,
"alphanum_fraction": 0.6558333039283752,
"avg_line_length": 20.428571701049805,
"blob_id": "4c87035e0bfe45c3ebec89b6be94b446f624249d",
"content_id": "5eb372df2e085eac34ca97e7cfa69919b9aa6f3e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1200,
"license_type": "permissive",
"max_line_length": 73,
"num_lines": 56,
"path": "/pyteal/ast/tmpl_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\noptions = CompileOptions()\n\n\ndef test_tmpl_int():\n expr = Tmpl.Int(\"TMPL_AMNT\")\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(expr, Op.int, \"TMPL_AMNT\")])\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_tmpl_int_invalid():\n with pytest.raises(TealInputError):\n Tmpl.Int(\"whatever\")\n\n\ndef test_tmpl_bytes():\n expr = Tmpl.Bytes(\"TMPL_NOTE\")\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock([TealOp(expr, Op.byte, \"TMPL_NOTE\")])\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_tmpl_bytes_invalid():\n with pytest.raises(TealInputError):\n Tmpl.Bytes(\"whatever\")\n\n\ndef test_tmpl_addr():\n expr = Tmpl.Addr(\"TMPL_RECEIVER0\")\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock([TealOp(expr, Op.addr, \"TMPL_RECEIVER0\")])\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_tmpl_addr_invalid():\n with pytest.raises(TealInputError):\n Tmpl.Addr(\"whatever\")\n"
},
{
"alpha_fraction": 0.7325077652931213,
"alphanum_fraction": 0.7380805015563965,
"avg_line_length": 43.86111068725586,
"blob_id": "6322b2654395f476d43832cca36891157e05c41e",
"content_id": "ddb1231414b9e7b2dd365b68fc5aa3e6bd8904c6",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "reStructuredText",
"length_bytes": 1615,
"license_type": "permissive",
"max_line_length": 105,
"num_lines": 36,
"path": "/docs/scratch.rst",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": ".. _scratch:\n\nScratch Space\n========================\n\n`Scratch space <https://developer.algorand.org/docs/reference/teal/specification/#scratch-space>`_\nis a temporary place to store values for later use in your program. It is temporary because any\nchanges to scratch space do not persist beyond the current tranasaction. Scratch space can be used\nin both Application and Signature mode.\n\nScratch space consists of 256 scratch slots, each capable of storing one integer or byte slice. When\nusing the :any:`ScratchVar` class to work with scratch space, a slot is automatically assigned to\neach variable.\n\nWriting and Reading\n~~~~~~~~~~~~~~~~~~~~~~\n\nTo write to scratch space, first create a :any:`ScratchVar` object and pass in the :any:`TealType`\nof the values that you will store there. It is possible to create a :any:`ScratchVar` that can store\nboth integers and byte slices by passing no arguments to the :any:`ScratchVar` constructor, but note\nthat no type checking takes places in this situation. It is also possible to manually specify which \nslot ID the compiler should assign the scratch slot to in the TEAL code. If no slot ID is specified,\nthe compiler will assign it to any available slot. \n\nTo write or read values, use the corresponding :any:`ScratchVar.store` or :any:`ScratchVar.load` methods.\n\nFor example:\n\n.. code-block:: python\n\n myvar = ScratchVar(TealType.uint64) # assign a scratch slot in any available slot\n program = Seq([\n myvar.store(Int(5)),\n Assert(myvar.load() == Int(5))\n ])\n anotherVar = ScratchVar(TealType.bytes, 4) # assign this scratch slot to slot #4\n"
},
{
"alpha_fraction": 0.7165354490280151,
"alphanum_fraction": 0.7559055089950562,
"avg_line_length": 24.399999618530273,
"blob_id": "2e89cc2548734a99c4c9c8483c45cb3ac55ef6ea",
"content_id": "54b9aee8091726f43913d0ba9d37228319c3af92",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 127,
"license_type": "permissive",
"max_line_length": 46,
"num_lines": 5,
"path": "/pyteal/config.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "# Maximum size of an atomic transaction group.\nMAX_GROUP_SIZE = 16\n\n# Number of scratch space slots available.\nNUM_SLOTS = 256\n"
},
{
"alpha_fraction": 0.6345646381378174,
"alphanum_fraction": 0.6372031569480896,
"avg_line_length": 27.60377311706543,
"blob_id": "ab38607d8a9bb4b9b7deda3afce1d02dabd15c69",
"content_id": "7516a40aad3e4825627a0c2a0b1102c00458da84",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1516,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 53,
"path": "/pyteal/ast/assert_.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from typing import TYPE_CHECKING\n\nfrom ..types import TealType, require_type\nfrom ..ir import TealOp, Op, TealBlock, TealSimpleBlock, TealConditionalBlock\nfrom .expr import Expr\n\nif TYPE_CHECKING:\n from ..compiler import CompileOptions\n\n\nclass Assert(Expr):\n \"\"\"A control flow expression to verify that a condition is true.\"\"\"\n\n def __init__(self, cond: Expr) -> None:\n \"\"\"Create an assert statement that raises an error if the condition is false.\n\n Args:\n cond: The condition to check. Must evaluate to a uint64.\n \"\"\"\n super().__init__()\n require_type(cond.type_of(), TealType.uint64)\n self.cond = cond\n\n def __teal__(self, options: \"CompileOptions\"):\n if options.version >= Op.assert_.min_version:\n # use assert op if available\n return TealBlock.FromOp(options, TealOp(self, Op.assert_), self.cond)\n\n # if assert op is not available, use branches and err\n condStart, condEnd = self.cond.__teal__(options)\n\n end = TealSimpleBlock([])\n errBlock = TealSimpleBlock([TealOp(self, Op.err)])\n\n branchBlock = TealConditionalBlock([])\n branchBlock.setTrueBlock(end)\n branchBlock.setFalseBlock(errBlock)\n\n condEnd.setNextBlock(branchBlock)\n\n return condStart, end\n\n def __str__(self):\n return \"(Assert {})\".format(self.cond)\n\n def type_of(self):\n return TealType.none\n\n def has_return(self):\n return False\n\n\nAssert.__module__ = \"pyteal\"\n"
},
{
"alpha_fraction": 0.5891504883766174,
"alphanum_fraction": 0.5902519822120667,
"avg_line_length": 30.716157913208008,
"blob_id": "87f63f03aed7bd638b89cce751e212ec7246e4e9",
"content_id": "5c1a2111e032fdb3d88ece165821e2fa8775f653",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7263,
"license_type": "permissive",
"max_line_length": 122,
"num_lines": 229,
"path": "/pyteal/ast/subroutine.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from typing import Callable, Tuple, List, Optional, cast, TYPE_CHECKING\nfrom inspect import Parameter, signature\nfrom functools import wraps\n\nfrom ..types import TealType, require_type\nfrom ..ir import TealOp, Op, TealBlock\nfrom ..errors import TealInputError, verifyTealVersion\nfrom .expr import Expr\nfrom .seq import Seq\nfrom .scratchvar import ScratchVar\n\nif TYPE_CHECKING:\n from ..ir import TealSimpleBlock\n from ..compiler import CompileOptions\n\n\nclass SubroutineDefinition:\n\n nextSubroutineId = 0\n\n def __init__(\n self, implementation: Callable[..., Expr], returnType: TealType\n ) -> None:\n super().__init__()\n self.id = SubroutineDefinition.nextSubroutineId\n SubroutineDefinition.nextSubroutineId += 1\n\n if not callable(implementation):\n raise TealInputError(\"Input to SubroutineDefinition is not callable\")\n\n sig = signature(implementation)\n\n for name, param in sig.parameters.items():\n if param.kind not in (\n Parameter.POSITIONAL_ONLY,\n Parameter.POSITIONAL_OR_KEYWORD,\n ):\n raise TealInputError(\n \"Function has a parameter type that is not allowed in a subroutine: parameter {} with type {}\".format(\n name, param.kind\n )\n )\n\n if param.default != Parameter.empty:\n raise TealInputError(\n \"Function has a parameter with a default value, which is not allowed in a subroutine: {}\".format(\n name\n )\n )\n\n self.implementation = implementation\n self.implementationParams = sig.parameters\n self.returnType = returnType\n\n self.declaration: Optional[\"SubroutineDeclaration\"] = None\n\n def getDeclaration(self) -> \"SubroutineDeclaration\":\n if self.declaration is None:\n # lazy evaluate subroutine\n self.declaration = evaluateSubroutine(self)\n return self.declaration\n\n def name(self) -> str:\n return self.implementation.__name__\n\n def argumentCount(self) -> int:\n return len(self.implementationParams)\n\n def invoke(self, args: List[Expr]) -> \"SubroutineCall\":\n if len(args) != self.argumentCount():\n raise TealInputError(\n \"Incorrect number of arguments for subroutine call. 
Expected {} arguments, got {}\".format(\n self.argumentCount(), len(args)\n )\n )\n\n for i, arg in enumerate(args):\n if not isinstance(arg, Expr):\n raise TealInputError(\n \"Argument at index {} of subroutine call is not a PyTeal expression: {}\".format(\n i, arg\n )\n )\n\n return SubroutineCall(self, args)\n\n def __str__(self):\n return \"subroutine#{}\".format(self.id)\n\n def __eq__(self, other):\n if isinstance(other, SubroutineDefinition):\n return self.id == other.id and self.implementation == other.implementation\n return False\n\n def __hash__(self):\n return hash(self.id)\n\n\nSubroutineDefinition.__module__ = \"pyteal\"\n\n\nclass SubroutineDeclaration(Expr):\n def __init__(self, subroutine: SubroutineDefinition, body: Expr) -> None:\n super().__init__()\n self.subroutine = subroutine\n self.body = body\n\n def __teal__(self, options: \"CompileOptions\"):\n return self.body.__teal__(options)\n\n def __str__(self):\n return '(SubroutineDeclaration \"{}\" {})'.format(\n self.subroutine.name(), self.body\n )\n\n def type_of(self):\n return self.body.type_of()\n\n def has_return(self):\n return self.body.has_return()\n\n\nSubroutineDeclaration.__module__ = \"pyteal\"\n\n\nclass SubroutineCall(Expr):\n def __init__(self, subroutine: SubroutineDefinition, args: List[Expr]) -> None:\n super().__init__()\n self.subroutine = subroutine\n self.args = args\n\n for i, arg in enumerate(args):\n if arg.type_of() == TealType.none:\n raise TealInputError(\n \"Subroutine argument at index {} evaluates to TealType.none\".format(\n i\n )\n )\n\n def __teal__(self, options: \"CompileOptions\"):\n verifyTealVersion(\n Op.callsub.min_version,\n options.version,\n \"TEAL version too low to use SubroutineCall expression\",\n )\n\n op = TealOp(self, Op.callsub, self.subroutine)\n return TealBlock.FromOp(options, op, *self.args)\n\n def __str__(self):\n ret_str = '(SubroutineCall \"' + self.subroutine.name() + '\" ('\n for a in self.args:\n ret_str += \" \" + a.__str__()\n ret_str += \"))\"\n return ret_str\n\n def type_of(self):\n return self.subroutine.returnType\n\n def has_return(self):\n return False\n\n\nSubroutineCall.__module__ = \"pyteal\"\n\n\nclass Subroutine:\n \"\"\"Used to create a PyTeal subroutine from a Python function.\n\n This class is meant to be used as a function decorator. For example:\n\n .. code-block:: python\n\n @Subroutine(TealType.uint64)\n def mySubroutine(a: Expr, b: Expr) -> Expr:\n return a + b\n\n program = Seq([\n App.globalPut(Bytes(\"key\"), mySubroutine(Int(1), Int(2))),\n Approve(),\n ])\n \"\"\"\n\n def __init__(self, returnType: TealType) -> None:\n \"\"\"Define a new subroutine with the given return type.\n\n Args:\n returnType: The type that the return value of this subroutine must conform to.\n TealType.none indicates that this subroutine does not return any value.\n \"\"\"\n self.returnType = returnType\n\n def __call__(self, fnImplementation: Callable[..., Expr]) -> Callable[..., Expr]:\n subroutine = SubroutineDefinition(fnImplementation, self.returnType)\n\n @wraps(fnImplementation)\n def subroutineCall(*args: Expr, **kwargs) -> Expr:\n if len(kwargs) != 0:\n raise TealInputError(\n \"Subroutine cannot be called with keyword arguments. 
Received keyword arguments: {}\".format(\n \",\".join(kwargs.keys())\n )\n )\n return subroutine.invoke(list(args))\n\n return subroutineCall\n\n\nSubroutine.__module__ = \"pyteal\"\n\n\ndef evaluateSubroutine(subroutine: SubroutineDefinition) -> SubroutineDeclaration:\n argumentVars = [ScratchVar() for _ in range(subroutine.argumentCount())]\n loadedArgs = [var.load() for var in argumentVars]\n\n subroutineBody = subroutine.implementation(*loadedArgs)\n\n if not isinstance(subroutineBody, Expr):\n raise TealInputError(\n \"Subroutine function does not return a PyTeal expression. Got type {}\".format(\n type(subroutineBody)\n )\n )\n\n # need to reverse order of argumentVars because the last argument will be on top of the stack\n bodyOps = [var.slot.store() for var in argumentVars[::-1]]\n bodyOps.append(subroutineBody)\n\n return SubroutineDeclaration(subroutine, Seq(bodyOps))\n"
},
{
"alpha_fraction": 0.6266456246376038,
"alphanum_fraction": 0.6276987791061401,
"avg_line_length": 29.14285659790039,
"blob_id": "b210c9963ad357085f6fffb81ce195144e3da403",
"content_id": "b311dc44861153bac50d84a446e4727f4f44dbca",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1899,
"license_type": "permissive",
"max_line_length": 96,
"num_lines": 63,
"path": "/pyteal/ast/tmpl.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from typing import TYPE_CHECKING\n\nfrom ..types import TealType, valid_tmpl\nfrom ..ir import TealOp, Op, TealBlock\nfrom ..errors import TealInternalError\nfrom .leafexpr import LeafExpr\n\nif TYPE_CHECKING:\n from ..compiler import CompileOptions\n\n\nclass Tmpl(LeafExpr):\n \"\"\"Template expression for creating placeholder values.\"\"\"\n\n def __init__(self, op: Op, type: TealType, name: str) -> None:\n super().__init__()\n valid_tmpl(name)\n self.op = op\n self.type = type\n self.name = name\n\n def __str__(self):\n return \"(Tmpl {} {})\".format(self.op, self.name)\n\n def __teal__(self, options: \"CompileOptions\"):\n op = TealOp(self, self.op, self.name)\n return TealBlock.FromOp(options, op)\n\n def type_of(self):\n return self.type\n\n @classmethod\n def Int(cls, placeholder: str):\n \"\"\"Create a new Int template.\n\n Args:\n placeholder: The name to use for this template variable. Must start with `TMPL_` and\n only consist of uppercase alphanumeric characters and underscores.\n \"\"\"\n return cls(Op.int, TealType.uint64, placeholder)\n\n @classmethod\n def Bytes(cls, placeholder: str):\n \"\"\"Create a new Bytes template.\n\n Args:\n placeholder: The name to use for this template variable. Must start with `TMPL_` and\n only consist of uppercase alphanumeric characters and underscores.\n \"\"\"\n return cls(Op.byte, TealType.bytes, placeholder)\n\n @classmethod\n def Addr(cls, placeholder: str):\n \"\"\"Create a new Addr template.\n\n Args:\n placeholder: The name to use for this template variable. Must start with `TMPL_` and\n only consist of uppercase alphanumeric characters and underscores.\n \"\"\"\n return cls(Op.addr, TealType.bytes, placeholder)\n\n\nTmpl.__module__ = \"pyteal\"\n"
},
{
"alpha_fraction": 0.5448718070983887,
"alphanum_fraction": 0.6499999761581421,
"avg_line_length": 25,
"blob_id": "f7077148b330ee8fb50de43b00649f8e009de0e8",
"content_id": "66d8b302f78c082a130594a7b1311648681d1ad0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 780,
"license_type": "permissive",
"max_line_length": 77,
"num_lines": 30,
"path": "/pyteal/ast/addr_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n\ndef test_addr():\n expr = Addr(\"NJUWK3DJNZTWU2LFNRUW4Z3KNFSWY2LOM5VGSZLMNFXGO2TJMVWGS3THMF\")\n assert expr.type_of() == TealType.bytes\n expected = TealSimpleBlock(\n [\n TealOp(\n expr,\n Op.addr,\n \"NJUWK3DJNZTWU2LFNRUW4Z3KNFSWY2LOM5VGSZLMNFXGO2TJMVWGS3THMF\",\n )\n ]\n )\n actual, _ = expr.__teal__(CompileOptions())\n assert actual == expected\n\n\ndef test_addr_invalid():\n with pytest.raises(TealInputError):\n Addr(\"NJUWK3DJNZTWU2LFNRUW4Z3KNFSWY2LOM5VGSZLMNFXGO2TJMVWGS3TH\")\n\n with pytest.raises(TealInputError):\n Addr(\"000000000000000000000000000000000000000000000000000000000\")\n\n with pytest.raises(TealInputError):\n Addr(2)\n"
},
{
"alpha_fraction": 0.6783018708229065,
"alphanum_fraction": 0.6807547211647034,
"avg_line_length": 38.84962463378906,
"blob_id": "27ca05eaaedc702d3329af656677ab708cc81d08",
"content_id": "04fd781dda764630e053cad43ac66afaa3eb44bc",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5300,
"license_type": "permissive",
"max_line_length": 100,
"num_lines": 133,
"path": "/pyteal/compiler/scratchslots.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from typing import Tuple, List, Set, Dict, Optional, cast\n\nfrom ..ast import ScratchSlot, SubroutineDefinition\nfrom ..ir import Mode, TealComponent, TealBlock\nfrom ..errors import TealInputError, TealInternalError\nfrom ..config import NUM_SLOTS\n\n\ndef collectScratchSlots(\n subroutineMapping: Dict[Optional[SubroutineDefinition], List[TealComponent]]\n) -> Dict[Optional[SubroutineDefinition], Set[ScratchSlot]]:\n \"\"\"Find and return all referenced ScratchSlots for each subroutine.\n\n Args:\n subroutineMapping: A mapping from subroutine to the list of TealComponent which make up that\n subroutine. The key None is taken to mean the main program routine.\n\n Returns:\n A dictionary whose keys are the same as subroutineMapping, and whose values are sets of\n ScratchSlots referenced by each subroutine.\n \"\"\"\n\n subroutineSlots: Dict[Optional[SubroutineDefinition], Set[ScratchSlot]] = dict()\n\n for subroutine, ops in subroutineMapping.items():\n slots: Set[ScratchSlot] = set()\n for stmt in ops:\n for slot in stmt.getSlots():\n slots.add(slot)\n subroutineSlots[subroutine] = slots\n\n return subroutineSlots\n\n\ndef assignScratchSlotsToSubroutines(\n subroutineMapping: Dict[Optional[SubroutineDefinition], List[TealComponent]],\n subroutineBlocks: Dict[Optional[SubroutineDefinition], TealBlock],\n) -> Dict[Optional[SubroutineDefinition], Set[int]]:\n \"\"\"Assign scratch slot values for an entire program.\n\n Args:\n subroutineMapping: A mapping from subroutine to the list of TealComponent which make up that\n subroutine. The key None is taken to mean the main program routine. The values of this\n map will be modified in order to assign specific slot values to all referenced scratch\n slots.\n\n Raises:\n TealInternalError: if the scratch slots referenced by the program do not fit into 256 slots,\n or if multiple ScratchSlots request the same slot ID.\n\n Returns:\n A dictionary whose keys are the same as subroutineMapping, and whose values are sets of\n integers representing the assigned IDs of slots which appear only in that subroutine\n (subroutine local slots).\n \"\"\"\n subroutineSlots = collectScratchSlots(subroutineMapping)\n\n # all scratch slots referenced by the program\n allSlots: Set[ScratchSlot] = cast(Set[ScratchSlot], set()).union(\n *subroutineSlots.values()\n )\n\n # all scratch slots referenced by more than 1 subroutine\n globalSlots: Set[ScratchSlot] = set()\n\n # all scratch slots referenced by only 1 subroutine\n localSlots: Dict[Optional[SubroutineDefinition], Set[ScratchSlot]] = dict()\n\n for subroutine, slots in subroutineSlots.items():\n allOtherSlots: Set[ScratchSlot] = set()\n\n for otherSubroutine, otherSubroutineSlots in subroutineSlots.items():\n if subroutine is not otherSubroutine:\n allOtherSlots |= otherSubroutineSlots\n\n globalSlots |= slots & allOtherSlots\n localSlots[subroutine] = slots - globalSlots\n\n if len(allSlots) > NUM_SLOTS:\n # TODO: identify which slots can be reused\n # subroutines which never invoke each other can use the same slot ID for local slots\n raise TealInternalError(\n \"Too many slots in use: {}, maximum is {}\".format(len(allSlots), NUM_SLOTS)\n )\n\n # verify that all local slots are assigned to before being loaded.\n # TODO: for simplicity, the current implementation does not perform this check with global slots\n # as well, but that would be a good improvement\n for subroutine, start in subroutineBlocks.items():\n errors = start.validateSlots(slotsInUse=globalSlots)\n if len(errors) > 0:\n msg = 
\"Encountered {} error{} when assigning slots to subroutine\".format(\n len(errors), \"s\" if len(errors) != 1 else \"\"\n )\n raise TealInternalError(msg) from errors[0]\n\n slotAssignments: Dict[ScratchSlot, int] = dict()\n slotIds: Set[int] = set()\n\n for slot in allSlots:\n if not slot.isReservedSlot:\n continue\n\n # If there are two unique slots with same IDs, raise an error\n if slot.id in slotIds:\n raise TealInternalError(\n \"Slot ID {} has been assigned multiple times\".format(slot.id)\n )\n slotIds.add(slot.id)\n\n nextSlotIndex = 0\n for slot in sorted(allSlots, key=lambda slot: slot.id):\n # Find next vacant slot that compiler can assign to\n while nextSlotIndex in slotIds:\n nextSlotIndex += 1\n\n if slot.isReservedSlot:\n # Slot ids under 256 are manually reserved slots\n slotAssignments[slot] = slot.id\n else:\n slotAssignments[slot] = nextSlotIndex\n slotIds.add(nextSlotIndex)\n\n for ops in subroutineMapping.values():\n for stmt in ops:\n for slot in stmt.getSlots():\n stmt.assignSlot(slot, slotAssignments[slot])\n\n assignedLocalSlots: Dict[Optional[SubroutineDefinition], Set[int]] = dict()\n for subroutine, slots in localSlots.items():\n assignedLocalSlots[subroutine] = set(slotAssignments[slot] for slot in slots)\n\n return assignedLocalSlots\n"
},
{
"alpha_fraction": 0.6593614816665649,
"alphanum_fraction": 0.6693723201751709,
"avg_line_length": 23.476821899414062,
"blob_id": "5c7dab65c7cc9b841245d0f39078bb80b3b0dcd3",
"content_id": "0654ec6bd97511b167ae7a078a504047c439a44d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3696,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 151,
"path": "/pyteal/ast/global_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\nteal2Options = CompileOptions(version=2)\nteal3Options = CompileOptions(version=3)\nteal5Options = CompileOptions(version=5)\n\n\ndef test_global_min_txn_fee():\n expr = Global.min_txn_fee()\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(expr, Op.global_, \"MinTxnFee\")])\n\n actual, _ = expr.__teal__(teal2Options)\n\n assert actual == expected\n\n\ndef test_global_min_balance():\n expr = Global.min_balance()\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(expr, Op.global_, \"MinBalance\")])\n\n actual, _ = expr.__teal__(teal2Options)\n\n assert actual == expected\n\n\ndef test_global_max_txn_life():\n expr = Global.max_txn_life()\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(expr, Op.global_, \"MaxTxnLife\")])\n\n actual, _ = expr.__teal__(teal2Options)\n\n assert actual == expected\n\n\ndef test_global_zero_address():\n expr = Global.zero_address()\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock([TealOp(expr, Op.global_, \"ZeroAddress\")])\n\n actual, _ = expr.__teal__(teal2Options)\n\n assert actual == expected\n\n\ndef test_global_group_size():\n expr = Global.group_size()\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(expr, Op.global_, \"GroupSize\")])\n\n actual, _ = expr.__teal__(teal2Options)\n\n assert actual == expected\n\n\ndef test_global_logic_sig_version():\n expr = Global.logic_sig_version()\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(expr, Op.global_, \"LogicSigVersion\")])\n\n actual, _ = expr.__teal__(teal2Options)\n\n assert actual == expected\n\n\ndef test_global_round():\n expr = Global.round()\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(expr, Op.global_, \"Round\")])\n\n actual, _ = expr.__teal__(teal2Options)\n\n assert actual == expected\n\n\ndef test_global_latest_timestamp():\n expr = Global.latest_timestamp()\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(expr, Op.global_, \"LatestTimestamp\")])\n\n actual, _ = expr.__teal__(teal2Options)\n\n assert actual == expected\n\n\ndef test_global_current_application_id():\n expr = Global.current_application_id()\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(expr, Op.global_, \"CurrentApplicationID\")])\n\n actual, _ = expr.__teal__(teal2Options)\n\n assert actual == expected\n\n\ndef test_global_creator_address():\n expr = Global.creator_address()\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock([TealOp(expr, Op.global_, \"CreatorAddress\")])\n\n actual, _ = expr.__teal__(teal3Options)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal2Options)\n\n\ndef test_global_current_application_address():\n expr = Global.current_application_address()\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock([TealOp(expr, Op.global_, \"CurrentApplicationAddress\")])\n\n actual, _ = expr.__teal__(teal5Options)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_global_group_id():\n expr = Global.group_id()\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock([TealOp(expr, Op.global_, \"GroupID\")])\n\n actual, _ = 
expr.__teal__(teal5Options)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n"
},
{
"alpha_fraction": 0.5725451111793518,
"alphanum_fraction": 0.6034067869186401,
"avg_line_length": 28.011627197265625,
"blob_id": "298c631fd38f8114c2efa75e8e14dc06f28fd190",
"content_id": "3f0bb49f471d14fc9197aecb29de99b6e171f787",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4993,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 172,
"path": "/pyteal/ast/bytes_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\noptions = CompileOptions()\n\n\ndef test_bytes_base32_no_padding():\n for s in (\n \"ME\",\n \"MFRA\",\n \"MFRGG\",\n \"MFRGGZA\",\n \"MFRGGZDF\",\n \"7Z5PWO2C6LFNQFGHWKSK5H47IQP5OJW2M3HA2QPXTY3WTNP5NU2MHBW27M\",\n ):\n expr = Bytes(\"base32\", s)\n assert expr.type_of() == TealType.bytes\n expected = TealSimpleBlock([TealOp(expr, Op.byte, \"base32(\" + s + \")\")])\n actual, _ = expr.__teal__(options)\n assert actual == expected\n\n\ndef test_bytes_base32_padding():\n for s in (\n \"ME======\",\n \"MFRA====\",\n \"MFRGG===\",\n \"MFRGGZA=\",\n \"7Z5PWO2C6LFNQFGHWKSK5H47IQP5OJW2M3HA2QPXTY3WTNP5NU2MHBW27M======\",\n ):\n expr = Bytes(\"base32\", s)\n assert expr.type_of() == TealType.bytes\n expected = TealSimpleBlock([TealOp(expr, Op.byte, \"base32(\" + s + \")\")])\n actual, _ = expr.__teal__(options)\n assert actual == expected\n\n\ndef test_bytes_base32_empty():\n expr = Bytes(\"base32\", \"\")\n assert expr.type_of() == TealType.bytes\n expected = TealSimpleBlock([TealOp(expr, Op.byte, \"base32()\")])\n actual, _ = expr.__teal__(options)\n assert actual == expected\n\n\ndef test_bytes_base64():\n expr = Bytes(\"base64\", \"Zm9vYmE=\")\n assert expr.type_of() == TealType.bytes\n expected = TealSimpleBlock([TealOp(expr, Op.byte, \"base64(Zm9vYmE=)\")])\n actual, _ = expr.__teal__(options)\n assert actual == expected\n\n\ndef test_bytes_base64_empty():\n expr = Bytes(\"base64\", \"\")\n assert expr.type_of() == TealType.bytes\n expected = TealSimpleBlock([TealOp(expr, Op.byte, \"base64()\")])\n actual, _ = expr.__teal__(options)\n assert actual == expected\n\n\ndef test_bytes_base16():\n expr = Bytes(\"base16\", \"A21212EF\")\n assert expr.type_of() == TealType.bytes\n expected = TealSimpleBlock([TealOp(expr, Op.byte, \"0xA21212EF\")])\n actual, _ = expr.__teal__(options)\n assert actual == expected\n\n\ndef test_bytes_base16_prefix():\n expr = Bytes(\"base16\", \"0xA21212EF\")\n assert expr.type_of() == TealType.bytes\n expected = TealSimpleBlock([TealOp(expr, Op.byte, \"0xA21212EF\")])\n actual, _ = expr.__teal__(options)\n assert actual == expected\n\n\ndef test_bytes_base16_empty():\n expr = Bytes(\"base16\", \"\")\n assert expr.type_of() == TealType.bytes\n expected = TealSimpleBlock([TealOp(expr, Op.byte, \"0x\")])\n actual, _ = expr.__teal__(options)\n assert actual == expected\n\n\ndef test_bytes_utf8():\n expr = Bytes(\"hello world\")\n assert expr.type_of() == TealType.bytes\n expected = TealSimpleBlock([TealOp(expr, Op.byte, '\"hello world\"')])\n actual, _ = expr.__teal__(options)\n assert actual == expected\n\n\ndef test_bytes_utf8_special_chars():\n expr = Bytes(\"\\t \\n \\r\\n \\\\ \\\" ' 😀\")\n assert expr.type_of() == TealType.bytes\n expected = TealSimpleBlock(\n [TealOp(expr, Op.byte, '\"\\\\t \\\\n \\\\r\\\\n \\\\\\\\ \\\\\" \\' \\\\xf0\\\\x9f\\\\x98\\\\x80\"')]\n )\n actual, _ = expr.__teal__(options)\n assert actual == expected\n\n\ndef test_bytes_utf8_empty():\n expr = Bytes(\"\")\n assert expr.type_of() == TealType.bytes\n expected = TealSimpleBlock([TealOp(expr, Op.byte, '\"\"')])\n actual, _ = expr.__teal__(options)\n assert actual == expected\n\n\ndef test_bytes_raw():\n for value in (b\"hello world\", bytearray(b\"hello world\")):\n expr = Bytes(value)\n assert expr.type_of() == TealType.bytes\n expected = TealSimpleBlock([TealOp(expr, Op.byte, \"0x\" + value.hex())])\n actual, _ = expr.__teal__(options)\n assert 
actual == expected\n\n\ndef test_bytes_raw_empty():\n for value in (b\"\", bytearray(b\"\")):\n expr = Bytes(value)\n assert expr.type_of() == TealType.bytes\n expected = TealSimpleBlock([TealOp(expr, Op.byte, \"0x\")])\n actual, _ = expr.__teal__(options)\n assert actual == expected\n\n\ndef test_bytes_invalid():\n with pytest.raises(TealInputError):\n Bytes(\"base16\", b\"FF\")\n\n with pytest.raises(TealInputError):\n Bytes(b\"base16\", \"FF\")\n\n with pytest.raises(TealInputError):\n Bytes(\"base23\", \"\")\n\n with pytest.raises(TealInputError):\n Bytes(\"base32\", \"Zm9vYmE=\")\n\n with pytest.raises(TealInputError):\n Bytes(\"base32\", \"MFRGG====\")\n\n with pytest.raises(TealInputError):\n Bytes(\"base32\", \"MFRGG==\")\n\n with pytest.raises(TealInputError):\n Bytes(\"base32\", \"CCCCCC==\")\n\n with pytest.raises(TealInputError):\n Bytes(\"base32\", \"CCCCCC\")\n\n with pytest.raises(TealInputError):\n Bytes(\"base32\", \"C=======\")\n\n with pytest.raises(TealInputError):\n Bytes(\"base32\", \"C\")\n\n with pytest.raises(TealInputError):\n Bytes(\"base32\", \"=\")\n\n with pytest.raises(TealInputError):\n Bytes(\"base64\", \"?????\")\n\n with pytest.raises(TealInputError):\n Bytes(\"base16\", \"7Z5PWO2C6LFNQFGHWKSK5H47IQP5OJW2M3HA2QPXTY3WTNP5NU2MHBW27M\")\n"
},
{
"alpha_fraction": 0.5639768242835999,
"alphanum_fraction": 0.573896586894989,
"avg_line_length": 32.98484802246094,
"blob_id": "5e6563bec92f7e70c6416a27c6dc3e0e4ff8bd85",
"content_id": "3b8aefc30574376b8e5c424954cfa6112705a009",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8972,
"license_type": "permissive",
"max_line_length": 95,
"num_lines": 264,
"path": "/pyteal/ast/asset.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from enum import Enum\n\nfrom ..types import TealType, require_type\nfrom ..ir import Op\nfrom .expr import Expr\nfrom .leafexpr import LeafExpr\nfrom .maybe import MaybeValue\n\n\nclass AssetHolding:\n @classmethod\n def balance(cls, account: Expr, asset: Expr) -> MaybeValue:\n \"\"\"Get the amount of an asset held by an account.\n\n Args:\n account: An index into Txn.Accounts that corresponds to the account to check,\n must be evaluated to uint64 (or, since v4, an account address that appears in\n Txn.Accounts or is Txn.Sender, must be evaluated to bytes).\n asset: The ID of the asset to get, must be evaluated to uint64 (or, since v4,\n a Txn.ForeignAssets offset).\n \"\"\"\n require_type(account.type_of(), TealType.anytype)\n require_type(asset.type_of(), TealType.uint64)\n return MaybeValue(\n Op.asset_holding_get,\n TealType.uint64,\n immediate_args=[\"AssetBalance\"],\n args=[account, asset],\n )\n\n @classmethod\n def frozen(cls, account: Expr, asset: Expr) -> MaybeValue:\n \"\"\"Check if an asset is frozen for an account.\n\n A value of 1 indicates frozen and 0 indicates not frozen.\n\n Args:\n account: An index into Txn.Accounts that corresponds to the account to check,\n must be evaluated to uint64 (or, since v4, an account address that appears in\n Txn.Accounts or is Txn.Sender, must be evaluated to bytes).\n asset: The ID of the asset to get, must be evaluated to uint64 (or, since v4,\n a Txn.ForeignAssets offset).\n \"\"\"\n require_type(account.type_of(), TealType.anytype)\n require_type(asset.type_of(), TealType.uint64)\n return MaybeValue(\n Op.asset_holding_get,\n TealType.uint64,\n immediate_args=[\"AssetFrozen\"],\n args=[account, asset],\n )\n\n\nAssetHolding.__module__ = \"pyteal\"\n\n\nclass AssetParam:\n @classmethod\n def total(cls, asset: Expr) -> MaybeValue:\n \"\"\"Get the total number of units of an asset.\n\n Args:\n asset: An index into Txn.ForeignAssets that corresponds to the asset to check,\n must be evaluated to uint64 (or since v4, an asset ID that appears in\n Txn.ForeignAssets).\n \"\"\"\n require_type(asset.type_of(), TealType.uint64)\n return MaybeValue(\n Op.asset_params_get,\n TealType.uint64,\n immediate_args=[\"AssetTotal\"],\n args=[asset],\n )\n\n @classmethod\n def decimals(cls, asset: Expr) -> MaybeValue:\n \"\"\"Get the number of decimals for an asset.\n\n Args:\n asset: An index into Txn.ForeignAssets that corresponds to the asset to check,\n must be evaluated to uint64 (or since v4, an asset ID that appears in\n Txn.ForeignAssets).\n \"\"\"\n require_type(asset.type_of(), TealType.uint64)\n return MaybeValue(\n Op.asset_params_get,\n TealType.uint64,\n immediate_args=[\"AssetDecimals\"],\n args=[asset],\n )\n\n @classmethod\n def defaultFrozen(cls, asset: Expr) -> MaybeValue:\n \"\"\"Check if an asset is frozen by default.\n\n Args:\n asset: An index into Txn.ForeignAssets that corresponds to the asset to check,\n must be evaluated to uint64 (or since v4, an asset ID that appears in\n Txn.ForeignAssets).\n \"\"\"\n require_type(asset.type_of(), TealType.uint64)\n return MaybeValue(\n Op.asset_params_get,\n TealType.uint64,\n immediate_args=[\"AssetDefaultFrozen\"],\n args=[asset],\n )\n\n @classmethod\n def unitName(cls, asset: Expr) -> MaybeValue:\n \"\"\"Get the unit name of an asset.\n\n Args:\n asset: An index into Txn.ForeignAssets that corresponds to the asset to check,\n must be evaluated to uint64 (or since v4, an asset ID that appears in\n Txn.ForeignAssets).\n \"\"\"\n require_type(asset.type_of(), TealType.uint64)\n 
return MaybeValue(\n Op.asset_params_get,\n TealType.bytes,\n immediate_args=[\"AssetUnitName\"],\n args=[asset],\n )\n\n @classmethod\n def name(cls, asset: Expr) -> MaybeValue:\n \"\"\"Get the name of an asset.\n\n Args:\n asset: An index into Txn.ForeignAssets that corresponds to the asset to check,\n must be evaluated to uint64 (or since v4, an asset ID that appears in\n Txn.ForeignAssets).\n \"\"\"\n require_type(asset.type_of(), TealType.uint64)\n return MaybeValue(\n Op.asset_params_get,\n TealType.bytes,\n immediate_args=[\"AssetName\"],\n args=[asset],\n )\n\n @classmethod\n def url(cls, asset: Expr) -> MaybeValue:\n \"\"\"Get the URL of an asset.\n\n Args:\n asset: An index into Txn.ForeignAssets that corresponds to the asset to check,\n must be evaluated to uint64 (or since v4, an asset ID that appears in\n Txn.ForeignAssets).\n \"\"\"\n require_type(asset.type_of(), TealType.uint64)\n return MaybeValue(\n Op.asset_params_get,\n TealType.bytes,\n immediate_args=[\"AssetURL\"],\n args=[asset],\n )\n\n @classmethod\n def metadataHash(cls, asset: Expr) -> MaybeValue:\n \"\"\"Get the arbitrary commitment for an asset.\n\n If set, this will be 32 bytes long.\n\n Args:\n asset: An index into Txn.ForeignAssets that corresponds to the asset to check,\n must be evaluated to uint64 (or since v4, an asset ID that appears in\n Txn.ForeignAssets).\n \"\"\"\n require_type(asset.type_of(), TealType.uint64)\n return MaybeValue(\n Op.asset_params_get,\n TealType.bytes,\n immediate_args=[\"AssetMetadataHash\"],\n args=[asset],\n )\n\n @classmethod\n def manager(cls, asset: Expr) -> MaybeValue:\n \"\"\"Get the manager address for an asset.\n\n Args:\n asset: An index into Txn.ForeignAssets that corresponds to the asset to check,\n must be evaluated to uint64 (or since v4, an asset ID that appears in\n Txn.ForeignAssets).\n \"\"\"\n require_type(asset.type_of(), TealType.uint64)\n return MaybeValue(\n Op.asset_params_get,\n TealType.bytes,\n immediate_args=[\"AssetManager\"],\n args=[asset],\n )\n\n @classmethod\n def reserve(cls, asset: Expr) -> MaybeValue:\n \"\"\"Get the reserve address for an asset.\n\n Args:\n asset: An index into Txn.ForeignAssets that corresponds to the asset to check,\n must be evaluated to uint64 (or since v4, an asset ID that appears in\n Txn.ForeignAssets).\n \"\"\"\n require_type(asset.type_of(), TealType.uint64)\n return MaybeValue(\n Op.asset_params_get,\n TealType.bytes,\n immediate_args=[\"AssetReserve\"],\n args=[asset],\n )\n\n @classmethod\n def freeze(cls, asset: Expr) -> MaybeValue:\n \"\"\"Get the freeze address for an asset.\n\n Args:\n asset: An index into Txn.ForeignAssets that corresponds to the asset to check,\n must be evaluated to uint64 (or since v4, an asset ID that appears in\n Txn.ForeignAssets).\n \"\"\"\n require_type(asset.type_of(), TealType.uint64)\n return MaybeValue(\n Op.asset_params_get,\n TealType.bytes,\n immediate_args=[\"AssetFreeze\"],\n args=[asset],\n )\n\n @classmethod\n def clawback(cls, asset: Expr) -> MaybeValue:\n \"\"\"Get the clawback address for an asset.\n\n Args:\n asset: An index into Txn.ForeignAssets that corresponds to the asset to check,\n must be evaluated to uint64 (or since v4, an asset ID that appears in\n Txn.ForeignAssets).\n \"\"\"\n require_type(asset.type_of(), TealType.uint64)\n return MaybeValue(\n Op.asset_params_get,\n TealType.bytes,\n immediate_args=[\"AssetClawback\"],\n args=[asset],\n )\n\n @classmethod\n def creator(cls, asset: Expr) -> MaybeValue:\n \"\"\"Get the creator address for an asset.\n\n 
Args:\n asset: An index into Txn.ForeignAssets that corresponds to the asset to check. Must\n evaluate to uint64.\n \"\"\"\n require_type(asset.type_of(), TealType.uint64)\n return MaybeValue(\n Op.asset_params_get,\n TealType.bytes,\n immediate_args=[\"AssetCreator\"],\n args=[asset],\n )\n\n\nAssetParam.__module__ = \"pyteal\"\n"
},
{
"alpha_fraction": 0.5685504674911499,
"alphanum_fraction": 0.593406617641449,
"avg_line_length": 26.010601043701172,
"blob_id": "cac44b16767ddc728dcadee1b3e335e22e2f603c",
"content_id": "c532f299f15ebd324fae997870e5e72bb82b19aa",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7644,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 283,
"path": "/pyteal/ast/substring_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\nteal2Options = CompileOptions(version=2)\nteal3Options = CompileOptions(version=3)\nteal4Options = CompileOptions(version=4)\nteal5Options = CompileOptions(version=5)\n\n\ndef test_substring_immediate_v2():\n args = [Bytes(\"my string\"), Int(0), Int(2)]\n expr = Substring(args[0], args[1], args[2])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, '\"my string\"'),\n TealOp(expr, Op.substring, 0, 2),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_substring_immediate_v5():\n args = [Bytes(\"my string\"), Int(1), Int(2)]\n expr = Substring(args[0], args[1], args[2])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, '\"my string\"'),\n TealOp(expr, Op.extract, 1, 1),\n ]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_substring_to_extract():\n my_string = \"a\" * 257\n args = [Bytes(my_string), Int(255), Int(257)]\n expr = Substring(args[0], args[1], args[2])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, '\"{my_string}\"'.format(my_string=my_string)),\n TealOp(expr, Op.extract, 255, 2),\n ]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_substring_stack_v2():\n my_string = \"a\" * 257\n args = [Bytes(my_string), Int(256), Int(257)]\n expr = Substring(args[0], args[1], args[2])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, '\"{my_string}\"'.format(my_string=my_string)),\n TealOp(args[1], Op.int, 256),\n TealOp(args[2], Op.int, 257),\n TealOp(expr, Op.substring3),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_substring_stack_v5():\n my_string = \"a\" * 257\n args = [Bytes(my_string), Int(256), Int(257)]\n expr = Substring(args[0], args[1], args[2])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, '\"{my_string}\"'.format(my_string=my_string)),\n TealOp(args[1], Op.int, 256),\n TealOp(Int(1), Op.int, 1),\n TealOp(expr, Op.extract3),\n ]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_zero_length_substring_immediate():\n my_string = \"a\" * 257\n args = [Bytes(my_string), Int(1), Int(1)]\n expr = Substring(args[0], args[1], args[2])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, '\"{my_string}\"'.format(my_string=my_string)),\n TealOp(expr, Op.substring, 1, 1),\n ]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_substring_invalid():\n with 
pytest.raises(TealTypeError):\n Substring(Int(0), Int(0), Int(2))\n\n with pytest.raises(TealTypeError):\n Substring(Bytes(\"my string\"), Txn.sender(), Int(2))\n\n with pytest.raises(TealTypeError):\n Substring(Bytes(\"my string\"), Int(0), Txn.sender())\n\n with pytest.raises(Exception):\n Substring(Bytes(\"my string\"), Int(1), Int(0)).__teal__(teal5Options)\n\n\ndef test_extract_immediate():\n args = [Bytes(\"my string\"), Int(0), Int(2)]\n expr = Extract(args[0], args[1], args[2])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, '\"my string\"'),\n TealOp(expr, Op.extract, 0, 2),\n ]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal4Options)\n\n\ndef test_extract_zero():\n args = [Bytes(\"my string\"), Int(1), Int(0)]\n expr = Extract(args[0], args[1], args[2])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, '\"my string\"'),\n TealOp(args[1], Op.int, 1),\n TealOp(args[2], Op.int, 0),\n TealOp(expr, Op.extract3),\n ]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal4Options)\n\n\ndef test_extract_stack():\n my_string = \"*\" * 257\n args = [Bytes(my_string), Int(256), Int(257)]\n expr = Extract(args[0], args[1], args[2])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, '\"{my_string}\"'.format(my_string=my_string)),\n TealOp(args[1], Op.int, 256),\n TealOp(args[2], Op.int, 257),\n TealOp(expr, Op.extract3),\n ]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal4Options)\n\n\ndef test_extract_invalid():\n with pytest.raises(TealTypeError):\n Extract(Int(0), Int(0), Int(2))\n\n with pytest.raises(TealTypeError):\n Extract(Bytes(\"my string\"), Txn.sender(), Int(2))\n\n with pytest.raises(TealTypeError):\n Extract(Bytes(\"my string\"), Int(0), Txn.sender())\n\n\ndef test_suffix_immediate():\n args = [Bytes(\"my string\"), Int(1)]\n expr = Suffix(args[0], args[1])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, '\"my string\"'),\n TealOp(expr, Op.extract, 1, 0),\n ]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_suffix_stack():\n my_string = \"*\" * 1024\n args = [Bytes(my_string), Int(257)]\n expr = Suffix(args[0], args[1])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, '\"{my_string}\"'.format(my_string=my_string)),\n TealOp(args[1], Op.int, 257),\n TealOp(expr, Op.dig, 1),\n TealOp(expr, Op.len),\n TealOp(expr, Op.substring3),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_suffix_invalid():\n with pytest.raises(TealTypeError):\n Suffix(Int(0), Int(0))\n\n with pytest.raises(TealTypeError):\n Suffix(Bytes(\"my string\"), Txn.sender())\n"
},
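A point the tests above encode only implicitly is worth stating: `Substring` takes start and *end* indices, while `Extract` (TEAL v5+) takes a start index and a *length*, and `Suffix` takes only a start. A minimal sketch contrasting the three, assuming a TEAL v5 target (the sample string and indices are illustrative):

```python
from pyteal import Bytes, Extract, Int, Mode, Substring, Suffix, compileTeal

data = Bytes("my string")

# Substring(value, start, end): end is an exclusive index -> "y s"
sub = Substring(data, Int(1), Int(4))

# Extract(value, start, length): the same slice, expressed as a length -> "y s"
ext = Extract(data, Int(1), Int(3))

# Suffix(value, start): everything from start onward -> "y string"
suf = Suffix(data, Int(1))

# Eq works on byte slices, so this yields a uint64 result;
# using Extract makes the whole program require TEAL v5.
print(compileTeal(sub == ext, mode=Mode.Application, version=5))
```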
{
"alpha_fraction": 0.5286855697631836,
"alphanum_fraction": 0.5370370149612427,
"avg_line_length": 16.1055908203125,
"blob_id": "8909019dfa2e93f8f1d5b934e94b3902b55f7235",
"content_id": "242eed8012c93618b36c012bfc48fba0ca310c50",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2754,
"license_type": "permissive",
"max_line_length": 86,
"num_lines": 161,
"path": "/pyteal/__init__.pyi",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "## File generated from scripts/generate_init.py.\n## DO NOT EDIT DIRECTLY\n\nfrom .ast import *\nfrom .ast import __all__ as ast_all\nfrom .ir import *\nfrom .ir import __all__ as ir_all\nfrom .compiler import (\n MAX_TEAL_VERSION,\n MIN_TEAL_VERSION,\n DEFAULT_TEAL_VERSION,\n CompileOptions,\n compileTeal,\n)\nfrom .types import TealType\nfrom .errors import TealInternalError, TealTypeError, TealInputError, TealCompileError\nfrom .config import MAX_GROUP_SIZE, NUM_SLOTS\n\n__all__ = [\n \"Expr\",\n \"LeafExpr\",\n \"Addr\",\n \"Bytes\",\n \"Int\",\n \"EnumInt\",\n \"Arg\",\n \"TxnType\",\n \"TxnField\",\n \"TxnExpr\",\n \"TxnaExpr\",\n \"TxnArray\",\n \"TxnObject\",\n \"Txn\",\n \"GtxnExpr\",\n \"GtxnaExpr\",\n \"TxnGroup\",\n \"Gtxn\",\n \"GeneratedID\",\n \"ImportScratchValue\",\n \"Global\",\n \"GlobalField\",\n \"App\",\n \"AppField\",\n \"OnComplete\",\n \"AppParam\",\n \"AssetHolding\",\n \"AssetParam\",\n \"InnerTxnBuilder\",\n \"InnerTxn\",\n \"Array\",\n \"Tmpl\",\n \"Nonce\",\n \"UnaryExpr\",\n \"Btoi\",\n \"Itob\",\n \"Len\",\n \"BitLen\",\n \"Sha256\",\n \"Sha512_256\",\n \"Keccak256\",\n \"Not\",\n \"BitwiseNot\",\n \"Sqrt\",\n \"Pop\",\n \"Balance\",\n \"MinBalance\",\n \"BinaryExpr\",\n \"Add\",\n \"Minus\",\n \"Mul\",\n \"Div\",\n \"Mod\",\n \"Exp\",\n \"BitwiseAnd\",\n \"BitwiseOr\",\n \"BitwiseXor\",\n \"ShiftLeft\",\n \"ShiftRight\",\n \"Eq\",\n \"Neq\",\n \"Lt\",\n \"Le\",\n \"Gt\",\n \"Ge\",\n \"GetBit\",\n \"GetByte\",\n \"Ed25519Verify\",\n \"Substring\",\n \"Extract\",\n \"Suffix\",\n \"SetBit\",\n \"SetByte\",\n \"NaryExpr\",\n \"And\",\n \"Or\",\n \"Concat\",\n \"WideRatio\",\n \"If\",\n \"Cond\",\n \"Seq\",\n \"Assert\",\n \"Err\",\n \"Return\",\n \"Approve\",\n \"Reject\",\n \"Subroutine\",\n \"SubroutineDefinition\",\n \"SubroutineDeclaration\",\n \"SubroutineCall\",\n \"ScratchSlot\",\n \"ScratchLoad\",\n \"ScratchStore\",\n \"ScratchStackStore\",\n \"ScratchVar\",\n \"MaybeValue\",\n \"BytesAdd\",\n \"BytesMinus\",\n \"BytesDiv\",\n \"BytesMul\",\n \"BytesMod\",\n \"BytesAnd\",\n \"BytesOr\",\n \"BytesXor\",\n \"BytesEq\",\n \"BytesNeq\",\n \"BytesLt\",\n \"BytesLe\",\n \"BytesGt\",\n \"BytesGe\",\n \"BytesNot\",\n \"BytesZero\",\n \"ExtractUint16\",\n \"ExtractUint32\",\n \"ExtractUint64\",\n \"Log\",\n \"While\",\n \"For\",\n \"Break\",\n \"Continue\",\n \"Op\",\n \"Mode\",\n \"TealComponent\",\n \"TealOp\",\n \"TealLabel\",\n \"TealBlock\",\n \"TealSimpleBlock\",\n \"TealConditionalBlock\",\n \"LabelReference\",\n \"MAX_TEAL_VERSION\",\n \"MIN_TEAL_VERSION\",\n \"DEFAULT_TEAL_VERSION\",\n \"CompileOptions\",\n \"compileTeal\",\n \"TealType\",\n \"TealInternalError\",\n \"TealTypeError\",\n \"TealInputError\",\n \"TealCompileError\",\n \"MAX_GROUP_SIZE\",\n \"NUM_SLOTS\",\n]\n"
},
{
"alpha_fraction": 0.661272406578064,
"alphanum_fraction": 0.6628783345222473,
"avg_line_length": 33.1561164855957,
"blob_id": "904b865b6b1f32801466d6f66449c0b97ca0689d",
"content_id": "1644e8343f98722314bab749888cdc7165f1ed21",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8095,
"license_type": "permissive",
"max_line_length": 119,
"num_lines": 237,
"path": "/pyteal/compiler/compiler.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from typing import List, Tuple, Set, Dict, Optional, cast\n\nfrom ..types import TealType\nfrom ..ast import (\n Expr,\n Return,\n Seq,\n ScratchSlot,\n SubroutineDefinition,\n SubroutineDeclaration,\n)\nfrom ..ir import Mode, TealComponent, TealOp, TealBlock, TealSimpleBlock\nfrom ..errors import TealInputError, TealInternalError\n\nfrom .sort import sortBlocks\nfrom .flatten import flattenBlocks, flattenSubroutines\nfrom .scratchslots import assignScratchSlotsToSubroutines\nfrom .subroutines import (\n findRecursionPoints,\n spillLocalSlotsDuringRecursion,\n resolveSubroutines,\n)\nfrom .constants import createConstantBlocks\n\nMAX_TEAL_VERSION = 5\nMIN_TEAL_VERSION = 2\nDEFAULT_TEAL_VERSION = MIN_TEAL_VERSION\n\n\nclass CompileOptions:\n def __init__(\n self,\n *,\n mode: Mode = Mode.Signature,\n version: int = DEFAULT_TEAL_VERSION,\n ) -> None:\n self.mode = mode\n self.version = version\n\n self.currentSubroutine: Optional[SubroutineDefinition] = None\n\n self.breakBlocksStack: List[List[TealSimpleBlock]] = []\n self.continueBlocksStack: List[List[TealSimpleBlock]] = []\n\n def setSubroutine(self, subroutine: Optional[SubroutineDefinition]) -> None:\n self.currentSubroutine = subroutine\n\n def enterLoop(self) -> None:\n self.breakBlocksStack.append([])\n self.continueBlocksStack.append([])\n\n def isInLoop(self) -> bool:\n return len(self.breakBlocksStack) != 0\n\n def addLoopBreakBlock(self, block: TealSimpleBlock) -> None:\n if len(self.breakBlocksStack) == 0:\n raise TealInternalError(\"Cannot add break block when no loop is active\")\n self.breakBlocksStack[-1].append(block)\n\n def addLoopContinueBlock(self, block: TealSimpleBlock) -> None:\n if len(self.continueBlocksStack) == 0:\n raise TealInternalError(\"Cannot add continue block when no loop is active\")\n self.continueBlocksStack[-1].append(block)\n\n def exitLoop(self) -> Tuple[List[TealSimpleBlock], List[TealSimpleBlock]]:\n if len(self.breakBlocksStack) == 0 or len(self.continueBlocksStack) == 0:\n raise TealInternalError(\"Cannot exit loop when no loop is active\")\n return (self.breakBlocksStack.pop(), self.continueBlocksStack.pop())\n\n\ndef verifyOpsForVersion(teal: List[TealComponent], version: int):\n \"\"\"Verify that all TEAL operations are allowed in the specified version.\n\n Args:\n teal: Code to check.\n mode: The version to check against.\n\n Raises:\n TealInputError: if teal contains an operation not allowed in version.\n \"\"\"\n for stmt in teal:\n if isinstance(stmt, TealOp):\n op = stmt.getOp()\n if op.min_version > version:\n raise TealInputError(\n \"Op not supported in TEAL version {}: {}. 
Minimum required version is {}\".format(\n version, op, op.min_version\n )\n )\n\n\ndef verifyOpsForMode(teal: List[TealComponent], mode: Mode):\n \"\"\"Verify that all TEAL operations are allowed in mode.\n\n Args:\n teal: Code to check.\n mode: The mode to check against.\n\n Raises:\n TealInputError: if teal contains an operation not allowed in mode.\n \"\"\"\n for stmt in teal:\n if isinstance(stmt, TealOp):\n op = stmt.getOp()\n if not op.mode & mode:\n raise TealInputError(\n \"Op not supported in {} mode: {}\".format(mode.name, op)\n )\n\n\ndef compileSubroutine(\n ast: Expr,\n options: CompileOptions,\n subroutineMapping: Dict[Optional[SubroutineDefinition], List[TealComponent]],\n subroutineGraph: Dict[SubroutineDefinition, Set[SubroutineDefinition]],\n subroutineBlocks: Dict[Optional[SubroutineDefinition], TealBlock],\n) -> None:\n currentSubroutine = (\n cast(SubroutineDeclaration, ast).subroutine\n if isinstance(ast, SubroutineDeclaration)\n else None\n )\n\n if not ast.has_return():\n if ast.type_of() == TealType.none:\n ast = Seq([ast, Return()])\n else:\n ast = Return(ast)\n\n options.setSubroutine(currentSubroutine)\n start, end = ast.__teal__(options)\n start.addIncoming()\n start.validateTree()\n\n start = TealBlock.NormalizeBlocks(start)\n start.validateTree()\n\n order = sortBlocks(start, end)\n teal = flattenBlocks(order)\n\n verifyOpsForVersion(teal, options.version)\n verifyOpsForMode(teal, options.mode)\n\n subroutineMapping[currentSubroutine] = teal\n subroutineBlocks[currentSubroutine] = start\n\n referencedSubroutines: Set[SubroutineDefinition] = set()\n for stmt in teal:\n for subroutine in stmt.getSubroutines():\n referencedSubroutines.add(subroutine)\n\n if currentSubroutine is not None:\n subroutineGraph[currentSubroutine] = referencedSubroutines\n\n newSubroutines = referencedSubroutines - subroutineMapping.keys()\n for subroutine in sorted(newSubroutines, key=lambda subroutine: subroutine.id):\n compileSubroutine(\n subroutine.getDeclaration(),\n options,\n subroutineMapping,\n subroutineGraph,\n subroutineBlocks,\n )\n\n\ndef compileTeal(\n ast: Expr,\n mode: Mode,\n *,\n version: int = DEFAULT_TEAL_VERSION,\n assembleConstants: bool = False,\n) -> str:\n \"\"\"Compile a PyTeal expression into TEAL assembly.\n\n Args:\n ast: The PyTeal expression to assemble.\n mode: The mode of the program to assemble. Must be Signature or Application.\n version (optional): The TEAL version used to assemble the program. This will determine which\n expressions and fields are able to be used in the program and how expressions compile to\n TEAL opcodes. Defaults to 2 if not included.\n assembleConstants (optional): When true, the compiler will produce a program with fully\n assembled constants, rather than using the pseudo-ops `int`, `byte`, and `addr`. These\n constants will be assembled in the most space-efficient way, so enabling this may reduce\n the compiled program's size. Enabling this option requires a minimum TEAL version of 3.\n Defaults to false.\n\n Returns:\n A TEAL assembly program compiled from the input expression.\n\n Raises:\n TealInputError: if an operation in ast is not supported by the supplied mode and version.\n TealInternalError: if an internal error is encounter during compilation.\n \"\"\"\n if (\n not (MIN_TEAL_VERSION <= version <= MAX_TEAL_VERSION)\n or type(version) is not int\n ):\n raise TealInputError(\n \"Unsupported TEAL version: {}. 
Excepted an integer in the range [{}, {}]\".format(\n version, MIN_TEAL_VERSION, MAX_TEAL_VERSION\n )\n )\n\n options = CompileOptions(mode=mode, version=version)\n\n subroutineMapping: Dict[\n Optional[SubroutineDefinition], List[TealComponent]\n ] = dict()\n subroutineGraph: Dict[SubroutineDefinition, Set[SubroutineDefinition]] = dict()\n subroutineBlocks: Dict[Optional[SubroutineDefinition], TealBlock] = dict()\n compileSubroutine(\n ast, options, subroutineMapping, subroutineGraph, subroutineBlocks\n )\n\n localSlotAssignments = assignScratchSlotsToSubroutines(\n subroutineMapping, subroutineBlocks\n )\n\n spillLocalSlotsDuringRecursion(\n version, subroutineMapping, subroutineGraph, localSlotAssignments\n )\n\n subroutineLabels = resolveSubroutines(subroutineMapping)\n teal = flattenSubroutines(subroutineMapping, subroutineLabels)\n\n if assembleConstants:\n if version < 3:\n raise TealInternalError(\n \"The minimum TEAL version required to enable assembleConstants is 3. The current version is {}\".format(\n version\n )\n )\n teal = createConstantBlocks(teal)\n\n lines = [\"#pragma version {}\".format(version)]\n lines += [i.assemble() for i in teal]\n return \"\\n\".join(lines)\n"
},
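For context, `compileTeal` above is the public entry point of this module; a minimal usage sketch (the one-line program is illustrative):

```python
from pyteal import Int, Mode, compileTeal

# A trivial always-approve program: a non-none top-level expression
# is wrapped in Return() by compileSubroutine.
program = Int(1)

# assembleConstants folds int/byte pseudo-ops into constant blocks
# and therefore needs version >= 3.
teal = compileTeal(program, mode=Mode.Application, version=5, assembleConstants=True)
print(teal)  # first line: "#pragma version 5"
```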
{
"alpha_fraction": 0.5652705430984497,
"alphanum_fraction": 0.5891779661178589,
"avg_line_length": 24.643762588500977,
"blob_id": "d35199ac3831827190a5a97a0a00ce6004cb78e6",
"content_id": "9602be2085634212bc519f2ff26774808dab7d37",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 38440,
"license_type": "permissive",
"max_line_length": 88,
"num_lines": 1499,
"path": "/pyteal/ast/binaryexpr_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\nteal2Options = CompileOptions(version=2)\nteal3Options = CompileOptions(version=3)\nteal4Options = CompileOptions(version=4)\nteal5Options = CompileOptions(version=5)\n\n\ndef test_add():\n args = [Int(2), Int(3)]\n expr = Add(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [TealOp(args[0], Op.int, 2), TealOp(args[1], Op.int, 3), TealOp(expr, Op.add)]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_add_overload():\n args = [Int(2), Int(3), Int(4)]\n expr = args[0] + args[1] + args[2]\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 2),\n TealOp(args[1], Op.int, 3),\n TealOp(None, Op.add),\n TealOp(args[2], Op.int, 4),\n TealOp(None, Op.add),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_add_invalid():\n with pytest.raises(TealTypeError):\n Add(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n Add(Txn.sender(), Int(2))\n\n\ndef test_minus():\n args = [Int(5), Int(6)]\n expr = Minus(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [TealOp(args[0], Op.int, 5), TealOp(args[1], Op.int, 6), TealOp(expr, Op.minus)]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_minus_overload():\n args = [Int(10), Int(1), Int(2)]\n expr = args[0] - args[1] - args[2]\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 10),\n TealOp(args[1], Op.int, 1),\n TealOp(None, Op.minus),\n TealOp(args[2], Op.int, 2),\n TealOp(None, Op.minus),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_minus_invalid():\n with pytest.raises(TealTypeError):\n Minus(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n Minus(Txn.sender(), Int(2))\n\n\ndef test_mul():\n args = [Int(3), Int(8)]\n expr = Mul(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [TealOp(args[0], Op.int, 3), TealOp(args[1], Op.int, 8), TealOp(expr, Op.mul)]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_mul_overload():\n args = [Int(3), Int(8), Int(10)]\n expr = args[0] * args[1] * args[2]\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 3),\n TealOp(args[1], Op.int, 8),\n TealOp(None, Op.mul),\n TealOp(args[2], Op.int, 10),\n TealOp(None, Op.mul),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_mul_invalid():\n with pytest.raises(TealTypeError):\n Mul(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n Mul(Txn.sender(), Int(2))\n\n\ndef test_div():\n args = 
[Int(9), Int(3)]\n expr = Div(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [TealOp(args[0], Op.int, 9), TealOp(args[1], Op.int, 3), TealOp(expr, Op.div)]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_div_overload():\n args = [Int(9), Int(3), Int(3)]\n expr = args[0] / args[1] / args[2]\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 9),\n TealOp(args[1], Op.int, 3),\n TealOp(None, Op.div),\n TealOp(args[2], Op.int, 3),\n TealOp(None, Op.div),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_div_invalid():\n with pytest.raises(TealTypeError):\n Div(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n Div(Txn.sender(), Int(2))\n\n\ndef test_mod():\n args = [Int(10), Int(9)]\n expr = Mod(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [TealOp(args[0], Op.int, 10), TealOp(args[1], Op.int, 9), TealOp(expr, Op.mod)]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_mod_overload():\n args = [Int(10), Int(9), Int(100)]\n expr = args[0] % args[1] % args[2]\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 10),\n TealOp(args[1], Op.int, 9),\n TealOp(None, Op.mod),\n TealOp(args[2], Op.int, 100),\n TealOp(None, Op.mod),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_mod_invalid():\n with pytest.raises(TealTypeError):\n Mod(Txn.receiver(), Int(2))\n\n with pytest.raises(TealTypeError):\n Mod(Int(2), Txn.sender())\n\n\ndef test_exp():\n args = [Int(2), Int(9)]\n expr = Exp(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [TealOp(args[0], Op.int, 2), TealOp(args[1], Op.int, 9), TealOp(expr, Op.exp)]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_exp_overload():\n args = [Int(2), Int(3), Int(1)]\n # this is equivalent to args[0] ** (args[1] ** args[2])\n expr = args[0] ** args[1] ** args[2]\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 2),\n TealOp(args[1], Op.int, 3),\n TealOp(args[2], Op.int, 1),\n TealOp(None, Op.exp),\n TealOp(None, Op.exp),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_exp_invalid():\n with pytest.raises(TealTypeError):\n Exp(Txn.receiver(), Int(2))\n\n with pytest.raises(TealTypeError):\n Exp(Int(2), Txn.sender())\n\n\ndef test_arithmetic():\n args = [Int(2), Int(3), Int(5), Int(6), Int(8), Int(9)]\n v = ((args[0] + args[1]) / ((args[2] - args[3]) * args[4])) % args[5]\n assert v.type_of() == 
TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 2),\n TealOp(args[1], Op.int, 3),\n TealOp(None, Op.add),\n TealOp(args[2], Op.int, 5),\n TealOp(args[3], Op.int, 6),\n TealOp(None, Op.minus),\n TealOp(args[4], Op.int, 8),\n TealOp(None, Op.mul),\n TealOp(None, Op.div),\n TealOp(args[5], Op.int, 9),\n TealOp(None, Op.mod),\n ]\n )\n\n actual, _ = v.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_bitwise_and():\n args = [Int(1), Int(2)]\n expr = BitwiseAnd(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 1),\n TealOp(args[1], Op.int, 2),\n TealOp(expr, Op.bitwise_and),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_bitwise_and_overload():\n args = [Int(1), Int(2), Int(4)]\n expr = args[0] & args[1] & args[2]\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 1),\n TealOp(args[1], Op.int, 2),\n TealOp(None, Op.bitwise_and),\n TealOp(args[2], Op.int, 4),\n TealOp(None, Op.bitwise_and),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_bitwise_and_invalid():\n with pytest.raises(TealTypeError):\n BitwiseAnd(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n BitwiseAnd(Txn.sender(), Int(2))\n\n\ndef test_bitwise_or():\n args = [Int(1), Int(2)]\n expr = BitwiseOr(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 1),\n TealOp(args[1], Op.int, 2),\n TealOp(expr, Op.bitwise_or),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_bitwise_or_overload():\n args = [Int(1), Int(2), Int(4)]\n expr = args[0] | args[1] | args[2]\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 1),\n TealOp(args[1], Op.int, 2),\n TealOp(None, Op.bitwise_or),\n TealOp(args[2], Op.int, 4),\n TealOp(None, Op.bitwise_or),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_bitwise_or_invalid():\n with pytest.raises(TealTypeError):\n BitwiseOr(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n BitwiseOr(Txn.sender(), Int(2))\n\n\ndef test_bitwise_xor():\n args = [Int(1), Int(3)]\n expr = BitwiseXor(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 1),\n TealOp(args[1], Op.int, 3),\n TealOp(expr, Op.bitwise_xor),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_bitwise_xor_overload():\n args = [Int(1), Int(3), Int(5)]\n expr = args[0] ^ args[1] ^ args[2]\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 1),\n TealOp(args[1], Op.int, 3),\n TealOp(None, Op.bitwise_xor),\n TealOp(args[2], Op.int, 5),\n 
TealOp(None, Op.bitwise_xor),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_bitwise_xor_invalid():\n with pytest.raises(TealTypeError):\n BitwiseXor(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n BitwiseXor(Txn.sender(), Int(2))\n\n\ndef test_shift_left():\n args = [Int(5), Int(1)]\n expr = ShiftLeft(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [TealOp(args[0], Op.int, 5), TealOp(args[1], Op.int, 1), TealOp(expr, Op.shl)]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_shift_left_overload():\n args = [Int(5), Int(1), Int(2)]\n expr = args[0] << args[1] << args[2]\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 5),\n TealOp(args[1], Op.int, 1),\n TealOp(None, Op.shl),\n TealOp(args[2], Op.int, 2),\n TealOp(None, Op.shl),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_shift_left_invalid():\n with pytest.raises(TealTypeError):\n ShiftLeft(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n ShiftLeft(Txn.sender(), Int(2))\n\n\ndef test_shift_right():\n args = [Int(5), Int(1)]\n expr = ShiftRight(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [TealOp(args[0], Op.int, 5), TealOp(args[1], Op.int, 1), TealOp(expr, Op.shr)]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_shift_right_overload():\n args = [Int(5), Int(1), Int(2)]\n expr = args[0] >> args[1] >> args[2]\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 5),\n TealOp(args[1], Op.int, 1),\n TealOp(None, Op.shr),\n TealOp(args[2], Op.int, 2),\n TealOp(None, Op.shr),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_shift_right_invalid():\n with pytest.raises(TealTypeError):\n ShiftRight(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n ShiftRight(Txn.sender(), Int(2))\n\n\ndef test_eq():\n args_int = [Int(2), Int(3)]\n expr_int = Eq(args_int[0], args_int[1])\n assert expr_int.type_of() == TealType.uint64\n\n expected_int = TealSimpleBlock(\n [\n TealOp(args_int[0], Op.int, 2),\n TealOp(args_int[1], Op.int, 3),\n TealOp(expr_int, Op.eq),\n ]\n )\n\n actual_int, _ = expr_int.__teal__(teal2Options)\n actual_int.addIncoming()\n actual_int = TealBlock.NormalizeBlocks(actual_int)\n\n assert actual_int == expected_int\n\n args_bytes = [Txn.receiver(), Txn.sender()]\n expr_bytes = Eq(args_bytes[0], args_bytes[1])\n assert expr_bytes.type_of() == TealType.uint64\n\n expected_bytes = TealSimpleBlock(\n [\n 
TealOp(args_bytes[0], Op.txn, \"Receiver\"),\n TealOp(args_bytes[1], Op.txn, \"Sender\"),\n TealOp(expr_bytes, Op.eq),\n ]\n )\n\n actual_bytes, _ = expr_bytes.__teal__(teal2Options)\n actual_bytes.addIncoming()\n actual_bytes = TealBlock.NormalizeBlocks(actual_bytes)\n\n assert actual_bytes == expected_bytes\n\n\ndef test_eq_overload():\n args_int = [Int(2), Int(3)]\n expr_int = args_int[0] == args_int[1]\n assert expr_int.type_of() == TealType.uint64\n\n expected_int = TealSimpleBlock(\n [\n TealOp(args_int[0], Op.int, 2),\n TealOp(args_int[1], Op.int, 3),\n TealOp(expr_int, Op.eq),\n ]\n )\n\n actual_int, _ = expr_int.__teal__(teal2Options)\n actual_int.addIncoming()\n actual_int = TealBlock.NormalizeBlocks(actual_int)\n\n assert actual_int == expected_int\n\n args_bytes = [Txn.receiver(), Txn.sender()]\n expr_bytes = args_bytes[0] == args_bytes[1]\n assert expr_bytes.type_of() == TealType.uint64\n\n expected_bytes = TealSimpleBlock(\n [\n TealOp(args_bytes[0], Op.txn, \"Receiver\"),\n TealOp(args_bytes[1], Op.txn, \"Sender\"),\n TealOp(expr_bytes, Op.eq),\n ]\n )\n\n actual_bytes, _ = expr_bytes.__teal__(teal2Options)\n actual_bytes.addIncoming()\n actual_bytes = TealBlock.NormalizeBlocks(actual_bytes)\n\n assert actual_bytes == expected_bytes\n\n\ndef test_eq_invalid():\n with pytest.raises(TealTypeError):\n Eq(Txn.fee(), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n Eq(Txn.sender(), Int(7))\n\n\ndef test_neq():\n args_int = [Int(2), Int(3)]\n expr_int = Neq(args_int[0], args_int[1])\n assert expr_int.type_of() == TealType.uint64\n\n expected_int = TealSimpleBlock(\n [\n TealOp(args_int[0], Op.int, 2),\n TealOp(args_int[1], Op.int, 3),\n TealOp(expr_int, Op.neq),\n ]\n )\n\n actual_int, _ = expr_int.__teal__(teal2Options)\n actual_int.addIncoming()\n actual_int = TealBlock.NormalizeBlocks(actual_int)\n\n assert actual_int == expected_int\n\n args_bytes = [Txn.receiver(), Txn.sender()]\n expr_bytes = Neq(args_bytes[0], args_bytes[1])\n assert expr_bytes.type_of() == TealType.uint64\n\n expected_bytes = TealSimpleBlock(\n [\n TealOp(args_bytes[0], Op.txn, \"Receiver\"),\n TealOp(args_bytes[1], Op.txn, \"Sender\"),\n TealOp(expr_bytes, Op.neq),\n ]\n )\n\n actual_bytes, _ = expr_bytes.__teal__(teal2Options)\n actual_bytes.addIncoming()\n actual_bytes = TealBlock.NormalizeBlocks(actual_bytes)\n\n assert actual_bytes == expected_bytes\n\n\ndef test_neq_overload():\n args_int = [Int(2), Int(3)]\n expr_int = args_int[0] != args_int[1]\n assert expr_int.type_of() == TealType.uint64\n\n expected_int = TealSimpleBlock(\n [\n TealOp(args_int[0], Op.int, 2),\n TealOp(args_int[1], Op.int, 3),\n TealOp(expr_int, Op.neq),\n ]\n )\n\n actual_int, _ = expr_int.__teal__(teal2Options)\n actual_int.addIncoming()\n actual_int = TealBlock.NormalizeBlocks(actual_int)\n\n assert actual_int == expected_int\n\n args_bytes = [Txn.receiver(), Txn.sender()]\n expr_bytes = args_bytes[0] != args_bytes[1]\n assert expr_bytes.type_of() == TealType.uint64\n\n expected_bytes = TealSimpleBlock(\n [\n TealOp(args_bytes[0], Op.txn, \"Receiver\"),\n TealOp(args_bytes[1], Op.txn, \"Sender\"),\n TealOp(expr_bytes, Op.neq),\n ]\n )\n\n actual_bytes, _ = expr_bytes.__teal__(teal2Options)\n actual_bytes.addIncoming()\n actual_bytes = TealBlock.NormalizeBlocks(actual_bytes)\n\n assert actual_bytes == expected_bytes\n\n\ndef test_neq_invalid():\n with pytest.raises(TealTypeError):\n Neq(Txn.fee(), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n Neq(Txn.sender(), Int(7))\n\n\ndef test_lt():\n args = [Int(2), 
Int(3)]\n expr = Lt(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [TealOp(args[0], Op.int, 2), TealOp(args[1], Op.int, 3), TealOp(expr, Op.lt)]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_lt_overload():\n args = [Int(2), Int(3)]\n expr = args[0] < args[1]\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [TealOp(args[0], Op.int, 2), TealOp(args[1], Op.int, 3), TealOp(expr, Op.lt)]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_lt_invalid():\n with pytest.raises(TealTypeError):\n Lt(Int(7), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n Lt(Txn.sender(), Int(7))\n\n\ndef test_le():\n args = [Int(1), Int(2)]\n expr = Le(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [TealOp(args[0], Op.int, 1), TealOp(args[1], Op.int, 2), TealOp(expr, Op.le)]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_le_overload():\n args = [Int(1), Int(2)]\n expr = args[0] <= args[1]\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [TealOp(args[0], Op.int, 1), TealOp(args[1], Op.int, 2), TealOp(expr, Op.le)]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_le_invalid():\n with pytest.raises(TealTypeError):\n Le(Int(1), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n Le(Txn.sender(), Int(1))\n\n\ndef test_gt():\n args = [Int(2), Int(3)]\n expr = Gt(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [TealOp(args[0], Op.int, 2), TealOp(args[1], Op.int, 3), TealOp(expr, Op.gt)]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_gt_overload():\n args = [Int(2), Int(3)]\n expr = args[0] > args[1]\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [TealOp(args[0], Op.int, 2), TealOp(args[1], Op.int, 3), TealOp(expr, Op.gt)]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_gt_invalid():\n with pytest.raises(TealTypeError):\n Gt(Int(1), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n Gt(Txn.receiver(), Int(1))\n\n\ndef test_ge():\n args = [Int(1), Int(10)]\n expr = Ge(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [TealOp(args[0], Op.int, 1), TealOp(args[1], Op.int, 10), TealOp(expr, Op.ge)]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_ge_overload():\n args = [Int(1), Int(10)]\n expr = args[0] >= args[1]\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [TealOp(args[0], Op.int, 1), TealOp(args[1], Op.int, 10), TealOp(expr, Op.ge)]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_ge_invalid():\n with 
pytest.raises(TealTypeError):\n Ge(Int(1), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n Ge(Txn.receiver(), Int(1))\n\n\ndef test_get_bit_int():\n args = [Int(3), Int(1)]\n expr = GetBit(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 3),\n TealOp(args[1], Op.int, 1),\n TealOp(expr, Op.getbit),\n ]\n )\n\n actual, _ = expr.__teal__(teal3Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal2Options)\n\n\ndef test_get_bit_bytes():\n args = [Bytes(\"base16\", \"0xFF\"), Int(1)]\n expr = GetBit(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, \"0xFF\"),\n TealOp(args[1], Op.int, 1),\n TealOp(expr, Op.getbit),\n ]\n )\n\n actual, _ = expr.__teal__(teal3Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal2Options)\n\n\ndef test_get_bit_invalid():\n with pytest.raises(TealTypeError):\n GetBit(Int(3), Bytes(\"index\"))\n\n with pytest.raises(TealTypeError):\n GetBit(Bytes(\"base16\", \"0xFF\"), Bytes(\"index\"))\n\n\ndef test_get_byte():\n args = [Bytes(\"base16\", \"0xFF\"), Int(0)]\n expr = GetByte(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, \"0xFF\"),\n TealOp(args[1], Op.int, 0),\n TealOp(expr, Op.getbyte),\n ]\n )\n\n actual, _ = expr.__teal__(teal3Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal2Options)\n\n\ndef test_get_byte_invalid():\n with pytest.raises(TealTypeError):\n GetByte(Int(3), Int(0))\n\n with pytest.raises(TealTypeError):\n GetBit(Bytes(\"base16\", \"0xFF\"), Bytes(\"index\"))\n\n\ndef test_b_add():\n args = [\n Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFFF\"),\n Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFFE\"),\n ]\n expr = BytesAdd(args[0], args[1])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, \"0xFFFFFFFFFFFFFFFFFF\"),\n TealOp(args[1], Op.byte, \"0xFFFFFFFFFFFFFFFFFE\"),\n TealOp(expr, Op.b_add),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_b_add_invalid():\n with pytest.raises(TealTypeError):\n BytesAdd(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n BytesAdd(Bytes(\"base16\", \"0xFF\"), Int(2))\n\n\ndef test_b_minus():\n args = [\n Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFFF\"),\n Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFFE\"),\n ]\n expr = BytesMinus(args[0], args[1])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, \"0xFFFFFFFFFFFFFFFFFF\"),\n TealOp(args[1], Op.byte, \"0xFFFFFFFFFFFFFFFFFE\"),\n TealOp(expr, Op.b_minus),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_b_minus_invalid():\n with pytest.raises(TealTypeError):\n BytesMinus(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n 
BytesMinus(Bytes(\"base16\", \"0xFF\"), Int(2))\n\n\ndef test_b_div():\n args = [Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFF00\"), Bytes(\"base16\", \"0xFF\")]\n expr = BytesDiv(args[0], args[1])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, \"0xFFFFFFFFFFFFFFFF00\"),\n TealOp(args[1], Op.byte, \"0xFF\"),\n TealOp(expr, Op.b_div),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_b_div_invalid():\n with pytest.raises(TealTypeError):\n BytesDiv(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n BytesDiv(Bytes(\"base16\", \"0xFF\"), Int(2))\n\n\ndef test_b_mul():\n args = [Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFF\"), Bytes(\"base16\", \"0xFF\")]\n expr = BytesMul(args[0], args[1])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, \"0xFFFFFFFFFFFFFFFF\"),\n TealOp(args[1], Op.byte, \"0xFF\"),\n TealOp(expr, Op.b_mul),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_b_mul_invalid():\n with pytest.raises(TealTypeError):\n BytesMul(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n BytesMul(Bytes(\"base16\", \"0xFF\"), Int(2))\n\n\ndef test_b_mod():\n args = [Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFFF\"), Bytes(\"base16\", \"0xFF\")]\n expr = BytesMod(args[0], args[1])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, \"0xFFFFFFFFFFFFFFFFFF\"),\n TealOp(args[1], Op.byte, \"0xFF\"),\n TealOp(expr, Op.b_mod),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_b_mod_invalid():\n with pytest.raises(TealTypeError):\n BytesMod(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n BytesMod(Bytes(\"base16\", \"0xFF\"), Int(2))\n\n\ndef test_b_and():\n args = [Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFF0\"), Bytes(\"base16\", \"0xFF\")]\n expr = BytesAnd(args[0], args[1])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, \"0xFFFFFFFFFFFFFFFFF0\"),\n TealOp(args[1], Op.byte, \"0xFF\"),\n TealOp(expr, Op.b_and),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_b_and_invalid():\n with pytest.raises(TealTypeError):\n BytesAnd(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n BytesAnd(Bytes(\"base16\", \"0xFF\"), Int(2))\n\n\ndef test_b_or():\n args = [Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFF0\"), Bytes(\"base16\", \"0xFF\")]\n expr = BytesOr(args[0], args[1])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, \"0xFFFFFFFFFFFFFFFFF0\"),\n TealOp(args[1], Op.byte, \"0xFF\"),\n TealOp(expr, Op.b_or),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with 
pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_b_or_invalid():\n with pytest.raises(TealTypeError):\n BytesOr(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n BytesOr(Bytes(\"base16\", \"0xFF\"), Int(2))\n\n\ndef test_b_xor():\n args = [Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFF0\"), Bytes(\"base16\", \"0xFF\")]\n expr = BytesXor(args[0], args[1])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, \"0xFFFFFFFFFFFFFFFFF0\"),\n TealOp(args[1], Op.byte, \"0xFF\"),\n TealOp(expr, Op.b_xor),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_b_xor_invalid():\n with pytest.raises(TealTypeError):\n BytesXor(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n BytesXor(Bytes(\"base16\", \"0xFF\"), Int(2))\n\n\ndef test_b_eq():\n args = [\n Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFFF\"),\n Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFFF\"),\n ]\n expr = BytesEq(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, \"0xFFFFFFFFFFFFFFFFFF\"),\n TealOp(args[1], Op.byte, \"0xFFFFFFFFFFFFFFFFFF\"),\n TealOp(expr, Op.b_eq),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_b_eq_invalid():\n with pytest.raises(TealTypeError):\n BytesEq(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n BytesEq(Bytes(\"base16\", \"0xFF\"), Int(2))\n\n\ndef test_b_neq():\n args = [\n Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFFF\"),\n Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFFF\"),\n ]\n expr = BytesNeq(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, \"0xFFFFFFFFFFFFFFFFFF\"),\n TealOp(args[1], Op.byte, \"0xFFFFFFFFFFFFFFFFFF\"),\n TealOp(expr, Op.b_neq),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_b_neq_invalid():\n with pytest.raises(TealTypeError):\n BytesNeq(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n BytesNeq(Bytes(\"base16\", \"0xFF\"), Int(2))\n\n\ndef test_b_lt():\n args = [\n Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFF0\"),\n Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFFF\"),\n ]\n expr = BytesLt(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, \"0xFFFFFFFFFFFFFFFFF0\"),\n TealOp(args[1], Op.byte, \"0xFFFFFFFFFFFFFFFFFF\"),\n TealOp(expr, Op.b_lt),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_b_lt_invalid():\n with pytest.raises(TealTypeError):\n BytesLt(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n BytesLt(Bytes(\"base16\", \"0xFF\"), Int(2))\n\n\ndef test_b_le():\n args = [\n Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFF0\"),\n Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFFF\"),\n ]\n expr = BytesLe(args[0], args[1])\n assert expr.type_of() == 
TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, \"0xFFFFFFFFFFFFFFFFF0\"),\n TealOp(args[1], Op.byte, \"0xFFFFFFFFFFFFFFFFFF\"),\n TealOp(expr, Op.b_le),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_b_le_invalid():\n with pytest.raises(TealTypeError):\n BytesLe(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n BytesLe(Bytes(\"base16\", \"0xFF\"), Int(2))\n\n\ndef test_b_gt():\n args = [\n Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFFF\"),\n Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFF0\"),\n ]\n expr = BytesGt(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, \"0xFFFFFFFFFFFFFFFFFF\"),\n TealOp(args[1], Op.byte, \"0xFFFFFFFFFFFFFFFFF0\"),\n TealOp(expr, Op.b_gt),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_b_gt_invalid():\n with pytest.raises(TealTypeError):\n BytesGt(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n BytesGt(Bytes(\"base16\", \"0xFF\"), Int(2))\n\n\ndef test_b_ge():\n args = [\n Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFFF\"),\n Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFF0\"),\n ]\n expr = BytesGe(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, \"0xFFFFFFFFFFFFFFFFFF\"),\n TealOp(args[1], Op.byte, \"0xFFFFFFFFFFFFFFFFF0\"),\n TealOp(expr, Op.b_ge),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal3Options)\n\n\ndef test_b_ge_invalid():\n with pytest.raises(TealTypeError):\n BytesGe(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n BytesGe(Bytes(\"base16\", \"0xFF\"), Int(2))\n\n\ndef test_extract_uint():\n for expression, op in (\n (ExtractUint16, Op.extract_uint16),\n (ExtractUint32, Op.extract_uint32),\n (ExtractUint64, Op.extract_uint64),\n ):\n args = [\n Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFFF\"),\n Int(2),\n ]\n expr = expression(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, \"0xFFFFFFFFFFFFFFFFFF\"),\n TealOp(args[1], Op.int, 2),\n TealOp(expr, op),\n ]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal4Options)\n\n\ndef test_extract_uint_invalid():\n for expression in (ExtractUint16, ExtractUint32, ExtractUint64):\n with pytest.raises(TealTypeError):\n expression(Int(2), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n expression(Bytes(\"base16\", \"0xFF\"), Txn.receiver())\n"
},
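One subtlety exercised by `test_exp_overload` above deserves a callout: Python's `**` is right-associative, so chained exponentiation groups from the right. A small sketch (the values are illustrative):

```python
from pyteal import Int, Mode, compileTeal

# Builds Int(2) ** (Int(3) ** Int(1)), not (Int(2) ** Int(3)) ** Int(1).
expr = Int(2) ** Int(3) ** Int(1)

# Exp lowers to the TEAL `exp` opcode, available from version 4 onward.
print(compileTeal(expr, mode=Mode.Application, version=4))
```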
{
"alpha_fraction": 0.6047678589820862,
"alphanum_fraction": 0.6047678589820862,
"avg_line_length": 21.77142906188965,
"blob_id": "cc237eb4e035623ac2aa974fba2bbbc2dd1bc675",
"content_id": "85ba62b5f181c35138cb530c4e63d2f5baec88e5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 797,
"license_type": "permissive",
"max_line_length": 86,
"num_lines": 35,
"path": "/pyteal/__init__.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from .ast import *\nfrom .ast import __all__ as ast_all\nfrom .ir import *\nfrom .ir import __all__ as ir_all\nfrom .compiler import (\n MAX_TEAL_VERSION,\n MIN_TEAL_VERSION,\n DEFAULT_TEAL_VERSION,\n CompileOptions,\n compileTeal,\n)\nfrom .types import TealType\nfrom .errors import TealInternalError, TealTypeError, TealInputError, TealCompileError\nfrom .config import MAX_GROUP_SIZE, NUM_SLOTS\n\n# begin __all__\n__all__ = (\n ast_all\n + ir_all\n + [\n \"MAX_TEAL_VERSION\",\n \"MIN_TEAL_VERSION\",\n \"DEFAULT_TEAL_VERSION\",\n \"CompileOptions\",\n \"compileTeal\",\n \"TealType\",\n \"TealInternalError\",\n \"TealTypeError\",\n \"TealInputError\",\n \"TealCompileError\",\n \"MAX_GROUP_SIZE\",\n \"NUM_SLOTS\",\n ]\n)\n# end __all__\n"
},
{
"alpha_fraction": 0.5981583595275879,
"alphanum_fraction": 0.6000000238418579,
"avg_line_length": 29.16666603088379,
"blob_id": "c232c992cdc5fd55167169099e913ef179a2e561",
"content_id": "ff629a885762627ce692ec304211fdc38faa76ba",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2715,
"license_type": "permissive",
"max_line_length": 97,
"num_lines": 90,
"path": "/pyteal/ast/maybe.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from typing import List, Union, TYPE_CHECKING\n\nfrom ..types import TealType\nfrom ..ir import TealOp, Op, TealBlock\nfrom .expr import Expr\nfrom .leafexpr import LeafExpr\nfrom .scratch import ScratchSlot, ScratchLoad\n\nif TYPE_CHECKING:\n from ..compiler import CompileOptions\n\n\nclass MaybeValue(LeafExpr):\n \"\"\"Represents a get operation returning a value that may not exist.\"\"\"\n\n def __init__(\n self,\n op: Op,\n type: TealType,\n *,\n immediate_args: List[Union[int, str]] = None,\n args: List[Expr] = None\n ):\n \"\"\"Create a new MaybeValue.\n\n Args:\n op: The operation that returns values.\n type: The type of the returned value.\n immediate_args (optional): Immediate arguments for the op. Defaults to None.\n args (optional): Stack arguments for the op. Defaults to None.\n \"\"\"\n super().__init__()\n self.op = op\n self.type = type\n self.immediate_args = immediate_args if immediate_args is not None else []\n self.args = args if args is not None else []\n self.slotOk = ScratchSlot()\n self.slotValue = ScratchSlot()\n\n def hasValue(self) -> ScratchLoad:\n \"\"\"Check if the value exists.\n\n This will return 1 if the value exists, otherwise 0.\n \"\"\"\n return self.slotOk.load(TealType.uint64)\n\n def value(self) -> ScratchLoad:\n \"\"\"Get the value.\n\n If the value exists, it will be returned. Otherwise, the zero value for this type will be\n returned (i.e. either 0 or an empty byte string, depending on the type).\n \"\"\"\n return self.slotValue.load(self.type)\n\n def __str__(self):\n ret_str = \"(({}\".format(self.op)\n for a in self.immediate_args:\n ret_str += \" \" + a.__str__()\n\n for a in self.args:\n ret_str += \" \" + a.__str__()\n ret_str += \") \"\n\n storeOk = self.slotOk.store()\n storeValue = self.slotValue.store()\n\n ret_str += storeOk.__str__() + \" \" + storeValue.__str__() + \")\"\n\n return ret_str\n\n def __teal__(self, options: \"CompileOptions\"):\n tealOp = TealOp(self, self.op, *self.immediate_args)\n callStart, callEnd = TealBlock.FromOp(options, tealOp, *self.args)\n\n storeOk = self.slotOk.store()\n storeValue = self.slotValue.store()\n\n storeOkStart, storeOkEnd = storeOk.__teal__(options)\n storeValueStart, storeValueEnd = storeValue.__teal__(options)\n\n callEnd.setNextBlock(storeOkStart)\n storeOkEnd.setNextBlock(storeValueStart)\n\n return callStart, storeValueEnd\n\n def type_of(self):\n return TealType.none\n\n\nMaybeValue.__module__ = \"pyteal\"\n"
},
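MaybeValue is normally consumed through a store-then-branch pattern: evaluate the getter once inside a Seq, then read the two scratch slots back. A sketch using AssetHolding.balance, one of the getters in this codebase that returns a MaybeValue (the account and asset indices are illustrative):

```python
from pyteal import AssetHolding, If, Int, Mode, Seq, compileTeal

# asset_holding_get writes an "exists" flag and a value into two
# scratch slots; the MaybeValue expression itself is TealType.none.
holding = AssetHolding.balance(Int(0), Int(0))  # sender, first foreign asset

program = Seq(
    [
        holding,  # must run before hasValue()/value() are loaded
        If(holding.hasValue(), holding.value(), Int(0)),
    ]
)

print(compileTeal(program, mode=Mode.Application, version=2))
```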
{
"alpha_fraction": 0.6529045701026917,
"alphanum_fraction": 0.666799008846283,
"avg_line_length": 29.22800064086914,
"blob_id": "21f2eb501eb6184d5c5e03daa49a5f1e1f406a7e",
"content_id": "511ed26a739fd3e4d8ef62d8c29f2422f4f15993",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7557,
"license_type": "permissive",
"max_line_length": 77,
"num_lines": 250,
"path": "/pyteal/ir/tealblock_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\noptions = CompileOptions()\n\n\ndef test_from_op_no_args():\n op = TealOp(None, Op.int, 1)\n\n expected = TealSimpleBlock([op])\n\n actual, _ = TealBlock.FromOp(options, op)\n\n assert actual == expected\n\n\ndef test_from_op_1_arg():\n op = TealOp(None, Op.pop)\n arg_1 = Bytes(\"message\")\n\n expected = TealSimpleBlock([TealOp(arg_1, Op.byte, '\"message\"'), op])\n\n actual, _ = TealBlock.FromOp(options, op, arg_1)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n actual.validateTree()\n\n assert actual == expected\n\n\ndef test_from_op_2_args():\n op = TealOp(None, Op.app_global_put)\n arg_1 = Bytes(\"key\")\n arg_2 = Int(5)\n\n expected = TealSimpleBlock(\n [TealOp(arg_1, Op.byte, '\"key\"'), TealOp(arg_2, Op.int, 5), op]\n )\n\n actual, _ = TealBlock.FromOp(options, op, arg_1, arg_2)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n actual.validateTree()\n\n assert actual == expected\n\n\ndef test_from_op_3_args():\n op = TealOp(None, Op.app_local_put)\n arg_1 = Int(0)\n arg_2 = Bytes(\"key\")\n arg_3 = Int(1)\n arg_4 = Int(2)\n arg_3_plus_4 = arg_3 + arg_4\n\n expected = TealSimpleBlock(\n [\n TealOp(arg_1, Op.int, 0),\n TealOp(arg_2, Op.byte, '\"key\"'),\n TealOp(arg_3, Op.int, 1),\n TealOp(arg_4, Op.int, 2),\n TealOp(arg_3_plus_4, Op.add),\n op,\n ]\n )\n\n actual, _ = TealBlock.FromOp(options, op, arg_1, arg_2, arg_3_plus_4)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n actual.validateTree()\n\n assert actual == expected\n\n\ndef test_iterate_single():\n block = TealSimpleBlock([TealOp(None, Op.int, 1)])\n\n blocks = list(TealBlock.Iterate(block))\n\n assert blocks == [block]\n\n\ndef test_iterate_sequence():\n block5 = TealSimpleBlock([TealOp(None, Op.int, 5)])\n block4 = TealSimpleBlock([TealOp(None, Op.int, 4)])\n block4.setNextBlock(block5)\n block3 = TealSimpleBlock([TealOp(None, Op.int, 3)])\n block3.setNextBlock(block4)\n block2 = TealSimpleBlock([TealOp(None, Op.int, 2)])\n block2.setNextBlock(block3)\n block1 = TealSimpleBlock([TealOp(None, Op.int, 1)])\n block1.setNextBlock(block2)\n\n blocks = list(TealBlock.Iterate(block1))\n\n assert blocks == [block1, block2, block3, block4, block5]\n\n\ndef test_iterate_branch():\n blockTrue = TealSimpleBlock([TealOp(None, Op.byte, '\"true\"')])\n blockFalse = TealSimpleBlock([TealOp(None, Op.byte, '\"false\"')])\n block = TealConditionalBlock([TealOp(None, Op.int, 1)])\n block.setTrueBlock(blockTrue)\n block.setFalseBlock(blockFalse)\n\n blocks = list(TealBlock.Iterate(block))\n\n assert blocks == [block, blockTrue, blockFalse]\n\n\ndef test_iterate_multiple_branch():\n blockTrueTrue = TealSimpleBlock([TealOp(None, Op.byte, '\"true true\"')])\n blockTrueFalse = TealSimpleBlock([TealOp(None, Op.byte, '\"true false\"')])\n blockTrueBranch = TealConditionalBlock([])\n blockTrueBranch.setTrueBlock(blockTrueTrue)\n blockTrueBranch.setFalseBlock(blockTrueFalse)\n blockTrue = TealSimpleBlock([TealOp(None, Op.byte, '\"true\"')])\n blockTrue.setNextBlock(blockTrueBranch)\n blockFalse = TealSimpleBlock([TealOp(None, Op.byte, '\"false\"')])\n block = TealConditionalBlock([TealOp(None, Op.int, 1)])\n block.setTrueBlock(blockTrue)\n block.setFalseBlock(blockFalse)\n\n blocks = list(TealBlock.Iterate(block))\n\n assert blocks == [\n block,\n blockTrue,\n blockFalse,\n blockTrueBranch,\n blockTrueTrue,\n blockTrueFalse,\n ]\n\n\ndef 
test_iterate_branch_converge():\n blockEnd = TealSimpleBlock([TealOp(None, Op.return_)])\n blockTrue = TealSimpleBlock([TealOp(None, Op.byte, '\"true\"')])\n blockTrue.setNextBlock(blockEnd)\n blockFalse = TealSimpleBlock([TealOp(None, Op.byte, '\"false\"')])\n blockFalse.setNextBlock(blockEnd)\n block = TealConditionalBlock([TealOp(None, Op.int, 1)])\n block.setTrueBlock(blockTrue)\n block.setFalseBlock(blockFalse)\n\n blocks = list(TealBlock.Iterate(block))\n\n assert blocks == [block, blockTrue, blockFalse, blockEnd]\n\n\ndef test_normalize_single():\n original = TealSimpleBlock([TealOp(None, Op.int, 1)])\n\n expected = TealSimpleBlock([TealOp(None, Op.int, 1)])\n\n original.addIncoming()\n actual = TealBlock.NormalizeBlocks(original)\n actual.validateTree()\n\n assert actual == expected\n\n\ndef test_normalize_sequence():\n block6 = TealSimpleBlock([])\n block5 = TealSimpleBlock([TealOp(None, Op.int, 5)])\n block5.setNextBlock(block6)\n block4 = TealSimpleBlock([TealOp(None, Op.int, 4)])\n block4.setNextBlock(block5)\n block3 = TealSimpleBlock([TealOp(None, Op.int, 3)])\n block3.setNextBlock(block4)\n block2 = TealSimpleBlock([TealOp(None, Op.int, 2)])\n block2.setNextBlock(block3)\n block1 = TealSimpleBlock([TealOp(None, Op.int, 1)])\n block1.setNextBlock(block2)\n\n expected = TealSimpleBlock(\n [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.int, 4),\n TealOp(None, Op.int, 5),\n ]\n )\n\n block1.addIncoming()\n actual = TealBlock.NormalizeBlocks(block1)\n actual.validateTree()\n\n assert actual == expected\n\n\ndef test_normalize_branch():\n blockTrueNext = TealSimpleBlock([TealOp(None, Op.int, 4)])\n blockTrue = TealSimpleBlock([TealOp(None, Op.byte, '\"true\"')])\n blockTrue.setNextBlock(blockTrueNext)\n blockFalse = TealSimpleBlock([TealOp(None, Op.byte, '\"false\"')])\n blockBranch = TealConditionalBlock([TealOp(None, Op.int, 1)])\n blockBranch.setTrueBlock(blockTrue)\n blockBranch.setFalseBlock(blockFalse)\n original = TealSimpleBlock([])\n original.setNextBlock(blockBranch)\n\n expectedTrue = TealSimpleBlock(\n [TealOp(None, Op.byte, '\"true\"'), TealOp(None, Op.int, 4)]\n )\n expectedFalse = TealSimpleBlock([TealOp(None, Op.byte, '\"false\"')])\n expected = TealConditionalBlock([TealOp(None, Op.int, 1)])\n expected.setTrueBlock(expectedTrue)\n expected.setFalseBlock(expectedFalse)\n\n original.addIncoming()\n actual = TealBlock.NormalizeBlocks(original)\n actual.validateTree()\n\n assert actual == expected\n\n\ndef test_normalize_branch_converge():\n blockEnd = TealSimpleBlock([])\n blockTrueNext = TealSimpleBlock([TealOp(None, Op.int, 4)])\n blockTrueNext.setNextBlock(blockEnd)\n blockTrue = TealSimpleBlock([TealOp(None, Op.byte, '\"true\"')])\n blockTrue.setNextBlock(blockTrueNext)\n blockFalse = TealSimpleBlock([TealOp(None, Op.byte, '\"false\"')])\n blockFalse.setNextBlock(blockEnd)\n blockBranch = TealConditionalBlock([TealOp(None, Op.int, 1)])\n blockBranch.setTrueBlock(blockTrue)\n blockBranch.setFalseBlock(blockFalse)\n original = TealSimpleBlock([])\n original.setNextBlock(blockBranch)\n\n expectedEnd = TealSimpleBlock([])\n expectedTrue = TealSimpleBlock(\n [TealOp(None, Op.byte, '\"true\"'), TealOp(None, Op.int, 4)]\n )\n expectedTrue.setNextBlock(expectedEnd)\n expectedFalse = TealSimpleBlock([TealOp(None, Op.byte, '\"false\"')])\n expectedFalse.setNextBlock(expectedEnd)\n expected = TealConditionalBlock([TealOp(None, Op.int, 1)])\n expected.setTrueBlock(expectedTrue)\n expected.setFalseBlock(expectedFalse)\n\n 
original.addIncoming()\n actual = TealBlock.NormalizeBlocks(original)\n actual.validateTree()\n\n assert actual == expected\n"
},
{
"alpha_fraction": 0.5371767282485962,
"alphanum_fraction": 0.5759698152542114,
"avg_line_length": 63,
"blob_id": "140c1f7f18154275ef06b3d525b788fdf27df9d6",
"content_id": "7aa64991a29b474601b691a4917b6ae1e9081e32",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "reStructuredText",
"length_bytes": 1856,
"license_type": "permissive",
"max_line_length": 160,
"num_lines": 29,
"path": "/docs/crypto.rst",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": ".. _crypto:\n\nCryptographic Primitives\n========================\n\nAlgorand Smart Contracts support 4 cryptographic primitives, including 3 cryptographic\nhash functions and 1 digital signature verification. Each of these cryptographic\nprimitives is associated with a cost, which is a number indicating its relative performance\noverhead comparing with simple TEAL operations such as addition and substraction.\nSimple TEAL opcodes have cost `1`, and more advanced cryptographic operations have a larger\ncost. Below is how you express cryptographic primitives in PyTeal:\n\n\n=============================== ========= ========================================================================================\nOperator Cost Description\n=============================== ========= ========================================================================================\n:code:`Sha256(e)` `35` `SHA-256` hash function, produces 32 bytes\n:code:`Keccak256(e)` `130` `Keccak-256` hash funciton, produces 32 bytes\n:code:`Sha512_256(e)` `45` `SHA-512/256` hash function, produces 32 bytes\n:code:`Ed25519Verify(d, s, p)` `1900`\\* `1` if :code:`s` is the signature of :code:`d` signed by private key :code:`p`, else `0`\n=============================== ========= ========================================================================================\n\n\\* :code:`Ed25519Verify` is only available in signature mode.\n\nNote the cost amount is accurate for version 2 of TEAL and higher.\n\nThese cryptographic primitives cover the most used ones in blockchains and cryptocurrencies. For example, Bitcoin uses `SHA-256` for creating Bitcoin addresses;\nAlogrand uses `ed25519` signature scheme for authorization and uses `SHA-512/256` hash function for\ncreating contract account addresses from TEAL bytecode.\n"
},
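As a complement to the cost table in docs/crypto.rst above, here is a minimal sketch of a signature-mode program built from one of these primitives. The argument layout (data in `Arg(0)`, signature in `Arg(1)`, public key in `Arg(2)`) is a hypothetical convention chosen for the example, not something the library mandates:

```python
from pyteal import Arg, Ed25519Verify, Mode, compileTeal

# Hypothetical layout: Arg(0) = signed data, Arg(1) = 64-byte ed25519
# signature, Arg(2) = 32-byte public key. Ed25519Verify is only available
# in signature mode, as noted in the table above.
program = Ed25519Verify(Arg(0), Arg(1), Arg(2))

print(compileTeal(program, mode=Mode.Signature, version=2))
```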
{
"alpha_fraction": 0.7483841180801392,
"alphanum_fraction": 0.7548476457595825,
"avg_line_length": 33.380950927734375,
"blob_id": "1374e9270904cf4ab8fae3e09eadf3c0aa26c592",
"content_id": "a3442d2520710534228bdedc41a8f7e6fd4ff60b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2166,
"license_type": "permissive",
"max_line_length": 157,
"num_lines": 63,
"path": "/README.md",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "\n\n\n# PyTeal: Algorand Smart Contracts in Python\n\n[](https://travis-ci.com/algorand/pyteal)\n[](https://badge.fury.io/py/pyteal)\n[](https://pyteal.readthedocs.io/en/latest/?badge=latest)\n[](https://github.com/psf/black)\n\nPyTeal is a Python language binding for [Algorand Smart Contracts (ASC1s)](https://developer.algorand.org/docs/features/asc1/). \n\nAlgorand Smart Contracts are implemented using a new language that is stack-based, \ncalled [Transaction Execution Approval Language (TEAL)](https://developer.algorand.org/docs/features/asc1/teal/). \n\nHowever, TEAL is essentially an assembly language. With PyTeal, developers can express smart contract logic purely using Python. \nPyTeal provides high level, functional programming style abstractions over TEAL and does type checking at construction time.\n\n### Install \n\nPyTeal requires Python version >= 3.6.\n\n#### Recommended: Install from PyPi\n\nInstall the latest official release from PyPi:\n\n* `pip install pyteal`\n\n#### Install Latest Commit\n\nIf needed, it's possible to install directly from the latest commit on master to use unreleased features:\n\n> **WARNING:** Unreleased code is experimental and may not be backwards compatible or function properly. Use extreme caution when installing PyTeal this way.\n\n* `pip install git+https://github.com/algorand/pyteal`\n\n### Documentation\n\n[PyTeal Docs](https://pyteal.readthedocs.io/)\n\n### Development Setup\n\nSetup venv (one time):\n * `python3 -m venv venv`\n\nActive venv:\n * `. venv/bin/activate` (if your shell is bash/zsh)\n * `. venv/bin/activate.fish` (if your shell is fish)\n\nPip install PyTeal in editable state\n * `pip install -e .`\n\nInstall dependencies:\n* `pip install -r requirements.txt`\n \nType checking using mypy:\n* `mypy pyteal`\n\nRun tests:\n* `pytest`\n\nFormat code:\n* `black .`\n"
},
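To make the README's claim about expressing contract logic "purely using Python" concrete, a minimal sketch (a toy policy invented for illustration, not part of the README itself) could look like this:

```python
from pyteal import And, Global, Int, Mode, Txn, compileTeal

# Toy logic signature: approve only zero-fee, single-transaction groups.
program = And(
    Txn.fee() == Int(0),
    Global.group_size() == Int(1),
)

print(compileTeal(program, mode=Mode.Signature, version=2))
```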
{
"alpha_fraction": 0.6702600717544556,
"alphanum_fraction": 0.6714578866958618,
"avg_line_length": 36.46154022216797,
"blob_id": "70165a0c8e542cd70d8839e8ed0fd1677c0a4e9c",
"content_id": "4ba229db84f730f98c2820cc360615d77e26d7bf",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5844,
"license_type": "permissive",
"max_line_length": 137,
"num_lines": 156,
"path": "/pyteal/ast/itxn.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from typing import Dict, Tuple, TYPE_CHECKING\n\nfrom ..types import TealType, require_type\nfrom ..errors import TealInputError, verifyTealVersion\nfrom ..ir import TealOp, Op, TealBlock\nfrom .expr import Expr\nfrom .txn import TxnField, TxnExprBuilder, TxnaExprBuilder, TxnObject\nfrom .seq import Seq\n\nif TYPE_CHECKING:\n from ..compiler import CompileOptions\n\n\nclass InnerTxnActionExpr(Expr):\n def __init__(self, begin: bool) -> None:\n super().__init__()\n self.begin = begin\n\n def __str__(self):\n return \"(InnerTxn{})\".format(\"Begin\" if self.begin else \"Submit\")\n\n def __teal__(self, options: \"CompileOptions\"):\n op = Op.itxn_begin if self.begin else Op.itxn_submit\n\n verifyTealVersion(\n op.min_version,\n options.version,\n \"TEAL version too low to create inner transactions\",\n )\n\n return TealBlock.FromOp(options, TealOp(self, op))\n\n def type_of(self):\n return TealType.none\n\n def has_return(self):\n return False\n\n\nclass InnerTxnFieldExpr(Expr):\n def __init__(self, field: TxnField, value: Expr) -> None:\n super().__init__()\n if field.is_array:\n raise TealInputError(\"Unexpected array field: {}\".format(field))\n require_type(value.type_of(), field.type_of())\n self.field = field\n self.value = value\n\n def __str__(self):\n return \"(InnerTxnSetField {} {})\".format(self.field.arg_name, self.value)\n\n def __teal__(self, options: \"CompileOptions\"):\n verifyTealVersion(\n Op.itxn_field.min_version,\n options.version,\n \"TEAL version too low to create inner transactions\",\n )\n\n return TealBlock.FromOp(\n options, TealOp(self, Op.itxn_field, self.field.arg_name), self.value\n )\n\n def type_of(self):\n return TealType.none\n\n def has_return(self):\n return False\n\n\nclass InnerTxnBuilder:\n \"\"\"This class represents expressions used to create, modify, and submit inner transactions.\n\n Inner transactions are transactions which applications can dynamically create. Each inner\n transaction will appear as a transaction inside of the current transaction being executed.\n\n As of TEAL version 5, only the transaction types :any:`TxnType.Payment`, :any:`TxnType.AssetTransfer`,\n :any:`TxnType.AssetConfig`, and :any:`TxnType.AssetFreeze` are allowed. Additionally, not all\n fields are allowed to be set. For example, it is not currently allowed to set the rekeyTo field\n of an inner transaction.\n \"\"\"\n\n @classmethod\n def Begin(cls) -> Expr:\n \"\"\"Begin preparation of a new inner transaction.\n\n This new inner transaction is initialized with its sender to the application address (:any:`Global.current_application_address`);\n fee to the minimum allowable, taking into account :code:`MinTxnFee` and credit from\n overpaying in earlier transactions; :code:`FirstValid`/:code:`LastValid` to the values in\n the top-level transaction, and all other fields to zero values.\n\n Requires TEAL version 5 or higher. This operation is only permitted in application mode.\n \"\"\"\n return InnerTxnActionExpr(True)\n\n @classmethod\n def Submit(cls) -> Expr:\n \"\"\"Execute the current inner transaction.\n\n :any:`InnerTxnBuilder.Begin` and :any:`InnerTxnBuilder.SetField` must be called before\n submitting an inner transaction.\n\n This will fail fail if 16 inner transactions have already been executed, or if the\n inner transaction itself fails. 
Upon failure, the current program will immediately exit and\n fail as well.\n\n If the inner transaction is successful, then its effects can be immediately observed by this\n program with stateful expressions such as :any:`Balance`. Additionally, the fields of the\n most recently submitted inner transaction can be examined using the :any:`InnerTxn` object.\n If the inner transaction creates an asset, the new asset ID can be found by looking at\n :any:`InnerTxn.created_asset_id() <TxnObject.created_asset_id>`.\n\n Requires TEAL version 5 or higher. This operation is only permitted in application mode.\n \"\"\"\n return InnerTxnActionExpr(False)\n\n @classmethod\n def SetField(cls, field: TxnField, value: Expr) -> Expr:\n \"\"\"Set a field of the current inner transaction.\n\n :any:`InnerTxnBuilder.Begin` must be called before setting any fields on an inner\n transaction.\n\n Requires TEAL version 5 or higher. This operation is only permitted in application mode.\n\n Args:\n field: The field to set on the inner transaction.\n value: The value to that the field should take. This must evaluate to a type that is\n compatible with the field being set.\n \"\"\"\n return InnerTxnFieldExpr(field, value)\n\n @classmethod\n def SetFields(cls, fields: Dict[TxnField, Expr]) -> Expr:\n \"\"\"Set multiple fields of the current inner transaction.\n\n :any:`InnerTxnBuilder.Begin` must be called before setting any fields on an inner\n transaction.\n\n Requires TEAL version 5 or higher. This operation is only permitted in application mode.\n\n Args:\n fields: A dictionary whose keys are fields to set and whose values are the value each\n field should take. Each value must evaluate to a type that is compatible with the\n field being set.\n \"\"\"\n fieldsToSet = [cls.SetField(field, value) for field, value in fields.items()]\n return Seq(fieldsToSet)\n\n\nInnerTxnBuilder.__module__ = \"pyteal\"\n\nInnerTxn: TxnObject = TxnObject(\n TxnExprBuilder(Op.itxn, \"InnerTxn\"), TxnaExprBuilder(Op.itxna, None, \"InnerTxna\")\n)\n\nInnerTxn.__module__ = \"pyteal\"\n"
},
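The docstrings in pyteal/ast/itxn.py above describe a Begin -> SetFields -> Submit lifecycle; the following sketch shows that lifecycle end to end. The refund-one-microAlgo policy is invented for illustration; as the docstrings state, this requires TEAL version 5 and application mode:

```python
from pyteal import (
    Approve, InnerTxnBuilder, Int, Mode, Seq, Txn, TxnField, TxnType, compileTeal,
)

# Sketch: an application that sends a 1 microAlgo payment back to the
# caller via an inner transaction, then approves.
program = Seq(
    InnerTxnBuilder.Begin(),
    InnerTxnBuilder.SetFields(
        {
            TxnField.type_enum: TxnType.Payment,
            TxnField.receiver: Txn.sender(),
            TxnField.amount: Int(1),  # arbitrary example amount
        }
    ),
    InnerTxnBuilder.Submit(),
    Approve(),
)

print(compileTeal(program, mode=Mode.Application, version=5))
```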
{
"alpha_fraction": 0.5998585820198059,
"alphanum_fraction": 0.6161187887191772,
"avg_line_length": 24.718181610107422,
"blob_id": "fee9ba0ddf5b2622bd47cabfe6ccdaca6ead477e",
"content_id": "0487a558f3908e840b8ea23388ea84a76757c82c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2829,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 110,
"path": "/pyteal/ast/seq_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\noptions = CompileOptions()\n\n\ndef test_seq_zero():\n for expr in (Seq(), Seq([])):\n assert expr.type_of() == TealType.none\n assert not expr.has_return()\n\n expected = TealSimpleBlock([])\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_seq_one():\n items = [Int(0)]\n expr = Seq(items)\n assert expr.type_of() == TealType.uint64\n\n expected, _ = items[0].__teal__(options)\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_seq_two():\n items = [App.localPut(Int(0), Bytes(\"key\"), Int(1)), Int(7)]\n expr = Seq(items)\n assert expr.type_of() == items[-1].type_of()\n\n expected, first_end = items[0].__teal__(options)\n first_end.setNextBlock(items[1].__teal__(options)[0])\n expected.addIncoming()\n expected = TealBlock.NormalizeBlocks(expected)\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_seq_three():\n items = [\n App.localPut(Int(0), Bytes(\"key1\"), Int(1)),\n App.localPut(Int(1), Bytes(\"key2\"), Bytes(\"value2\")),\n Pop(Bytes(\"end\")),\n ]\n expr = Seq(items)\n assert expr.type_of() == items[-1].type_of()\n\n expected, first_end = items[0].__teal__(options)\n second_start, second_end = items[1].__teal__(options)\n first_end.setNextBlock(second_start)\n third_start, _ = items[2].__teal__(options)\n second_end.setNextBlock(third_start)\n\n expected.addIncoming()\n expected = TealBlock.NormalizeBlocks(expected)\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_seq_has_return():\n exprWithReturn = Seq([App.localPut(Int(0), Bytes(\"key1\"), Int(1)), Return(Int(1))])\n assert exprWithReturn.has_return()\n\n exprWithoutReturn = Seq([App.localPut(Int(0), Bytes(\"key1\"), Int(1)), Int(1)])\n assert not exprWithoutReturn.has_return()\n\n\ndef test_seq_invalid():\n with pytest.raises(TealTypeError):\n Seq([Int(1), Pop(Int(2))])\n\n with pytest.raises(TealTypeError):\n Seq([Int(1), Int(2)])\n\n with pytest.raises(TealTypeError):\n Seq([Seq([Pop(Int(1)), Int(2)]), Int(3)])\n\n\ndef test_seq_overloads_equivalence():\n items = [\n App.localPut(Int(0), Bytes(\"key1\"), Int(1)),\n App.localPut(Int(1), Bytes(\"key2\"), Bytes(\"value2\")),\n Pop(Bytes(\"end\")),\n ]\n expr1 = Seq(items)\n expr2 = Seq(*items)\n\n expected = expr1.__teal__(options)\n actual = expr2.__teal__(options)\n\n assert actual == expected\n"
},
{
"alpha_fraction": 0.6313579082489014,
"alphanum_fraction": 0.6430237889289856,
"avg_line_length": 30.74814796447754,
"blob_id": "392ba1874dadff09bc182436153f04f2e3114560",
"content_id": "b52a0f092b75f24eb67a43d8caeb85b06820a5ed",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4286,
"license_type": "permissive",
"max_line_length": 90,
"num_lines": 135,
"path": "/pyteal/ast/global_.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from typing import TYPE_CHECKING\nfrom enum import Enum\n\nfrom ..types import TealType\nfrom ..errors import verifyFieldVersion\nfrom ..ir import TealOp, Op, TealBlock\nfrom .leafexpr import LeafExpr\n\nif TYPE_CHECKING:\n from ..compiler import CompileOptions\n\n\nclass GlobalField(Enum):\n min_txn_fee = (0, \"MinTxnFee\", TealType.uint64, 2)\n min_balance = (1, \"MinBalance\", TealType.uint64, 2)\n max_txn_life = (2, \"MaxTxnLife\", TealType.uint64, 2)\n zero_address = (3, \"ZeroAddress\", TealType.bytes, 2)\n group_size = (4, \"GroupSize\", TealType.uint64, 2)\n logic_sig_version = (5, \"LogicSigVersion\", TealType.uint64, 2)\n round = (6, \"Round\", TealType.uint64, 2)\n latest_timestamp = (7, \"LatestTimestamp\", TealType.uint64, 2)\n current_app_id = (8, \"CurrentApplicationID\", TealType.uint64, 2)\n creator_address = (9, \"CreatorAddress\", TealType.bytes, 3)\n current_app_address = (10, \"CurrentApplicationAddress\", TealType.bytes, 5)\n group_id = (11, \"GroupID\", TealType.bytes, 5)\n\n def __init__(self, id: int, name: str, type: TealType, min_version: int) -> None:\n self.id = id\n self.arg_name = name\n self.ret_type = type\n self.min_version = min_version\n\n def type_of(self) -> TealType:\n return self.ret_type\n\n\nGlobalField.__module__ = \"pyteal\"\n\n\nclass Global(LeafExpr):\n \"\"\"An expression that accesses a global property.\"\"\"\n\n def __init__(self, field: GlobalField) -> None:\n super().__init__()\n self.field = field\n\n def __teal__(self, options: \"CompileOptions\"):\n verifyFieldVersion(self.field.arg_name, self.field.min_version, options.version)\n\n op = TealOp(self, Op.global_, self.field.arg_name)\n return TealBlock.FromOp(options, op)\n\n def __str__(self):\n return \"(Global {})\".format(self.field.arg_name)\n\n def type_of(self):\n return self.field.type_of()\n\n @classmethod\n def min_txn_fee(cls) -> \"Global\":\n \"\"\"Get the minumum transaction fee in micro Algos.\"\"\"\n return cls(GlobalField.min_txn_fee)\n\n @classmethod\n def min_balance(cls) -> \"Global\":\n \"\"\"Get the minumum balance in micro Algos.\"\"\"\n return cls(GlobalField.min_balance)\n\n @classmethod\n def max_txn_life(cls) -> \"Global\":\n \"\"\"Get the maximum number of rounds a transaction can have.\"\"\"\n return cls(GlobalField.max_txn_life)\n\n @classmethod\n def zero_address(cls) -> \"Global\":\n \"\"\"Get the 32 byte zero address.\"\"\"\n return cls(GlobalField.zero_address)\n\n @classmethod\n def group_size(cls) -> \"Global\":\n \"\"\"Get the number of transactions in this atomic transaction group.\n\n This will be at least 1.\n \"\"\"\n return cls(GlobalField.group_size)\n\n @classmethod\n def logic_sig_version(cls) -> \"Global\":\n \"\"\"Get the maximum supported TEAL version.\"\"\"\n return cls(GlobalField.logic_sig_version)\n\n @classmethod\n def round(cls) -> \"Global\":\n \"\"\"Get the current round number.\"\"\"\n return cls(GlobalField.round)\n\n @classmethod\n def latest_timestamp(cls) -> \"Global\":\n \"\"\"Get the latest confirmed block UNIX timestamp.\n\n Fails if negative.\"\"\"\n return cls(GlobalField.latest_timestamp)\n\n @classmethod\n def current_application_id(cls) -> \"Global\":\n \"\"\"Get the ID of the current application executing.\n\n Fails during Signature mode.\"\"\"\n return cls(GlobalField.current_app_id)\n\n @classmethod\n def creator_address(cls) -> \"Global\":\n \"\"\"Address of the creator of the current application.\n\n Fails during Signature mode. 
Requires TEAL version 3 or higher.\"\"\"\n return cls(GlobalField.creator_address)\n\n @classmethod\n def current_application_address(cls) -> \"Global\":\n \"\"\"Get the address of that the current application controls.\n\n Fails during Signature mode. Requires TEAL version 5 or higher.\"\"\"\n return cls(GlobalField.current_app_address)\n\n @classmethod\n def group_id(cls) -> \"Global\":\n \"\"\"Get the ID of the current transaction group.\n\n If the current transaction is not part of a group, this will return 32 zero bytes.\n\n Requires TEAL version 5 or higher.\"\"\"\n return cls(GlobalField.group_id)\n\n\nGlobal.__module__ = \"pyteal\"\n"
},
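A short sketch combining a few of the Global accessors defined in pyteal/ast/global_.py above; the numeric thresholds are arbitrary illustration values, not recommendations:

```python
from pyteal import And, Global, Int, Mode, compileTeal

program = And(
    # GroupSize is always at least 1, per the docstring above.
    Global.group_size() <= Int(4),
    # MinTxnFee is denominated in microAlgos; 2000 is an arbitrary cap.
    Global.min_txn_fee() <= Int(2000),
)

print(compileTeal(program, mode=Mode.Signature, version=2))
```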
{
"alpha_fraction": 0.6111946105957031,
"alphanum_fraction": 0.6160781383514404,
"avg_line_length": 37.02857208251953,
"blob_id": "71c5fabf8a5cf91ab29fb2fc68b26695afa6fd13",
"content_id": "39322e8428f357801b23eabc3415fc32401575b9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2662,
"license_type": "permissive",
"max_line_length": 103,
"num_lines": 70,
"path": "/pyteal/ast/gload.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from typing import cast, Union, TYPE_CHECKING\n\nfrom ..types import TealType, require_type\nfrom ..ir import TealOp, Op, TealBlock\nfrom ..errors import TealInputError, verifyTealVersion\nfrom ..config import MAX_GROUP_SIZE, NUM_SLOTS\nfrom .expr import Expr\nfrom .int import Int\nfrom .leafexpr import LeafExpr\n\nif TYPE_CHECKING:\n from ..compiler import CompileOptions\n\n\nclass ImportScratchValue(LeafExpr):\n \"\"\"An expression to load a scratch value created by another transaction in the current group\"\"\"\n\n def __init__(self, txnIndex: Union[int, Expr], slotId: int) -> None:\n \"\"\"Create an expression to load a scratch space slot from a transaction in the current group.\n\n Requires TEAL version 4 or higher. This operation is only permitted in application mode.\n\n Args:\n txnIndex: The index of the transaction from which the created ID should be obtained.\n This index may be a Python int, or it may be a PyTeal expression that evaluates at\n runtime. If it's an expression, it must evaluate to a uint64. In all cases, the index\n must be less than the index of the current transaction.\n slotId: The index of the scratch slot that should be loaded. The index must be a Python int\n in the range [0-256).\n \"\"\"\n super().__init__()\n if type(txnIndex) is int:\n if txnIndex < 0 or txnIndex >= MAX_GROUP_SIZE:\n raise TealInputError(\n \"Invalid transaction index {}, shoud be in [0, {})\".format(\n txnIndex, MAX_GROUP_SIZE\n )\n )\n else:\n require_type(cast(Expr, txnIndex).type_of(), TealType.uint64)\n if slotId < 0 or slotId >= NUM_SLOTS:\n raise TealInputError(\n \"Invalid slot ID {}, shoud be in [0, {})\".format(slotId, NUM_SLOTS)\n )\n\n self.txnIndex = txnIndex\n self.slotId = slotId\n\n def __str__(self) -> str:\n return \"(Gload {} {})\".format(self.txnIndex, self.slotId)\n\n def __teal__(self, options: \"CompileOptions\"):\n verifyTealVersion(\n Op.gload.min_version,\n options.version,\n \"TEAL version too low to use Gload expression\",\n )\n\n if type(self.txnIndex) is int:\n op = TealOp(self, Op.gload, self.txnIndex, self.slotId)\n return TealBlock.FromOp(options, op)\n\n op = TealOp(self, Op.gloads, self.slotId)\n return TealBlock.FromOp(options, op, cast(Expr, self.txnIndex))\n\n def type_of(self):\n return TealType.anytype\n\n\nImportScratchValue.__module__ = \"pyteal\"\n"
},
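ImportScratchValue, defined in pyteal/ast/gload.py above, compiles to `gload` when the transaction index is a Python int and to `gloads` when it is an expression. A minimal usage sketch, assuming the application runs as a later transaction in a group whose first transaction wrote a uint64 into its scratch slot 0 (the expected value 42 is invented for the example):

```python
from pyteal import ImportScratchValue, Int, Mode, Return, compileTeal

# Read scratch slot 0 of group transaction 0. At runtime, transaction 0
# must precede the current transaction in the group.
loaded = ImportScratchValue(0, 0)

program = Return(loaded == Int(42))

print(compileTeal(program, mode=Mode.Application, version=4))
```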
{
"alpha_fraction": 0.5679202079772949,
"alphanum_fraction": 0.5713737607002258,
"avg_line_length": 29.658823013305664,
"blob_id": "f48203ed3011d743e52b69c971a3065f3447527b",
"content_id": "8768e4a7c31de1b296e0acf51392727df448ff23",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2606,
"license_type": "permissive",
"max_line_length": 101,
"num_lines": 85,
"path": "/pyteal/ast/seq.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from typing import List, cast, TYPE_CHECKING, overload\n\nfrom ..types import TealType, require_type\nfrom ..errors import TealInputError\nfrom ..ir import TealSimpleBlock\nfrom .expr import Expr\n\nif TYPE_CHECKING:\n from ..compiler import CompileOptions\n\n\nclass Seq(Expr):\n \"\"\"A control flow expression to represent a sequence of expressions.\"\"\"\n\n @overload\n def __init__(self, *exprs: Expr):\n ...\n\n @overload\n def __init__(self, exprs: List[Expr]):\n ...\n\n def __init__(self, *exprs):\n \"\"\"Create a new Seq expression.\n\n The new Seq expression will take on the return value of the final expression in the sequence.\n\n Args:\n exprs: The expressions to include in this sequence. All expressions that are not the\n final one in this list must not return any values.\n\n Example:\n .. code-block:: python\n\n Seq([\n App.localPut(Bytes(\"key\"), Bytes(\"value\")),\n Int(1)\n ])\n \"\"\"\n super().__init__()\n\n # Handle case where a list of expressions is provided\n if len(exprs) == 1 and isinstance(exprs[0], list):\n exprs = exprs[0]\n\n for i, expr in enumerate(exprs):\n if not isinstance(expr, Expr):\n raise TealInputError(\"{} is not a pyteal expression.\".format(expr))\n if i + 1 < len(exprs):\n require_type(expr.type_of(), TealType.none)\n\n self.args = exprs\n\n def __teal__(self, options: \"CompileOptions\"):\n start = TealSimpleBlock([])\n end = start\n for arg in self.args:\n argStart, argEnd = arg.__teal__(options)\n end.setNextBlock(argStart)\n end = argEnd\n return start, end\n\n def __str__(self):\n ret_str = \"(Seq\"\n for a in self.args:\n ret_str += \" \" + a.__str__()\n ret_str += \")\"\n return ret_str\n\n def type_of(self):\n if len(self.args) == 0:\n return TealType.none\n return self.args[-1].type_of()\n\n def has_return(self):\n # this expression declares it has a return op only if its final expression has a return op\n # TODO: technically if ANY expression, not just the final one, returns true for has_return,\n # this could return true as well. But in that case all expressions after the one that\n # returns true for has_return is dead code, so it could be optimized away\n if len(self.args) == 0:\n return False\n return self.args[-1].has_return()\n\n\nSeq.__module__ = \"pyteal\"\n"
},
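Because seq.py declares the two @overload signatures shown above, Seq accepts either a single list or variadic arguments. A small sketch of the equivalence that test_seq_overloads_equivalence (earlier in this file set) also checks:

```python
from pyteal import App, Bytes, Int, Seq, TealType

items = [
    App.localPut(Int(0), Bytes("key"), Int(1)),  # returns nothing
    Int(7),  # the final expression supplies Seq's value
]

expr_from_list = Seq(items)   # list form
expr_from_args = Seq(*items)  # varargs form

assert expr_from_list.type_of() == expr_from_args.type_of() == TealType.uint64
```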
{
"alpha_fraction": 0.6411009430885315,
"alphanum_fraction": 0.6480733752250671,
"avg_line_length": 24.70754623413086,
"blob_id": "7cb3d3a87c84d95db1506cee69bf6396941382ee",
"content_id": "a8ddbc9e3605eed51e484fa09e98dc656afee878",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2725,
"license_type": "permissive",
"max_line_length": 73,
"num_lines": 106,
"path": "/pyteal/ast/scratchvar_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\noptions = CompileOptions()\n\n\ndef test_scratchvar_type():\n myvar_default = ScratchVar()\n assert myvar_default.storage_type() == TealType.anytype\n assert myvar_default.store(Bytes(\"value\")).type_of() == TealType.none\n assert myvar_default.load().type_of() == TealType.anytype\n\n with pytest.raises(TealTypeError):\n myvar_default.store(Pop(Int(1)))\n\n myvar_int = ScratchVar(TealType.uint64)\n assert myvar_int.storage_type() == TealType.uint64\n assert myvar_int.store(Int(1)).type_of() == TealType.none\n assert myvar_int.load().type_of() == TealType.uint64\n\n with pytest.raises(TealTypeError):\n myvar_int.store(Bytes(\"value\"))\n\n with pytest.raises(TealTypeError):\n myvar_int.store(Pop(Int(1)))\n\n myvar_bytes = ScratchVar(TealType.bytes)\n assert myvar_bytes.storage_type() == TealType.bytes\n assert myvar_bytes.store(Bytes(\"value\")).type_of() == TealType.none\n assert myvar_bytes.load().type_of() == TealType.bytes\n\n with pytest.raises(TealTypeError):\n myvar_bytes.store(Int(0))\n\n with pytest.raises(TealTypeError):\n myvar_bytes.store(Pop(Int(1)))\n\n\ndef test_scratchvar_store():\n myvar = ScratchVar(TealType.bytes)\n arg = Bytes(\"value\")\n expr = myvar.store(arg)\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.byte, '\"value\"'),\n TealOp(expr, Op.store, myvar.slot),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_scratchvar_load():\n myvar = ScratchVar()\n expr = myvar.load()\n\n expected = TealSimpleBlock([TealOp(expr, Op.load, myvar.slot)])\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_scratchvar_assign_store():\n slotId = 2\n myvar = ScratchVar(TealType.uint64, slotId)\n arg = Int(10)\n expr = myvar.store(arg)\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 10),\n TealOp(expr, Op.store, myvar.slot),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_scratchvar_assign_load():\n slotId = 5\n myvar = ScratchVar(slotId=slotId)\n expr = myvar.load()\n\n expected = TealSimpleBlock([TealOp(expr, Op.load, myvar.slot)])\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n"
},
{
"alpha_fraction": 0.589958131313324,
"alphanum_fraction": 0.6150627732276917,
"avg_line_length": 19.340425491333008,
"blob_id": "4c97ad710a4ea5587e648b761d2fea2daf4cb566",
"content_id": "c665f2ff181d4ea5c2f69a551872246df7f03ddd",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 956,
"license_type": "permissive",
"max_line_length": 65,
"num_lines": 47,
"path": "/pyteal/ast/int_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\noptions = CompileOptions()\n\n\ndef test_int():\n values = [0, 1, 8, 232323, 2 ** 64 - 1]\n\n for value in values:\n expr = Int(value)\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(expr, Op.int, value)])\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_int_invalid():\n with pytest.raises(TealInputError):\n Int(6.7)\n\n with pytest.raises(TealInputError):\n Int(-1)\n\n with pytest.raises(TealInputError):\n Int(2 ** 64)\n\n with pytest.raises(TealInputError):\n Int(\"0\")\n\n\ndef test_enum_int():\n expr = EnumInt(\"OptIn\")\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(expr, Op.int, \"OptIn\")])\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n"
},
{
"alpha_fraction": 0.6003431677818298,
"alphanum_fraction": 0.6158421635627747,
"avg_line_length": 31.082569122314453,
"blob_id": "f9cd0f7d07f894f71e1ff205de36e9f5b6ed3d70",
"content_id": "792371cb8387c10468bc185b97f4e1eb0c2d5d36",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 17485,
"license_type": "permissive",
"max_line_length": 88,
"num_lines": 545,
"path": "/pyteal/compiler/flatten_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from collections import OrderedDict\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom ..ast import *\n\nfrom .flatten import flattenBlocks, flattenSubroutines\n\n\ndef test_flattenBlocks_none():\n blocks = []\n\n expected = []\n actual = flattenBlocks(blocks)\n\n assert actual == expected\n\n\ndef test_flattenBlocks_single_empty():\n blocks = [TealSimpleBlock([])]\n\n expected = []\n actual = flattenBlocks(blocks)\n\n assert actual == expected\n\n\ndef test_flattenBlocks_single_one():\n blocks = [TealSimpleBlock([TealOp(None, Op.int, 1)])]\n\n expected = [TealOp(None, Op.int, 1)]\n actual = flattenBlocks(blocks)\n\n assert actual == expected\n\n\ndef test_flattenBlocks_single_many():\n blocks = [\n TealSimpleBlock(\n [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.add),\n TealOp(None, Op.add),\n ]\n )\n ]\n\n expected = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.add),\n TealOp(None, Op.add),\n ]\n actual = flattenBlocks(blocks)\n\n assert actual == expected\n\n\ndef test_flattenBlocks_sequence():\n block5 = TealSimpleBlock([TealOp(None, Op.int, 5)])\n block4 = TealSimpleBlock([TealOp(None, Op.int, 4)])\n block4.setNextBlock(block5)\n block3 = TealSimpleBlock([TealOp(None, Op.int, 3)])\n block3.setNextBlock(block4)\n block2 = TealSimpleBlock([TealOp(None, Op.int, 2)])\n block2.setNextBlock(block3)\n block1 = TealSimpleBlock([TealOp(None, Op.int, 1)])\n block1.setNextBlock(block2)\n block1.addIncoming()\n block1.validateTree()\n blocks = [block1, block2, block3, block4, block5]\n\n expected = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.int, 4),\n TealOp(None, Op.int, 5),\n ]\n actual = flattenBlocks(blocks)\n\n assert actual == expected\n\n\ndef test_flattenBlocks_branch():\n blockTrue = TealSimpleBlock(\n [TealOp(None, Op.byte, '\"true\"'), TealOp(None, Op.return_)]\n )\n blockFalse = TealSimpleBlock(\n [TealOp(None, Op.byte, '\"false\"'), TealOp(None, Op.return_)]\n )\n block = TealConditionalBlock([TealOp(None, Op.int, 1)])\n block.setTrueBlock(blockTrue)\n block.setFalseBlock(blockFalse)\n block.addIncoming()\n block.validateTree()\n blocks = [block, blockFalse, blockTrue]\n\n expected = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.bnz, LabelReference(\"l2\")),\n TealOp(None, Op.byte, '\"false\"'),\n TealOp(None, Op.return_),\n TealLabel(None, LabelReference(\"l2\")),\n TealOp(None, Op.byte, '\"true\"'),\n TealOp(None, Op.return_),\n ]\n actual = flattenBlocks(blocks)\n\n assert actual == expected\n\n\ndef test_flattenBlocks_branch_equal_end_nodes():\n blockTrueEnd = TealSimpleBlock([TealOp(None, Op.return_)])\n blockTrue = TealSimpleBlock([TealOp(None, Op.byte, '\"true\"')])\n blockTrue.setNextBlock(blockTrueEnd)\n blockFalseEnd = TealSimpleBlock([TealOp(None, Op.return_)])\n blockFalse = TealSimpleBlock([TealOp(None, Op.byte, '\"false\"')])\n blockFalse.setNextBlock(blockFalseEnd)\n block = TealConditionalBlock([TealOp(None, Op.int, 1)])\n block.setTrueBlock(blockTrue)\n block.setFalseBlock(blockFalse)\n block.addIncoming()\n block.validateTree()\n blocks = [block, blockFalse, blockFalseEnd, blockTrue, blockTrueEnd]\n\n expected = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.bnz, LabelReference(\"l3\")),\n TealOp(None, Op.byte, '\"false\"'),\n TealOp(None, Op.return_),\n TealLabel(None, LabelReference(\"l3\")),\n TealOp(None, Op.byte, '\"true\"'),\n TealOp(None, 
Op.return_),\n ]\n actual = flattenBlocks(blocks)\n\n assert actual == expected\n\n\ndef test_flattenBlocks_branch_converge():\n blockEnd = TealSimpleBlock([TealOp(None, Op.return_)])\n blockTrue = TealSimpleBlock([TealOp(None, Op.byte, '\"true\"')])\n blockTrue.setNextBlock(blockEnd)\n blockFalse = TealSimpleBlock([TealOp(None, Op.byte, '\"false\"')])\n blockFalse.setNextBlock(blockEnd)\n block = TealConditionalBlock([TealOp(None, Op.int, 1)])\n block.setTrueBlock(blockTrue)\n block.setFalseBlock(blockFalse)\n block.addIncoming()\n block.validateTree()\n blocks = [block, blockFalse, blockTrue, blockEnd]\n\n expected = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.bnz, LabelReference(\"l2\")),\n TealOp(None, Op.byte, '\"false\"'),\n TealOp(None, Op.b, LabelReference(\"l3\")),\n TealLabel(None, LabelReference(\"l2\")),\n TealOp(None, Op.byte, '\"true\"'),\n TealLabel(None, LabelReference(\"l3\")),\n TealOp(None, Op.return_),\n ]\n actual = flattenBlocks(blocks)\n\n assert actual == expected\n\n\ndef test_flattenBlocks_multiple_branch():\n blockTrueTrue = TealSimpleBlock(\n [TealOp(None, Op.byte, '\"true true\"'), TealOp(None, Op.return_)]\n )\n blockTrueFalse = TealSimpleBlock(\n [TealOp(None, Op.byte, '\"true false\"'), TealOp(None, Op.err)]\n )\n blockTrueBranch = TealConditionalBlock([])\n blockTrueBranch.setTrueBlock(blockTrueTrue)\n blockTrueBranch.setFalseBlock(blockTrueFalse)\n blockTrue = TealSimpleBlock([TealOp(None, Op.byte, '\"true\"')])\n blockTrue.setNextBlock(blockTrueBranch)\n blockFalse = TealSimpleBlock(\n [TealOp(None, Op.byte, '\"false\"'), TealOp(None, Op.return_)]\n )\n block = TealConditionalBlock([TealOp(None, Op.int, 1)])\n block.setTrueBlock(blockTrue)\n block.setFalseBlock(blockFalse)\n block.addIncoming()\n block.validateTree()\n blocks = [\n block,\n blockFalse,\n blockTrue,\n blockTrueBranch,\n blockTrueFalse,\n blockTrueTrue,\n ]\n\n expected = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.bnz, LabelReference(\"l2\")),\n TealOp(None, Op.byte, '\"false\"'),\n TealOp(None, Op.return_),\n TealLabel(None, LabelReference(\"l2\")),\n TealOp(None, Op.byte, '\"true\"'),\n TealOp(None, Op.bnz, LabelReference(\"l5\")),\n TealOp(None, Op.byte, '\"true false\"'),\n TealOp(None, Op.err),\n TealLabel(None, LabelReference(\"l5\")),\n TealOp(None, Op.byte, '\"true true\"'),\n TealOp(None, Op.return_),\n ]\n actual = flattenBlocks(blocks)\n\n assert actual == expected\n\n\ndef test_flattenBlocks_multiple_branch_converge():\n blockEnd = TealSimpleBlock([TealOp(None, Op.return_)])\n blockTrueTrue = TealSimpleBlock([TealOp(None, Op.byte, '\"true true\"')])\n blockTrueTrue.setNextBlock(blockEnd)\n blockTrueFalse = TealSimpleBlock(\n [TealOp(None, Op.byte, '\"true false\"'), TealOp(None, Op.err)]\n )\n blockTrueBranch = TealConditionalBlock([])\n blockTrueBranch.setTrueBlock(blockTrueTrue)\n blockTrueBranch.setFalseBlock(blockTrueFalse)\n blockTrue = TealSimpleBlock([TealOp(None, Op.byte, '\"true\"')])\n blockTrue.setNextBlock(blockTrueBranch)\n blockFalse = TealSimpleBlock([TealOp(None, Op.byte, '\"false\"')])\n blockFalse.setNextBlock(blockEnd)\n block = TealConditionalBlock([TealOp(None, Op.int, 1)])\n block.setTrueBlock(blockTrue)\n block.setFalseBlock(blockFalse)\n block.addIncoming()\n block.validateTree()\n blocks = [\n block,\n blockFalse,\n blockTrue,\n blockTrueBranch,\n blockTrueFalse,\n blockTrueTrue,\n blockEnd,\n ]\n\n expected = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.bnz, LabelReference(\"l2\")),\n TealOp(None, Op.byte, '\"false\"'),\n TealOp(None, Op.b, 
LabelReference(\"l6\")),\n TealLabel(None, LabelReference(\"l2\")),\n TealOp(None, Op.byte, '\"true\"'),\n TealOp(None, Op.bnz, LabelReference(\"l5\")),\n TealOp(None, Op.byte, '\"true false\"'),\n TealOp(None, Op.err),\n TealLabel(None, LabelReference(\"l5\")),\n TealOp(None, Op.byte, '\"true true\"'),\n TealLabel(None, LabelReference(\"l6\")),\n TealOp(None, Op.return_),\n ]\n actual = flattenBlocks(blocks)\n\n assert actual == expected\n\n\ndef test_flattenSubroutines_no_subroutines():\n subroutineToLabel = OrderedDict()\n\n l1Label = LabelReference(\"l1\")\n mainOps = [\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, l1Label),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.return_),\n TealLabel(None, l1Label),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.return_),\n ]\n\n subroutineMapping = {None: mainOps}\n\n expectedL1Label = LabelReference(\"main_l1\")\n expected = [\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, expectedL1Label),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.return_),\n TealLabel(None, expectedL1Label),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.return_),\n ]\n\n actual = flattenSubroutines(subroutineMapping, subroutineToLabel)\n\n assert actual == expected\n\n\ndef test_flattenSubroutines_1_subroutine():\n subroutine = SubroutineDefinition(lambda: Int(1) + Int(2) + Int(3), TealType.uint64)\n\n subroutineToLabel = OrderedDict()\n subroutineToLabel[subroutine] = \"sub0\"\n\n subroutineLabel = LabelReference(subroutineToLabel[subroutine])\n subroutineOps = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.add),\n TealOp(None, Op.add),\n TealOp(None, Op.retsub),\n ]\n\n l1Label = LabelReference(\"l1\")\n mainOps = [\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, l1Label),\n TealOp(None, Op.callsub, subroutineLabel),\n TealOp(None, Op.return_),\n TealLabel(None, l1Label),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.return_),\n ]\n\n subroutineMapping = {None: mainOps, subroutine: subroutineOps}\n\n expectedL1Label = LabelReference(\"main_l1\")\n expectedSubroutineLabel = LabelReference(\"sub0\")\n expected = [\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, expectedL1Label),\n TealOp(None, Op.callsub, expectedSubroutineLabel),\n TealOp(None, Op.return_),\n TealLabel(None, expectedL1Label),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.return_),\n TealLabel(None, expectedSubroutineLabel, \"<lambda>\"),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.add),\n TealOp(None, Op.add),\n TealOp(None, Op.retsub),\n ]\n\n actual = flattenSubroutines(subroutineMapping, subroutineToLabel)\n\n assert actual == expected\n\n\ndef test_flattenSubroutines_multiple_subroutines():\n def sub1Impl():\n return None\n\n def sub2Impl(a1):\n return None\n\n def sub3Impl(a1, a2, a3):\n return None\n\n subroutine1 = SubroutineDefinition(sub1Impl, TealType.uint64)\n subroutine2 = SubroutineDefinition(sub2Impl, TealType.bytes)\n subroutine3 = SubroutineDefinition(sub3Impl, TealType.none)\n\n subroutineToLabel = OrderedDict()\n subroutineToLabel[subroutine1] = \"sub0\"\n subroutineToLabel[subroutine2] = \"sub1\"\n subroutineToLabel[subroutine3] = \"sub2\"\n\n subroutine1Label = LabelReference(subroutineToLabel[subroutine1])\n subroutine1Ops = [\n TealOp(None, Op.int, 1),\n 
TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.add),\n TealOp(None, Op.add),\n TealOp(None, Op.retsub),\n ]\n\n subroutine2Label = LabelReference(subroutineToLabel[subroutine2])\n subroutine2L1Label = LabelReference(\"l1\")\n subroutine2L2Label = LabelReference(\"l2\")\n subroutine2L3Label = LabelReference(\"l3\")\n subroutine2L4Label = LabelReference(\"l4\")\n subroutine2Ops = [\n TealOp(None, Op.dup),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutine2L1Label),\n TealOp(None, Op.dup),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutine2L2Label),\n TealOp(None, Op.dup),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutine2L3Label),\n TealOp(None, Op.dup),\n TealOp(None, Op.int, 4),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutine2L4Label),\n TealOp(None, Op.err),\n TealLabel(None, subroutine2L1Label),\n TealOp(None, Op.pop),\n TealOp(None, Op.byte, '\"1\"'),\n TealOp(None, Op.retsub),\n TealLabel(None, subroutine2L2Label),\n TealOp(None, Op.pop),\n TealOp(None, Op.byte, '\"2\"'),\n TealOp(None, Op.retsub),\n TealLabel(None, subroutine2L3Label),\n TealOp(None, Op.pop),\n TealOp(None, Op.byte, '\"3\"'),\n TealOp(None, Op.retsub),\n TealLabel(None, subroutine2L4Label),\n TealOp(None, Op.pop),\n TealOp(None, Op.byte, '\"4\"'),\n TealOp(None, Op.retsub),\n ]\n\n subroutine3Label = LabelReference(subroutineToLabel[subroutine3])\n subroutine3L1Label = LabelReference(\"l1\")\n subroutine3Ops = [\n TealLabel(None, subroutine3L1Label),\n TealOp(None, Op.app_local_put),\n TealOp(None, Op.retsub),\n TealOp(None, Op.b, subroutine3L1Label),\n ]\n\n l1Label = LabelReference(\"l1\")\n mainOps = [\n TealOp(None, Op.byte, '\"account\"'),\n TealOp(None, Op.byte, '\"key\"'),\n TealOp(None, Op.byte, '\"value\"'),\n TealOp(None, Op.callsub, subroutine3Label),\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, l1Label),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.callsub, subroutine2Label),\n TealOp(None, Op.pop),\n TealOp(None, Op.callsub, subroutine1Label),\n TealOp(None, Op.return_),\n TealLabel(None, l1Label),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.return_),\n ]\n\n subroutineMapping = {\n None: mainOps,\n subroutine1: subroutine1Ops,\n subroutine2: subroutine2Ops,\n subroutine3: subroutine3Ops,\n }\n\n expectedL1Label = LabelReference(\"main_l1\")\n expectedSubroutine1Label = LabelReference(\"sub0\")\n expectedSubroutine2Label = LabelReference(\"sub1\")\n expectedSubroutine2L1Label = LabelReference(\"sub1_l1\")\n expectedSubroutine2L2Label = LabelReference(\"sub1_l2\")\n expectedSubroutine2L3Label = LabelReference(\"sub1_l3\")\n expectedSubroutine2L4Label = LabelReference(\"sub1_l4\")\n expectedSubroutine3Label = LabelReference(\"sub2\")\n expectedSubroutine3L1Label = LabelReference(\"sub2_l1\")\n expected = [\n TealOp(None, Op.byte, '\"account\"'),\n TealOp(None, Op.byte, '\"key\"'),\n TealOp(None, Op.byte, '\"value\"'),\n TealOp(None, Op.callsub, subroutine3Label),\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, l1Label),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.callsub, subroutine2Label),\n TealOp(None, Op.pop),\n TealOp(None, Op.callsub, subroutine1Label),\n TealOp(None, Op.return_),\n TealLabel(None, l1Label),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.return_),\n TealLabel(None, expectedSubroutine1Label, \"sub1Impl\"),\n TealOp(None, Op.int, 1),\n 
TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.add),\n TealOp(None, Op.add),\n TealOp(None, Op.retsub),\n TealLabel(None, expectedSubroutine2Label, \"sub2Impl\"),\n TealOp(None, Op.dup),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, expectedSubroutine2L1Label),\n TealOp(None, Op.dup),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, expectedSubroutine2L2Label),\n TealOp(None, Op.dup),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, expectedSubroutine2L3Label),\n TealOp(None, Op.dup),\n TealOp(None, Op.int, 4),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, expectedSubroutine2L4Label),\n TealOp(None, Op.err),\n TealLabel(None, expectedSubroutine2L1Label),\n TealOp(None, Op.pop),\n TealOp(None, Op.byte, '\"1\"'),\n TealOp(None, Op.retsub),\n TealLabel(None, expectedSubroutine2L2Label),\n TealOp(None, Op.pop),\n TealOp(None, Op.byte, '\"2\"'),\n TealOp(None, Op.retsub),\n TealLabel(None, expectedSubroutine2L3Label),\n TealOp(None, Op.pop),\n TealOp(None, Op.byte, '\"3\"'),\n TealOp(None, Op.retsub),\n TealLabel(None, expectedSubroutine2L4Label),\n TealOp(None, Op.pop),\n TealOp(None, Op.byte, '\"4\"'),\n TealOp(None, Op.retsub),\n TealLabel(None, expectedSubroutine3Label, \"sub3Impl\"),\n TealLabel(None, expectedSubroutine3L1Label),\n TealOp(None, Op.app_local_put),\n TealOp(None, Op.retsub),\n TealOp(None, Op.b, expectedSubroutine3L1Label),\n ]\n\n actual = flattenSubroutines(subroutineMapping, subroutineToLabel)\n\n assert actual == expected\n"
},
{
"alpha_fraction": 0.6159629821777344,
"alphanum_fraction": 0.6451706290245056,
"avg_line_length": 27.344263076782227,
"blob_id": "e2bc63d882c83631de8cca7a43622de050ac274c",
"content_id": "419030e39ff6a991cbc2ef0a7e4fc615f91f1198",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3458,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 122,
"path": "/pyteal/ast/cond_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\noptions = CompileOptions()\n\n\ndef test_cond_one_pred():\n expr = Cond([Int(1), Int(2)])\n assert expr.type_of() == TealType.uint64\n\n cond1, _ = Int(1).__teal__(options)\n pred1, _ = Int(2).__teal__(options)\n cond1Branch = TealConditionalBlock([])\n cond1.setNextBlock(cond1Branch)\n cond1Branch.setTrueBlock(pred1)\n cond1Branch.setFalseBlock(Err().__teal__(options)[0])\n pred1.setNextBlock(TealSimpleBlock([]))\n expected = cond1\n\n actual, _ = expr.__teal__(options)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_cond_two_pred():\n expr = Cond([Int(1), Bytes(\"one\")], [Int(0), Bytes(\"zero\")])\n assert expr.type_of() == TealType.bytes\n\n cond1, _ = Int(1).__teal__(options)\n pred1, _ = Bytes(\"one\").__teal__(options)\n cond1Branch = TealConditionalBlock([])\n cond2, _ = Int(0).__teal__(options)\n pred2, _ = Bytes(\"zero\").__teal__(options)\n cond2Branch = TealConditionalBlock([])\n end = TealSimpleBlock([])\n\n cond1.setNextBlock(cond1Branch)\n cond1Branch.setTrueBlock(pred1)\n cond1Branch.setFalseBlock(cond2)\n pred1.setNextBlock(end)\n\n cond2.setNextBlock(cond2Branch)\n cond2Branch.setTrueBlock(pred2)\n cond2Branch.setFalseBlock(Err().__teal__(options)[0])\n pred2.setNextBlock(end)\n\n expected = cond1\n\n actual, _ = expr.__teal__(options)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_cond_three_pred():\n expr = Cond([Int(1), Int(2)], [Int(3), Int(4)], [Int(5), Int(6)])\n assert expr.type_of() == TealType.uint64\n\n cond1, _ = Int(1).__teal__(options)\n pred1, _ = Int(2).__teal__(options)\n cond1Branch = TealConditionalBlock([])\n cond2, _ = Int(3).__teal__(options)\n pred2, _ = Int(4).__teal__(options)\n cond2Branch = TealConditionalBlock([])\n cond3, _ = Int(5).__teal__(options)\n pred3, _ = Int(6).__teal__(options)\n cond3Branch = TealConditionalBlock([])\n end = TealSimpleBlock([])\n\n cond1.setNextBlock(cond1Branch)\n cond1Branch.setTrueBlock(pred1)\n cond1Branch.setFalseBlock(cond2)\n pred1.setNextBlock(end)\n\n cond2.setNextBlock(cond2Branch)\n cond2Branch.setTrueBlock(pred2)\n cond2Branch.setFalseBlock(cond3)\n pred2.setNextBlock(end)\n\n cond3.setNextBlock(cond3Branch)\n cond3Branch.setTrueBlock(pred3)\n cond3Branch.setFalseBlock(Err().__teal__(options)[0])\n pred3.setNextBlock(end)\n\n expected = cond1\n\n actual, _ = expr.__teal__(options)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_cond_has_return():\n exprWithReturn = Cond([Int(1), Return(Int(1))], [Int(0), Return(Int(0))])\n assert exprWithReturn.has_return()\n\n exprWithoutReturn = Cond([Int(1), Bytes(\"one\")], [Int(0), Bytes(\"zero\")])\n assert not exprWithoutReturn.has_return()\n\n exprSemiReturn = Cond(\n [Int(1), Return(Int(1))], [Int(0), App.globalPut(Bytes(\"key\"), Bytes(\"value\"))]\n )\n assert not exprSemiReturn.has_return()\n\n\ndef test_cond_invalid():\n with pytest.raises(TealInputError):\n Cond()\n\n with pytest.raises(TealInputError):\n Cond([])\n\n with pytest.raises(TealTypeError):\n Cond([Int(1), Int(2)], [Int(2), Txn.receiver()])\n\n with pytest.raises(TealTypeError):\n Cond([Arg(0), Int(2)])\n"
},
{
"alpha_fraction": 0.5119616985321045,
"alphanum_fraction": 0.5741626620292664,
"avg_line_length": 18,
"blob_id": "3cb381e3db030aa2b04b14abe0157a4388247017",
"content_id": "81ac8c7a5863379187b8a2f53c870b004aad314b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 418,
"license_type": "permissive",
"max_line_length": 52,
"num_lines": 22,
"path": "/pyteal/ir/tealcomponent_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n\ndef test_EqualityContext():\n expr1 = Int(1)\n expr2 = Int(1)\n\n op1 = TealOp(expr1, Op.int, 1)\n op2 = TealOp(expr2, Op.int, 1)\n\n assert op1 == op1\n assert op2 == op2\n assert op1 != op2\n assert op2 != op1\n\n with TealComponent.Context.ignoreExprEquality():\n assert op1 == op1\n assert op2 == op2\n assert op1 == op2\n assert op2 == op1\n"
},
{
"alpha_fraction": 0.6299527287483215,
"alphanum_fraction": 0.6478192210197449,
"avg_line_length": 22.728179931640625,
"blob_id": "e71d5073aa650720c5eed39e8d6d4a38df5d9720",
"content_id": "7da54a3a482ec4d73f4e5bb58a5b0bd27b61f2a0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9515,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 401,
"path": "/pyteal/ast/unaryexpr_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\nteal2Options = CompileOptions(version=2)\nteal3Options = CompileOptions(version=3)\nteal4Options = CompileOptions(version=4)\nteal5Options = CompileOptions(version=5)\n\n\ndef test_btoi():\n arg = Arg(1)\n expr = Btoi(arg)\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(arg, Op.arg, 1), TealOp(expr, Op.btoi)])\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_btoi_invalid():\n with pytest.raises(TealTypeError):\n Btoi(Int(1))\n\n\ndef test_itob():\n arg = Int(1)\n expr = Itob(arg)\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock([TealOp(arg, Op.int, 1), TealOp(expr, Op.itob)])\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_itob_invalid():\n with pytest.raises(TealTypeError):\n Itob(Arg(1))\n\n\ndef test_len():\n arg = Txn.receiver()\n expr = Len(arg)\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(arg, Op.txn, \"Receiver\"), TealOp(expr, Op.len)])\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_len_invalid():\n with pytest.raises(TealTypeError):\n Len(Int(1))\n\n\ndef test_bitlen_int():\n arg = Int(7)\n expr = BitLen(arg)\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(arg, Op.int, 7), TealOp(expr, Op.bitlen)])\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_bitlen_bytes():\n arg = Txn.receiver()\n expr = BitLen(arg)\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [TealOp(arg, Op.txn, \"Receiver\"), TealOp(expr, Op.bitlen)]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_sha256():\n arg = Arg(0)\n expr = Sha256(arg)\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock([TealOp(arg, Op.arg, 0), TealOp(expr, Op.sha256)])\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_sha256_invalid():\n with pytest.raises(TealTypeError):\n Sha256(Int(1))\n\n\ndef test_sha512_256():\n arg = Arg(0)\n expr = Sha512_256(arg)\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock([TealOp(arg, Op.arg, 0), TealOp(expr, Op.sha512_256)])\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_sha512_256_invalid():\n with pytest.raises(TealTypeError):\n Sha512_256(Int(1))\n\n\ndef test_keccak256():\n arg = Arg(0)\n expr = Keccak256(arg)\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock([TealOp(arg, Op.arg, 0), TealOp(expr, Op.keccak256)])\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_keccak256_invalid():\n with pytest.raises(TealTypeError):\n Keccak256(Int(1))\n\n\ndef test_not():\n arg = Int(1)\n expr = Not(arg)\n 
assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(arg, Op.int, 1), TealOp(expr, Op.logic_not)])\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_not_invalid():\n with pytest.raises(TealTypeError):\n Not(Txn.receiver())\n\n\ndef test_bitwise_not():\n arg = Int(2)\n expr = BitwiseNot(arg)\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(arg, Op.int, 2), TealOp(expr, Op.bitwise_not)])\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_bitwise_not_overload():\n arg = Int(10)\n expr = ~arg\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(arg, Op.int, 10), TealOp(expr, Op.bitwise_not)])\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_bitwise_not_invalid():\n with pytest.raises(TealTypeError):\n BitwiseNot(Txn.receiver())\n\n\ndef test_sqrt():\n arg = Int(4)\n expr = Sqrt(arg)\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(arg, Op.int, 4), TealOp(expr, Op.sqrt)])\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_sqrt_invalid():\n with pytest.raises(TealTypeError):\n Sqrt(Txn.receiver())\n\n\ndef test_pop():\n arg_int = Int(3)\n expr_int = Pop(arg_int)\n assert expr_int.type_of() == TealType.none\n\n expected_int = TealSimpleBlock(\n [TealOp(arg_int, Op.int, 3), TealOp(expr_int, Op.pop)]\n )\n\n actual_int, _ = expr_int.__teal__(teal2Options)\n actual_int.addIncoming()\n actual_int = TealBlock.NormalizeBlocks(actual_int)\n\n assert actual_int == expected_int\n\n arg_bytes = Txn.receiver()\n expr_bytes = Pop(arg_bytes)\n assert expr_bytes.type_of() == TealType.none\n\n expected_bytes = TealSimpleBlock(\n [TealOp(arg_bytes, Op.txn, \"Receiver\"), TealOp(expr_bytes, Op.pop)]\n )\n\n actual_bytes, _ = expr_bytes.__teal__(teal2Options)\n actual_bytes.addIncoming()\n actual_bytes = TealBlock.NormalizeBlocks(actual_bytes)\n\n assert actual_bytes == expected_bytes\n\n\ndef test_pop_invalid():\n expr = Pop(Int(0))\n with pytest.raises(TealTypeError):\n Pop(expr)\n\n\ndef test_balance():\n arg = Int(0)\n expr = Balance(arg)\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(arg, Op.int, 0), TealOp(expr, Op.balance)])\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_balance_direct_ref():\n arg = Txn.sender()\n expr = Balance(arg)\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [TealOp(arg, Op.txn, \"Sender\"), TealOp(expr, Op.balance)]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_balance_invalid():\n with pytest.raises(TealTypeError):\n args = [Txn.sender(), Int(17)]\n expr = AssetHolding.balance(args[0], args[1])\n MinBalance(expr)\n\n\ndef test_min_balance():\n arg = Int(0)\n expr = MinBalance(arg)\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(arg, Op.int, 0), TealOp(expr, Op.min_balance)])\n\n actual, _ = expr.__teal__(teal3Options)\n 
actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_min_balance_direct_ref():\n arg = Txn.sender()\n expr = MinBalance(arg)\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [TealOp(arg, Op.txn, \"Sender\"), TealOp(expr, Op.min_balance)]\n )\n\n actual, _ = expr.__teal__(teal3Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_min_balance_invalid():\n with pytest.raises(TealTypeError):\n args = [Txn.sender(), Int(17)]\n expr = AssetHolding.balance(args[0], args[1])\n MinBalance(expr)\n\n\ndef test_b_not():\n arg = Bytes(\"base16\", \"0xFFFFFFFFFFFFFFFFFF\")\n expr = BytesNot(arg)\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [TealOp(arg, Op.byte, \"0xFFFFFFFFFFFFFFFFFF\"), TealOp(expr, Op.b_not)]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_b_not_invalid():\n with pytest.raises(TealTypeError):\n BytesNot(Int(2))\n\n\ndef test_b_zero():\n arg = Int(8)\n expr = BytesZero(arg)\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock([TealOp(arg, Op.int, 8), TealOp(expr, Op.bzero)])\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_b_zero_invalid():\n with pytest.raises(TealTypeError):\n BytesZero(Bytes(\"base16\", \"0x11\"))\n\n\ndef test_log():\n arg = Bytes(\"message\")\n expr = Log(arg)\n assert expr.type_of() == TealType.none\n assert not expr.has_return()\n\n expected = TealSimpleBlock(\n [TealOp(arg, Op.byte, '\"message\"'), TealOp(expr, Op.log)]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal4Options)\n\n\ndef test_log_invalid():\n with pytest.raises(TealTypeError):\n Log(Int(7))\n"
},
{
"alpha_fraction": 0.5968908071517944,
"alphanum_fraction": 0.6048445701599121,
"avg_line_length": 24.61111068725586,
"blob_id": "4eade2959587cfaa10b1927f69e38831eb432443",
"content_id": "9661ef1b52fb1c176d6de0338afeb24f626dd588",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2766,
"license_type": "permissive",
"max_line_length": 86,
"num_lines": 108,
"path": "/pyteal/ast/scratch_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\noptions = CompileOptions()\n\n\ndef test_scratch_slot():\n slot = ScratchSlot()\n assert slot == slot\n assert slot.__hash__() == slot.__hash__()\n assert slot != ScratchSlot()\n\n with TealComponent.Context.ignoreExprEquality():\n assert (\n slot.store().__teal__(options)[0]\n == ScratchStackStore(slot).__teal__(options)[0]\n )\n assert (\n slot.store(Int(1)).__teal__(options)[0]\n == ScratchStore(slot, Int(1)).__teal__(options)[0]\n )\n\n assert slot.load().type_of() == TealType.anytype\n assert slot.load(TealType.uint64).type_of() == TealType.uint64\n assert (\n slot.load().__teal__(options)[0] == ScratchLoad(slot).__teal__(options)[0]\n )\n\n\ndef test_scratch_load_default():\n slot = ScratchSlot()\n expr = ScratchLoad(slot)\n assert expr.type_of() == TealType.anytype\n\n expected = TealSimpleBlock([TealOp(expr, Op.load, slot)])\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_scratch_load_type():\n for type in (TealType.uint64, TealType.bytes, TealType.anytype):\n slot = ScratchSlot()\n expr = ScratchLoad(slot, type)\n assert expr.type_of() == type\n\n expected = TealSimpleBlock([TealOp(expr, Op.load, slot)])\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_scratch_store():\n for value in (\n Int(1),\n Bytes(\"test\"),\n App.globalGet(Bytes(\"key\")),\n If(Int(1), Int(2), Int(3)),\n ):\n slot = ScratchSlot()\n expr = ScratchStore(slot, value)\n assert expr.type_of() == TealType.none\n\n expected, valueEnd = value.__teal__(options)\n storeBlock = TealSimpleBlock([TealOp(expr, Op.store, slot)])\n valueEnd.setNextBlock(storeBlock)\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_scratch_stack_store():\n slot = ScratchSlot()\n expr = ScratchStackStore(slot)\n assert expr.type_of() == TealType.none\n\n expected = TealSimpleBlock([TealOp(expr, Op.store, slot)])\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_scratch_assign_id():\n slot = ScratchSlot(255)\n expr = ScratchStackStore(slot)\n assert expr.type_of() == TealType.none\n\n expected = TealSimpleBlock([TealOp(expr, Op.store, slot)])\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_scratch_assign_id_invalid():\n with pytest.raises(TealInputError):\n slot = ScratchSlot(-1)\n\n with pytest.raises(TealInputError):\n slot = ScratchSlot(NUM_SLOTS)\n"
},
{
"alpha_fraction": 0.46301165223121643,
"alphanum_fraction": 0.47014644742012024,
"avg_line_length": 36.507041931152344,
"blob_id": "dce43d58ee0139aedff0aa30347234feff1c80b1",
"content_id": "d9e4de00c72f7e5b6e4e8547e04b52bafa362174",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2663,
"license_type": "permissive",
"max_line_length": 80,
"num_lines": 71,
"path": "/pyteal/ast/maybe_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\noptions = CompileOptions()\n\n\ndef test_maybe_value():\n ops = (\n Op.app_global_get_ex,\n Op.app_local_get_ex,\n Op.asset_holding_get,\n Op.asset_params_get,\n )\n types = (TealType.uint64, TealType.bytes, TealType.anytype)\n immedate_argv = ([], [\"AssetFrozen\"])\n argv = ([], [Int(0)], [Int(1), Int(2)])\n\n for op in ops:\n for type in types:\n for iargs in immedate_argv:\n for args in argv:\n expr = MaybeValue(op, type, immediate_args=iargs, args=args)\n\n assert expr.slotOk != expr.slotValue\n\n assert expr.hasValue().type_of() == TealType.uint64\n with TealComponent.Context.ignoreExprEquality():\n assert expr.hasValue().__teal__(options) == ScratchLoad(\n expr.slotOk\n ).__teal__(options)\n\n assert expr.value().type_of() == type\n with TealComponent.Context.ignoreExprEquality():\n assert expr.value().__teal__(options) == ScratchLoad(\n expr.slotValue\n ).__teal__(options)\n\n assert expr.type_of() == TealType.none\n\n expected_call = TealSimpleBlock(\n [\n TealOp(expr, op, *iargs),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n if len(args) == 0:\n expected = expected_call\n elif len(args) == 1:\n expected, after_arg = args[0].__teal__(options)\n after_arg.setNextBlock(expected_call)\n elif len(args) == 2:\n expected, after_arg_1 = args[0].__teal__(options)\n arg_2, after_arg_2 = args[1].__teal__(options)\n after_arg_1.setNextBlock(arg_2)\n after_arg_2.setNextBlock(expected_call)\n\n expected.addIncoming()\n expected = TealBlock.NormalizeBlocks(expected)\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n"
},
{
"alpha_fraction": 0.6541095972061157,
"alphanum_fraction": 0.6563926935195923,
"avg_line_length": 20.899999618530273,
"blob_id": "a06e32fecb20cb917d6af87ef9da8ff380705008",
"content_id": "30f4ccb086e54302f90c5c506d58d09ee27363b4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 876,
"license_type": "permissive",
"max_line_length": 63,
"num_lines": 40,
"path": "/pyteal/ast/continue_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\noptions = CompileOptions()\n\n\ndef test_continue_fail():\n with pytest.raises(TealCompileError):\n Continue().__teal__(options)\n\n with pytest.raises(TealCompileError):\n If(Int(1), Continue()).__teal__(options)\n\n with pytest.raises(TealCompileError):\n Seq([Continue()]).__teal__(options)\n\n with pytest.raises(TypeError):\n Continue(Int(1))\n\n\ndef test_continue():\n\n expr = Continue()\n\n assert expr.type_of() == TealType.none\n assert not expr.has_return()\n\n expected = TealSimpleBlock([])\n\n options.enterLoop()\n actual, _ = expr.__teal__(options)\n breakBlocks, continueBlocks = options.exitLoop()\n\n assert actual == expected\n assert breakBlocks == []\n assert continueBlocks == [actual]\n"
},
{
"alpha_fraction": 0.5667539238929749,
"alphanum_fraction": 0.5817127823829651,
"avg_line_length": 26.147207260131836,
"blob_id": "ceba80e1e84368b415024e580f864fc3c2f69375",
"content_id": "a5ccfbb2f22b90a70e6cedb63d6cef3545be9744",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5348,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 197,
"path": "/pyteal/ast/for_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\noptions = CompileOptions()\n\n\ndef test_for_compiles():\n i = ScratchVar()\n\n expr = For(i.store(Int(0)), Int(1), i.store(i.load() + Int(1))).Do(\n App.globalPut(Itob(Int(0)), Itob(Int(2)))\n )\n assert expr.type_of() == TealType.none\n assert not expr.has_return()\n expr.__teal__(options)\n\n\ndef test_nested_for_compiles():\n i = ScratchVar()\n expr = For(i.store(Int(0)), Int(1), i.store(i.load() + Int(1))).Do(\n Seq(\n [\n For(i.store(Int(0)), Int(1), i.store(i.load() + Int(1))).Do(\n Seq([i.store(Int(0))])\n )\n ]\n )\n )\n assert expr.type_of() == TealType.none\n assert not expr.has_return()\n\n\ndef test_continue_break():\n i = ScratchVar()\n expr = For(i.store(Int(0)), Int(1), i.store(i.load() + Int(1))).Do(\n Seq([If(Int(1), Break(), Continue())])\n )\n assert expr.type_of() == TealType.none\n assert not expr.has_return()\n expr.__teal__(options)\n\n\ndef test_for():\n i = ScratchVar()\n items = [\n (i.store(Int(0))),\n i.load() < Int(10),\n i.store(i.load() + Int(1)),\n App.globalPut(Itob(i.load()), i.load() * Int(2)),\n ]\n expr = For(items[0], items[1], items[2]).Do(Seq([items[3]]))\n\n assert expr.type_of() == TealType.none\n assert not expr.has_return()\n\n expected, varEnd = items[0].__teal__(options)\n condStart, condEnd = items[1].__teal__(options)\n stepStart, stepEnd = items[2].__teal__(options)\n do, doEnd = Seq([items[3]]).__teal__(options)\n expectedBranch = TealConditionalBlock([])\n end = TealSimpleBlock([])\n\n varEnd.setNextBlock(condStart)\n doEnd.setNextBlock(stepStart)\n\n expectedBranch.setTrueBlock(do)\n expectedBranch.setFalseBlock(end)\n condEnd.setNextBlock(expectedBranch)\n stepEnd.setNextBlock(condStart)\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_for_continue():\n i = ScratchVar()\n items = [\n (i.store(Int(0))),\n i.load() < Int(10),\n i.store(i.load() + Int(1)),\n If(i.load() < Int(4), Continue()),\n App.globalPut(Itob(i.load()), i.load() * Int(2)),\n ]\n expr = For(items[0], items[1], items[2]).Do(Seq([items[3], items[4]]))\n\n assert expr.type_of() == TealType.none\n assert not expr.has_return()\n\n options.enterLoop()\n\n expected, varEnd = items[0].__teal__(options)\n condStart, condEnd = items[1].__teal__(options)\n stepStart, stepEnd = items[2].__teal__(options)\n do, doEnd = Seq([items[3], items[4]]).__teal__(options)\n expectedBranch = TealConditionalBlock([])\n end = TealSimpleBlock([])\n\n doEnd.setNextBlock(stepStart)\n stepEnd.setNextBlock(condStart)\n\n expectedBranch.setTrueBlock(do)\n expectedBranch.setFalseBlock(end)\n condEnd.setNextBlock(expectedBranch)\n varEnd.setNextBlock(condStart)\n\n _, continueBlocks = options.exitLoop()\n\n for block in continueBlocks:\n block.setNextBlock(stepStart)\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_for_break():\n i = ScratchVar()\n items = [\n (i.store(Int(0))),\n i.load() < Int(10),\n i.store(i.load() + Int(1)),\n If(i.load() == Int(6), Break()),\n App.globalPut(Itob(i.load()), i.load() * Int(2)),\n ]\n expr = For(items[0], items[1], items[2]).Do(Seq([items[3], items[4]]))\n\n assert expr.type_of() == TealType.none\n assert not expr.has_return()\n\n options.enterLoop()\n\n expected, varEnd = items[0].__teal__(options)\n condStart, condEnd = items[1].__teal__(options)\n stepStart, stepEnd = items[2].__teal__(options)\n do, doEnd = Seq([items[3], items[4]]).__teal__(options)\n 
expectedBranch = TealConditionalBlock([])\n end = TealSimpleBlock([])\n\n doEnd.setNextBlock(stepStart)\n stepEnd.setNextBlock(condStart)\n\n expectedBranch.setTrueBlock(do)\n expectedBranch.setFalseBlock(end)\n condEnd.setNextBlock(expectedBranch)\n varEnd.setNextBlock(condStart)\n\n breakBlocks, _ = options.exitLoop()\n\n for block in breakBlocks:\n block.setNextBlock(end)\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_invalid_for():\n with pytest.raises(TypeError):\n expr = For()\n\n with pytest.raises(TypeError):\n expr = For(Int(2))\n\n with pytest.raises(TypeError):\n expr = For(Int(1), Int(2))\n\n with pytest.raises(TealTypeError):\n i = ScratchVar()\n expr = For(i.store(Int(0)), Int(1), Int(2))\n expr.__teal__(options)\n\n with pytest.raises(TealCompileError):\n i = ScratchVar()\n expr = For(i.store(Int(0)), Int(1), i.store(i.load() + Int(1)))\n expr.type_of()\n\n with pytest.raises(TealCompileError):\n i = ScratchVar()\n expr = For(i.store(Int(0)), Int(1), i.store(i.load() + Int(1)))\n expr.__str__()\n\n with pytest.raises(TealTypeError):\n i = ScratchVar()\n expr = For(i.store(Int(0)), Int(1), i.store(i.load() + Int(1))).Do(Int(0))\n\n with pytest.raises(TealCompileError):\n expr = (\n For(i.store(Int(0)), Int(1), i.store(i.load() + Int(1)))\n .Do(Continue())\n .Do(Continue())\n )\n expr.__str__()\n"
},
{
"alpha_fraction": 0.65625,
"alphanum_fraction": 0.78125,
"avg_line_length": 11.800000190734863,
"blob_id": "77cff0d1f89c6ec1795ffac90f2263969becc504",
"content_id": "a6f36fa289a7eaf94c51de458db8f3e55b4790e2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 64,
"license_type": "permissive",
"max_line_length": 15,
"num_lines": 5,
"path": "/requirements.txt",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "black==21.7b0\nmypy==0.910\npytest\npytest-timeout\npy-algorand-sdk\n"
},
{
"alpha_fraction": 0.6115702390670776,
"alphanum_fraction": 0.6115702390670776,
"avg_line_length": 28.512195587158203,
"blob_id": "1cea02a365573b7f1b72b6387ced05ee6cdc769e",
"content_id": "2aa40fa8024e33c53e677b58e1b1b92a6dc56593",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1210,
"license_type": "permissive",
"max_line_length": 83,
"num_lines": 41,
"path": "/pyteal/ir/teallabel.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from typing import Optional, TYPE_CHECKING\n\nfrom .tealcomponent import TealComponent\nfrom .labelref import LabelReference\n\nif TYPE_CHECKING:\n from ..ast import Expr\n\n\nclass TealLabel(TealComponent):\n def __init__(\n self, expr: Optional[\"Expr\"], label: LabelReference, comment: str = None\n ) -> None:\n super().__init__(expr)\n self.label = label\n self.comment = comment\n\n def getLabelRef(self) -> LabelReference:\n return self.label\n\n def assemble(self) -> str:\n comment = \" // {}\".format(self.comment) if self.comment is not None else \"\"\n return \"{}:{}\".format(self.label.getLabel(), comment)\n\n def __repr__(self) -> str:\n return \"TealLabel({}, {}, {})\".format(\n self.expr, repr(self.label), repr(self.comment)\n )\n\n def __hash__(self) -> int:\n return hash((self.label, self.comment))\n\n def __eq__(self, other: object) -> bool:\n if not isinstance(other, TealLabel):\n return False\n if TealComponent.Context.checkExpr and self.expr is not other.expr:\n return False\n return self.label == other.label and self.comment == other.comment\n\n\nTealLabel.__module__ = \"pyteal\"\n"
},
{
"alpha_fraction": 0.5184618830680847,
"alphanum_fraction": 0.5414107441902161,
"avg_line_length": 29.454103469848633,
"blob_id": "3e0ea3294c2ed172176f8232f779d1598f31a4a9",
"content_id": "6ec26f7090bb480eaa242e30de65c7fbe8fd9957",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 43793,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 1438,
"path": "/pyteal/compiler/subroutines_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from collections import OrderedDict\n\nfrom .. import *\n\nfrom .subroutines import (\n findRecursionPoints,\n spillLocalSlotsDuringRecursion,\n resolveSubroutines,\n)\n\n\ndef test_findRecursionPoints_empty():\n subroutines = dict()\n\n expected = dict()\n actual = findRecursionPoints(subroutines)\n assert actual == expected\n\n\ndef test_findRecursionPoints_none():\n def sub1Impl():\n return None\n\n def sub2Impl(a1):\n return None\n\n def sub3Impl(a1, a2, a3):\n return None\n\n subroutine1 = SubroutineDefinition(sub1Impl, TealType.uint64)\n subroutine2 = SubroutineDefinition(sub2Impl, TealType.bytes)\n subroutine3 = SubroutineDefinition(sub3Impl, TealType.none)\n\n subroutines = {\n subroutine1: {subroutine2, subroutine3},\n subroutine2: {subroutine3},\n subroutine3: set(),\n }\n\n expected = {\n subroutine1: set(),\n subroutine2: set(),\n subroutine3: set(),\n }\n\n actual = findRecursionPoints(subroutines)\n assert actual == expected\n\n\ndef test_findRecursionPoints_direct_recursion():\n def sub1Impl():\n return None\n\n def sub2Impl(a1):\n return None\n\n def sub3Impl(a1, a2, a3):\n return None\n\n subroutine1 = SubroutineDefinition(sub1Impl, TealType.uint64)\n subroutine2 = SubroutineDefinition(sub2Impl, TealType.bytes)\n subroutine3 = SubroutineDefinition(sub3Impl, TealType.none)\n\n subroutines = {\n subroutine1: {subroutine2, subroutine3},\n subroutine2: {subroutine2, subroutine3},\n subroutine3: set(),\n }\n\n expected = {\n subroutine1: set(),\n subroutine2: {subroutine2},\n subroutine3: set(),\n }\n\n actual = findRecursionPoints(subroutines)\n assert actual == expected\n\n\ndef test_findRecursionPoints_mutual_recursion():\n def sub1Impl():\n return None\n\n def sub2Impl(a1):\n return None\n\n def sub3Impl(a1, a2, a3):\n return None\n\n subroutine1 = SubroutineDefinition(sub1Impl, TealType.uint64)\n subroutine2 = SubroutineDefinition(sub2Impl, TealType.bytes)\n subroutine3 = SubroutineDefinition(sub3Impl, TealType.none)\n\n subroutines = {\n subroutine1: {subroutine2, subroutine3},\n subroutine2: {subroutine1, subroutine3},\n subroutine3: set(),\n }\n\n expected = {\n subroutine1: {subroutine2},\n subroutine2: {subroutine1},\n subroutine3: set(),\n }\n\n actual = findRecursionPoints(subroutines)\n assert actual == expected\n\n\ndef test_findRecursionPoints_direct_and_mutual_recursion():\n def sub1Impl():\n return None\n\n def sub2Impl(a1):\n return None\n\n def sub3Impl(a1, a2, a3):\n return None\n\n subroutine1 = SubroutineDefinition(sub1Impl, TealType.uint64)\n subroutine2 = SubroutineDefinition(sub2Impl, TealType.bytes)\n subroutine3 = SubroutineDefinition(sub3Impl, TealType.none)\n\n subroutines = {\n subroutine1: {subroutine2, subroutine3},\n subroutine2: {subroutine1, subroutine2, subroutine3},\n subroutine3: set(),\n }\n\n expected = {\n subroutine1: {subroutine2},\n subroutine2: {subroutine1, subroutine2},\n subroutine3: set(),\n }\n\n actual = findRecursionPoints(subroutines)\n assert actual == expected\n\n\ndef test_spillLocalSlotsDuringRecursion_no_subroutines():\n for version in (4, 5):\n l1Label = LabelReference(\"l1\")\n mainOps = [\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, l1Label),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.return_),\n TealLabel(None, l1Label),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.return_),\n ]\n\n subroutineMapping = {None: mainOps}\n\n subroutineGraph = dict()\n\n localSlots = {None: set()}\n\n spillLocalSlotsDuringRecursion(\n version, subroutineMapping, 
subroutineGraph, localSlots\n )\n\n assert mainOps == [\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, l1Label),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.return_),\n TealLabel(None, l1Label),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.return_),\n ]\n\n\ndef test_spillLocalSlotsDuringRecursion_1_subroutine_no_recursion():\n for version in (4, 5):\n subroutine = SubroutineDefinition(lambda: None, TealType.uint64)\n\n subroutineL1Label = LabelReference(\"l1\")\n subroutineOps = [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutineL1Label),\n TealOp(None, Op.err),\n TealLabel(None, subroutineL1Label),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.retsub),\n ]\n\n l1Label = LabelReference(\"l1\")\n mainOps = [\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, l1Label),\n TealOp(None, Op.int, 100),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.return_),\n TealLabel(None, l1Label),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.return_),\n ]\n\n subroutineMapping = {None: mainOps, subroutine: subroutineOps}\n\n subroutineGraph = {subroutine: set()}\n\n localSlots = {None: set(), subroutine: {0}}\n\n spillLocalSlotsDuringRecursion(\n version, subroutineMapping, subroutineGraph, localSlots\n )\n\n assert subroutineMapping == {\n None: [\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, l1Label),\n TealOp(None, Op.int, 100),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.return_),\n TealLabel(None, l1Label),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.return_),\n ],\n subroutine: [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutineL1Label),\n TealOp(None, Op.err),\n TealLabel(None, subroutineL1Label),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.retsub),\n ],\n }\n\n\ndef test_spillLocalSlotsDuringRecursion_1_subroutine_recursion_v4():\n def sub1Impl(a1):\n return None\n\n subroutine = SubroutineDefinition(sub1Impl, TealType.uint64)\n\n subroutineL1Label = LabelReference(\"l1\")\n subroutineOps = [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutineL1Label),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.minus),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.add),\n TealOp(None, Op.retsub),\n TealLabel(None, subroutineL1Label),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.retsub),\n ]\n\n l1Label = LabelReference(\"l1\")\n mainOps = [\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, l1Label),\n TealOp(None, Op.int, 100),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.return_),\n TealLabel(None, l1Label),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.return_),\n ]\n\n subroutineMapping = {None: mainOps, subroutine: subroutineOps}\n\n subroutineGraph = {subroutine: {subroutine}}\n\n localSlots = {None: set(), subroutine: {0}}\n\n spillLocalSlotsDuringRecursion(4, subroutineMapping, subroutineGraph, localSlots)\n\n assert subroutineMapping == {\n None: [\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, l1Label),\n TealOp(None, Op.int, 100),\n TealOp(None, 
Op.callsub, subroutine),\n TealOp(None, Op.return_),\n TealLabel(None, l1Label),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.return_),\n ],\n subroutine: [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutineL1Label),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.minus),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.dig, 1),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.swap),\n TealOp(None, Op.store, 0),\n TealOp(None, Op.swap),\n TealOp(None, Op.pop),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.add),\n TealOp(None, Op.retsub),\n TealLabel(None, subroutineL1Label),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.retsub),\n ],\n }\n\n\ndef test_spillLocalSlotsDuringRecursion_1_subroutine_recursion_v5():\n def sub1Impl(a1):\n return None\n\n subroutine = SubroutineDefinition(sub1Impl, TealType.uint64)\n\n subroutineL1Label = LabelReference(\"l1\")\n subroutineOps = [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutineL1Label),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.minus),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.add),\n TealOp(None, Op.retsub),\n TealLabel(None, subroutineL1Label),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.retsub),\n ]\n\n l1Label = LabelReference(\"l1\")\n mainOps = [\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, l1Label),\n TealOp(None, Op.int, 100),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.return_),\n TealLabel(None, l1Label),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.return_),\n ]\n\n subroutineMapping = {None: mainOps, subroutine: subroutineOps}\n\n subroutineGraph = {subroutine: {subroutine}}\n\n localSlots = {None: set(), subroutine: {0}}\n\n spillLocalSlotsDuringRecursion(5, subroutineMapping, subroutineGraph, localSlots)\n\n assert subroutineMapping == {\n None: [\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, l1Label),\n TealOp(None, Op.int, 100),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.return_),\n TealLabel(None, l1Label),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.return_),\n ],\n subroutine: [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutineL1Label),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.minus),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.swap),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.swap),\n TealOp(None, Op.store, 0),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.add),\n TealOp(None, Op.retsub),\n TealLabel(None, subroutineL1Label),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.retsub),\n ],\n }\n\n\ndef test_spillLocalSlotsDuringRecursion_multiple_subroutines_no_recursion():\n for version in (4, 5):\n\n def sub1Impl(a1):\n return None\n\n def sub2Impl(a1, a2):\n return None\n\n def sub3Impl(a1, a2, a3):\n return None\n\n subroutine1 = SubroutineDefinition(sub1Impl, TealType.uint64)\n subroutine2 = SubroutineDefinition(sub1Impl, TealType.uint64)\n subroutine3 = SubroutineDefinition(sub1Impl, TealType.none)\n\n subroutine1L1Label = LabelReference(\"l1\")\n subroutine1Ops = [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 0),\n TealOp(None, 
Op.eq),\n TealOp(None, Op.bnz, subroutine1L1Label),\n TealOp(None, Op.err),\n TealLabel(None, subroutine1L1Label),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.callsub, subroutine3),\n TealOp(None, Op.retsub),\n ]\n\n subroutine2L1Label = LabelReference(\"l1\")\n subroutine2Ops = [\n TealOp(None, Op.store, 1),\n TealOp(None, Op.load, 1),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutine2L1Label),\n TealOp(None, Op.err),\n TealLabel(None, subroutine2L1Label),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.retsub),\n ]\n\n subroutine3Ops = [\n TealOp(None, Op.retsub),\n ]\n\n l1Label = LabelReference(\"l1\")\n mainOps = [\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, l1Label),\n TealOp(None, Op.int, 100),\n TealOp(None, Op.callsub, subroutine1),\n TealOp(None, Op.return_),\n TealLabel(None, l1Label),\n TealOp(None, Op.int, 101),\n TealOp(None, Op.callsub, subroutine2),\n TealOp(None, Op.return_),\n ]\n\n subroutineMapping = {\n None: mainOps,\n subroutine1: subroutine1Ops,\n subroutine2: subroutine2Ops,\n subroutine3: subroutine3Ops,\n }\n\n subroutineGraph = {\n subroutine1: {subroutine2},\n subroutine2: set(),\n subroutine3: set(),\n }\n\n localSlots = {None: set(), subroutine1: {0}, subroutine2: {1}, subroutine3: {}}\n\n spillLocalSlotsDuringRecursion(\n version, subroutineMapping, subroutineGraph, localSlots\n )\n\n assert subroutineMapping == {\n None: [\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, l1Label),\n TealOp(None, Op.int, 100),\n TealOp(None, Op.callsub, subroutine1),\n TealOp(None, Op.return_),\n TealLabel(None, l1Label),\n TealOp(None, Op.int, 101),\n TealOp(None, Op.callsub, subroutine2),\n TealOp(None, Op.return_),\n ],\n subroutine1: [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutine1L1Label),\n TealOp(None, Op.err),\n TealLabel(None, subroutine1L1Label),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.callsub, subroutine3),\n TealOp(None, Op.retsub),\n ],\n subroutine2: [\n TealOp(None, Op.store, 1),\n TealOp(None, Op.load, 1),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutine2L1Label),\n TealOp(None, Op.err),\n TealLabel(None, subroutine2L1Label),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.retsub),\n ],\n subroutine3: [\n TealOp(None, Op.retsub),\n ],\n }\n\n\ndef test_spillLocalSlotsDuringRecursion_multiple_subroutines_recursion_v4():\n def sub1Impl(a1):\n return None\n\n def sub2Impl(a1, a2):\n return None\n\n def sub3Impl(a1, a2, a3):\n return None\n\n subroutine1 = SubroutineDefinition(sub1Impl, TealType.uint64)\n subroutine2 = SubroutineDefinition(sub1Impl, TealType.uint64)\n subroutine3 = SubroutineDefinition(sub1Impl, TealType.none)\n\n subroutine1L1Label = LabelReference(\"l1\")\n subroutine1Ops = [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutine1L1Label),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.minus),\n TealOp(None, Op.callsub, subroutine1),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.add),\n TealOp(None, Op.retsub),\n TealLabel(None, subroutine1L1Label),\n TealOp(None, Op.load, 255),\n TealOp(None, Op.retsub),\n ]\n\n subroutine2L1Label = LabelReference(\"l1\")\n subroutine2Ops = [\n TealOp(None, Op.store, 1),\n TealOp(None, Op.load, 1),\n TealOp(None, Op.int, 
0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutine2L1Label),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.minus),\n TealOp(None, Op.callsub, subroutine1),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.add),\n TealOp(None, Op.retsub),\n TealLabel(None, subroutine2L1Label),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.retsub),\n ]\n\n subroutine3Ops = [\n TealOp(None, Op.callsub, subroutine3),\n TealOp(None, Op.retsub),\n ]\n\n l1Label = LabelReference(\"l1\")\n mainOps = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.store, 255),\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, l1Label),\n TealOp(None, Op.int, 100),\n TealOp(None, Op.callsub, subroutine1),\n TealOp(None, Op.return_),\n TealLabel(None, l1Label),\n TealOp(None, Op.int, 101),\n TealOp(None, Op.callsub, subroutine2),\n TealOp(None, Op.return_),\n TealOp(None, Op.callsub, subroutine3),\n ]\n\n subroutineMapping = {\n None: mainOps,\n subroutine1: subroutine1Ops,\n subroutine2: subroutine2Ops,\n subroutine3: subroutine3Ops,\n }\n\n subroutineGraph = {\n subroutine1: {subroutine1},\n subroutine2: {subroutine1},\n subroutine3: {subroutine3},\n }\n\n localSlots = {None: set(), subroutine1: {0}, subroutine2: {1}, subroutine3: {}}\n\n spillLocalSlotsDuringRecursion(4, subroutineMapping, subroutineGraph, localSlots)\n\n assert subroutineMapping == {\n None: [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.store, 255),\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, l1Label),\n TealOp(None, Op.int, 100),\n TealOp(None, Op.callsub, subroutine1),\n TealOp(None, Op.return_),\n TealLabel(None, l1Label),\n TealOp(None, Op.int, 101),\n TealOp(None, Op.callsub, subroutine2),\n TealOp(None, Op.return_),\n TealOp(None, Op.callsub, subroutine3),\n ],\n subroutine1: [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutine1L1Label),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.minus),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.dig, 1),\n TealOp(None, Op.callsub, subroutine1),\n TealOp(None, Op.swap),\n TealOp(None, Op.store, 0),\n TealOp(None, Op.swap),\n TealOp(None, Op.pop),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.add),\n TealOp(None, Op.retsub),\n TealLabel(None, subroutine1L1Label),\n TealOp(None, Op.load, 255),\n TealOp(None, Op.retsub),\n ],\n subroutine2: [\n TealOp(None, Op.store, 1),\n TealOp(None, Op.load, 1),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutine2L1Label),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.minus),\n TealOp(None, Op.callsub, subroutine1),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.add),\n TealOp(None, Op.retsub),\n TealLabel(None, subroutine2L1Label),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.retsub),\n ],\n subroutine3: [\n TealOp(None, Op.callsub, subroutine3),\n TealOp(None, Op.retsub),\n ],\n }\n\n\ndef test_spillLocalSlotsDuringRecursion_multiple_subroutines_recursion_v5():\n def sub1Impl(a1):\n return None\n\n def sub2Impl(a1, a2):\n return None\n\n def sub3Impl(a1, a2, a3):\n return None\n\n subroutine1 = SubroutineDefinition(sub1Impl, TealType.uint64)\n subroutine2 = SubroutineDefinition(sub1Impl, TealType.uint64)\n subroutine3 = SubroutineDefinition(sub1Impl, TealType.none)\n\n subroutine1L1Label = LabelReference(\"l1\")\n subroutine1Ops = [\n TealOp(None, Op.store, 0),\n 
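# recursive body: returns self(arg - 1) + 1, with the base case jumping to l1\n        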
TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutine1L1Label),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.minus),\n TealOp(None, Op.callsub, subroutine1),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.add),\n TealOp(None, Op.retsub),\n TealLabel(None, subroutine1L1Label),\n TealOp(None, Op.load, 255),\n TealOp(None, Op.retsub),\n ]\n\n subroutine2L1Label = LabelReference(\"l1\")\n subroutine2Ops = [\n TealOp(None, Op.store, 1),\n TealOp(None, Op.load, 1),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutine2L1Label),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.minus),\n TealOp(None, Op.callsub, subroutine1),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.add),\n TealOp(None, Op.retsub),\n TealLabel(None, subroutine2L1Label),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.retsub),\n ]\n\n subroutine3Ops = [\n TealOp(None, Op.callsub, subroutine3),\n TealOp(None, Op.retsub),\n ]\n\n l1Label = LabelReference(\"l1\")\n mainOps = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.store, 255),\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, l1Label),\n TealOp(None, Op.int, 100),\n TealOp(None, Op.callsub, subroutine1),\n TealOp(None, Op.return_),\n TealLabel(None, l1Label),\n TealOp(None, Op.int, 101),\n TealOp(None, Op.callsub, subroutine2),\n TealOp(None, Op.return_),\n TealOp(None, Op.callsub, subroutine3),\n ]\n\n subroutineMapping = {\n None: mainOps,\n subroutine1: subroutine1Ops,\n subroutine2: subroutine2Ops,\n subroutine3: subroutine3Ops,\n }\n\n subroutineGraph = {\n subroutine1: {subroutine1},\n subroutine2: {subroutine1},\n subroutine3: {subroutine3},\n }\n\n localSlots = {None: set(), subroutine1: {0}, subroutine2: {1}, subroutine3: {}}\n\n spillLocalSlotsDuringRecursion(5, subroutineMapping, subroutineGraph, localSlots)\n\n assert subroutineMapping == {\n None: [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.store, 255),\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, l1Label),\n TealOp(None, Op.int, 100),\n TealOp(None, Op.callsub, subroutine1),\n TealOp(None, Op.return_),\n TealLabel(None, l1Label),\n TealOp(None, Op.int, 101),\n TealOp(None, Op.callsub, subroutine2),\n TealOp(None, Op.return_),\n TealOp(None, Op.callsub, subroutine3),\n ],\n subroutine1: [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutine1L1Label),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.minus),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.swap),\n TealOp(None, Op.callsub, subroutine1),\n TealOp(None, Op.swap),\n TealOp(None, Op.store, 0),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.add),\n TealOp(None, Op.retsub),\n TealLabel(None, subroutine1L1Label),\n TealOp(None, Op.load, 255),\n TealOp(None, Op.retsub),\n ],\n subroutine2: [\n TealOp(None, Op.store, 1),\n TealOp(None, Op.load, 1),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutine2L1Label),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.minus),\n TealOp(None, Op.callsub, subroutine1),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.add),\n TealOp(None, Op.retsub),\n TealLabel(None, subroutine2L1Label),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.retsub),\n ],\n subroutine3: [\n TealOp(None, Op.callsub, subroutine3),\n TealOp(None, 
Op.retsub),\n ],\n }\n\n\ndef test_spillLocalSlotsDuringRecursion_recursive_many_args_no_return_v4():\n def subImpl(a1, a2, a3):\n return None\n\n subroutine = SubroutineDefinition(subImpl, TealType.none)\n\n subroutineOps = [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.store, 1),\n TealOp(None, Op.store, 2),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.retsub),\n ]\n\n mainOps = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.return_),\n ]\n\n subroutineMapping = {\n None: mainOps,\n subroutine: subroutineOps,\n }\n\n subroutineGraph = {\n subroutine: {subroutine},\n }\n\n localSlots = {None: set(), subroutine: {0, 1, 2}}\n\n spillLocalSlotsDuringRecursion(4, subroutineMapping, subroutineGraph, localSlots)\n\n assert subroutineMapping == {\n None: [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.return_),\n ],\n subroutine: [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.store, 1),\n TealOp(None, Op.store, 2),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.load, 1),\n TealOp(None, Op.load, 2),\n TealOp(None, Op.dig, 5),\n TealOp(None, Op.dig, 5),\n TealOp(None, Op.dig, 5),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.store, 2),\n TealOp(None, Op.store, 1),\n TealOp(None, Op.store, 0),\n TealOp(None, Op.pop),\n TealOp(None, Op.pop),\n TealOp(None, Op.pop),\n TealOp(None, Op.retsub),\n ],\n }\n\n\ndef test_spillLocalSlotsDuringRecursion_recursive_many_args_no_return_v5():\n def subImpl(a1, a2, a3):\n return None\n\n subroutine = SubroutineDefinition(subImpl, TealType.none)\n\n subroutineOps = [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.store, 1),\n TealOp(None, Op.store, 2),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.retsub),\n ]\n\n mainOps = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.return_),\n ]\n\n subroutineMapping = {\n None: mainOps,\n subroutine: subroutineOps,\n }\n\n subroutineGraph = {\n subroutine: {subroutine},\n }\n\n localSlots = {None: set(), subroutine: {0, 1, 2}}\n\n spillLocalSlotsDuringRecursion(5, subroutineMapping, subroutineGraph, localSlots)\n\n assert subroutineMapping == {\n None: [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.return_),\n ],\n subroutine: [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.store, 1),\n TealOp(None, Op.store, 2),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.load, 1),\n TealOp(None, Op.load, 2),\n TealOp(None, Op.uncover, 5),\n TealOp(None, Op.uncover, 5),\n TealOp(None, Op.uncover, 5),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.store, 2),\n TealOp(None, Op.store, 1),\n TealOp(None, Op.store, 0),\n TealOp(None, Op.retsub),\n ],\n }\n\n\ndef test_spillLocalSlotsDuringRecursion_recursive_many_args_return_v4():\n def subImpl(a1, a2, a3):\n return None\n\n subroutine = 
SubroutineDefinition(subImpl, TealType.uint64)\n\n subroutineOps = [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.store, 1),\n TealOp(None, Op.store, 2),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.retsub),\n ]\n\n mainOps = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.return_),\n ]\n\n subroutineMapping = {\n None: mainOps,\n subroutine: subroutineOps,\n }\n\n subroutineGraph = {\n subroutine: {subroutine},\n }\n\n localSlots = {None: set(), subroutine: {0, 1, 2}}\n\n spillLocalSlotsDuringRecursion(4, subroutineMapping, subroutineGraph, localSlots)\n\n assert subroutineMapping == {\n None: [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.return_),\n ],\n subroutine: [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.store, 1),\n TealOp(None, Op.store, 2),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.load, 1),\n TealOp(None, Op.load, 2),\n TealOp(None, Op.dig, 5),\n TealOp(None, Op.dig, 5),\n TealOp(None, Op.dig, 5),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.store, 0),\n TealOp(None, Op.store, 2),\n TealOp(None, Op.store, 1),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.swap),\n TealOp(None, Op.store, 0),\n TealOp(None, Op.swap),\n TealOp(None, Op.pop),\n TealOp(None, Op.swap),\n TealOp(None, Op.pop),\n TealOp(None, Op.swap),\n TealOp(None, Op.pop),\n TealOp(None, Op.retsub),\n ],\n }\n\n\ndef test_spillLocalSlotsDuringRecursion_recursive_many_args_return_v5():\n def subImpl(a1, a2, a3):\n return None\n\n subroutine = SubroutineDefinition(subImpl, TealType.uint64)\n\n subroutineOps = [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.store, 1),\n TealOp(None, Op.store, 2),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.retsub),\n ]\n\n mainOps = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.return_),\n ]\n\n subroutineMapping = {\n None: mainOps,\n subroutine: subroutineOps,\n }\n\n subroutineGraph = {\n subroutine: {subroutine},\n }\n\n localSlots = {None: set(), subroutine: {0, 1, 2}}\n\n spillLocalSlotsDuringRecursion(5, subroutineMapping, subroutineGraph, localSlots)\n\n assert subroutineMapping == {\n None: [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.return_),\n ],\n subroutine: [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.store, 1),\n TealOp(None, Op.store, 2),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.load, 1),\n TealOp(None, Op.load, 2),\n TealOp(None, Op.uncover, 5),\n TealOp(None, Op.uncover, 5),\n TealOp(None, Op.uncover, 5),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.cover, 3),\n TealOp(None, Op.store, 2),\n TealOp(None, Op.store, 1),\n TealOp(None, Op.store, 0),\n TealOp(None, Op.retsub),\n ],\n }\n\n\ndef test_spillLocalSlotsDuringRecursion_recursive_more_args_than_slots_v5():\n def subImpl(a1, a2, a3):\n return None\n\n subroutine = SubroutineDefinition(subImpl, TealType.uint64)\n\n subroutineOps = [\n TealOp(None, 
Op.store, 0),\n TealOp(None, Op.store, 1),\n TealOp(None, Op.pop),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.retsub),\n ]\n\n mainOps = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.return_),\n ]\n\n subroutineMapping = {\n None: mainOps,\n subroutine: subroutineOps,\n }\n\n subroutineGraph = {\n subroutine: {subroutine},\n }\n\n localSlots = {None: set(), subroutine: {0, 1}}\n\n spillLocalSlotsDuringRecursion(5, subroutineMapping, subroutineGraph, localSlots)\n\n assert subroutineMapping == {\n None: [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.return_),\n ],\n subroutine: [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.store, 1),\n TealOp(None, Op.pop),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.cover, 3),\n TealOp(None, Op.load, 1),\n TealOp(None, Op.cover, 3),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.cover, 2),\n TealOp(None, Op.store, 1),\n TealOp(None, Op.store, 0),\n TealOp(None, Op.retsub),\n ],\n }\n\n\ndef test_spillLocalSlotsDuringRecursion_recursive_more_slots_than_args_v5():\n def subImpl(a1, a2, a3):\n return None\n\n subroutine = SubroutineDefinition(subImpl, TealType.uint64)\n\n subroutineOps = [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.store, 1),\n TealOp(None, Op.store, 2),\n TealOp(None, Op.int, 10),\n TealOp(None, Op.store, 3),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.retsub),\n ]\n\n mainOps = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.return_),\n ]\n\n subroutineMapping = {\n None: mainOps,\n subroutine: subroutineOps,\n }\n\n subroutineGraph = {\n subroutine: {subroutine},\n }\n\n localSlots = {None: set(), subroutine: {0, 1, 2, 3}}\n\n spillLocalSlotsDuringRecursion(5, subroutineMapping, subroutineGraph, localSlots)\n\n assert subroutineMapping == {\n None: [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.return_),\n ],\n subroutine: [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.store, 1),\n TealOp(None, Op.store, 2),\n TealOp(None, Op.int, 10),\n TealOp(None, Op.store, 3),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.int, 2),\n TealOp(None, Op.int, 3),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.load, 1),\n TealOp(None, Op.load, 2),\n TealOp(None, Op.load, 3),\n TealOp(None, Op.uncover, 6),\n TealOp(None, Op.uncover, 6),\n TealOp(None, Op.uncover, 6),\n TealOp(None, Op.callsub, subroutine),\n TealOp(None, Op.cover, 4),\n TealOp(None, Op.store, 3),\n TealOp(None, Op.store, 2),\n TealOp(None, Op.store, 1),\n TealOp(None, Op.store, 0),\n TealOp(None, Op.retsub),\n ],\n }\n\n\ndef test_resolveSubroutines():\n def sub1Impl(a1):\n return None\n\n def sub2Impl(a1, a2):\n return None\n\n def sub3Impl(a1, a2, a3):\n return None\n\n subroutine1 = SubroutineDefinition(sub1Impl, TealType.uint64)\n subroutine2 = SubroutineDefinition(sub1Impl, TealType.uint64)\n subroutine3 = SubroutineDefinition(sub1Impl, TealType.none)\n\n subroutine1L1Label = LabelReference(\"l1\")\n subroutine1Ops = [\n TealOp(None, 
Op.store, 0),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutine1L1Label),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.minus),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.dig, 1),\n TealOp(None, Op.callsub, subroutine1),\n TealOp(None, Op.swap),\n TealOp(None, Op.store, 0),\n TealOp(None, Op.swap),\n TealOp(None, Op.pop),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.add),\n TealOp(None, Op.retsub),\n TealLabel(None, subroutine1L1Label),\n TealOp(None, Op.load, 255),\n TealOp(None, Op.retsub),\n ]\n\n subroutine2L1Label = LabelReference(\"l1\")\n subroutine2Ops = [\n TealOp(None, Op.store, 1),\n TealOp(None, Op.load, 1),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutine2L1Label),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.minus),\n TealOp(None, Op.callsub, subroutine1),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.add),\n TealOp(None, Op.retsub),\n TealLabel(None, subroutine2L1Label),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.retsub),\n ]\n\n subroutine3Ops = [\n TealOp(None, Op.callsub, subroutine3),\n TealOp(None, Op.retsub),\n ]\n\n l1Label = LabelReference(\"l1\")\n mainOps = [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.store, 255),\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, l1Label),\n TealOp(None, Op.int, 100),\n TealOp(None, Op.callsub, subroutine1),\n TealOp(None, Op.return_),\n TealLabel(None, l1Label),\n TealOp(None, Op.int, 101),\n TealOp(None, Op.callsub, subroutine2),\n TealOp(None, Op.return_),\n TealOp(None, Op.callsub, subroutine3),\n ]\n\n subroutineMapping = {\n None: mainOps,\n subroutine1: subroutine1Ops,\n subroutine2: subroutine2Ops,\n subroutine3: subroutine3Ops,\n }\n\n expected = OrderedDict()\n expected[subroutine1] = \"sub0\"\n expected[subroutine2] = \"sub1\"\n expected[subroutine3] = \"sub2\"\n\n actual = resolveSubroutines(subroutineMapping)\n assert actual == expected\n\n assert mainOps == [\n TealOp(None, Op.int, 1),\n TealOp(None, Op.store, 255),\n TealOp(None, Op.txn, \"Fee\"),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bz, l1Label),\n TealOp(None, Op.int, 100),\n TealOp(None, Op.callsub, expected[subroutine1]),\n TealOp(None, Op.return_),\n TealLabel(None, l1Label),\n TealOp(None, Op.int, 101),\n TealOp(None, Op.callsub, expected[subroutine2]),\n TealOp(None, Op.return_),\n TealOp(None, Op.callsub, expected[subroutine3]),\n ]\n\n assert subroutine1Ops == [\n TealOp(None, Op.store, 0),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutine1L1Label),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.minus),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.dig, 1),\n TealOp(None, Op.callsub, expected[subroutine1]),\n TealOp(None, Op.swap),\n TealOp(None, Op.store, 0),\n TealOp(None, Op.swap),\n TealOp(None, Op.pop),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.add),\n TealOp(None, Op.retsub),\n TealLabel(None, subroutine1L1Label),\n TealOp(None, Op.load, 255),\n TealOp(None, Op.retsub),\n ]\n\n assert subroutine2Ops == [\n TealOp(None, Op.store, 1),\n TealOp(None, Op.load, 1),\n TealOp(None, Op.int, 0),\n TealOp(None, Op.eq),\n TealOp(None, Op.bnz, subroutine2L1Label),\n TealOp(None, Op.load, 0),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.minus),\n TealOp(None, Op.callsub, expected[subroutine1]),\n TealOp(None, Op.int, 1),\n TealOp(None, 
Op.add),\n TealOp(None, Op.retsub),\n TealLabel(None, subroutine2L1Label),\n TealOp(None, Op.int, 1),\n TealOp(None, Op.retsub),\n ]\n\n assert subroutine3Ops == [\n TealOp(None, Op.callsub, expected[subroutine3]),\n TealOp(None, Op.retsub),\n ]\n"
},
{
"alpha_fraction": 0.5863938927650452,
"alphanum_fraction": 0.5977323055267334,
"avg_line_length": 15.695473670959473,
"blob_id": "0d8b23b3bbfd9e0971f9fcec1e4eac7a754b2e1e",
"content_id": "9712dc76d20bd3feab0972f87fb2f70af5aa8260",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4057,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 243,
"path": "/pyteal/ast/__init__.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "# abstract types\nfrom .expr import Expr\n\n# basic types\nfrom .leafexpr import LeafExpr\nfrom .addr import Addr\nfrom .bytes import Bytes\nfrom .int import Int, EnumInt\n\n# properties\nfrom .arg import Arg\nfrom .txn import TxnType, TxnField, TxnExpr, TxnaExpr, TxnArray, TxnObject, Txn\nfrom .gtxn import GtxnExpr, GtxnaExpr, TxnGroup, Gtxn\nfrom .gaid import GeneratedID\nfrom .gload import ImportScratchValue\nfrom .global_ import Global, GlobalField\nfrom .app import App, AppField, OnComplete, AppParam\nfrom .asset import AssetHolding, AssetParam\n\n# inner txns\nfrom .itxn import InnerTxnBuilder, InnerTxn\n\n# meta\nfrom .array import Array\nfrom .tmpl import Tmpl\nfrom .nonce import Nonce\n\n# unary ops\nfrom .unaryexpr import (\n UnaryExpr,\n Btoi,\n Itob,\n Len,\n BitLen,\n Sha256,\n Sha512_256,\n Keccak256,\n Not,\n BitwiseNot,\n Sqrt,\n Pop,\n Balance,\n MinBalance,\n BytesNot,\n BytesZero,\n Log,\n)\n\n# binary ops\nfrom .binaryexpr import (\n BinaryExpr,\n Add,\n Minus,\n Mul,\n Div,\n Mod,\n Exp,\n BitwiseAnd,\n BitwiseOr,\n BitwiseXor,\n ShiftLeft,\n ShiftRight,\n Eq,\n Neq,\n Lt,\n Le,\n Gt,\n Ge,\n GetBit,\n GetByte,\n BytesAdd,\n BytesMinus,\n BytesDiv,\n BytesMul,\n BytesMod,\n BytesAnd,\n BytesOr,\n BytesXor,\n BytesEq,\n BytesNeq,\n BytesLt,\n BytesLe,\n BytesGt,\n BytesGe,\n ExtractUint16,\n ExtractUint32,\n ExtractUint64,\n)\n\n# ternary ops\nfrom .ternaryexpr import Ed25519Verify, SetBit, SetByte\nfrom .substring import Substring, Extract, Suffix\n\n# more ops\nfrom .naryexpr import NaryExpr, And, Or, Concat\nfrom .widemath import WideRatio\n\n# control flow\nfrom .if_ import If\nfrom .cond import Cond\nfrom .seq import Seq\nfrom .assert_ import Assert\nfrom .err import Err\nfrom .return_ import Return, Approve, Reject\nfrom .subroutine import (\n Subroutine,\n SubroutineDefinition,\n SubroutineDeclaration,\n SubroutineCall,\n)\nfrom .while_ import While\nfrom .for_ import For\nfrom .break_ import Break\nfrom .continue_ import Continue\n\n\n# misc\nfrom .scratch import ScratchSlot, ScratchLoad, ScratchStore, ScratchStackStore\nfrom .scratchvar import ScratchVar\nfrom .maybe import MaybeValue\n\n__all__ = [\n \"Expr\",\n \"LeafExpr\",\n \"Addr\",\n \"Bytes\",\n \"Int\",\n \"EnumInt\",\n \"Arg\",\n \"TxnType\",\n \"TxnField\",\n \"TxnExpr\",\n \"TxnaExpr\",\n \"TxnArray\",\n \"TxnObject\",\n \"Txn\",\n \"GtxnExpr\",\n \"GtxnaExpr\",\n \"TxnGroup\",\n \"Gtxn\",\n \"GeneratedID\",\n \"ImportScratchValue\",\n \"Global\",\n \"GlobalField\",\n \"App\",\n \"AppField\",\n \"OnComplete\",\n \"AppParam\",\n \"AssetHolding\",\n \"AssetParam\",\n \"InnerTxnBuilder\",\n \"InnerTxn\",\n \"Array\",\n \"Tmpl\",\n \"Nonce\",\n \"UnaryExpr\",\n \"Btoi\",\n \"Itob\",\n \"Len\",\n \"BitLen\",\n \"Sha256\",\n \"Sha512_256\",\n \"Keccak256\",\n \"Not\",\n \"BitwiseNot\",\n \"Sqrt\",\n \"Pop\",\n \"Balance\",\n \"MinBalance\",\n \"BinaryExpr\",\n \"Add\",\n \"Minus\",\n \"Mul\",\n \"Div\",\n \"Mod\",\n \"Exp\",\n \"BitwiseAnd\",\n \"BitwiseOr\",\n \"BitwiseXor\",\n \"ShiftLeft\",\n \"ShiftRight\",\n \"Eq\",\n \"Neq\",\n \"Lt\",\n \"Le\",\n \"Gt\",\n \"Ge\",\n \"GetBit\",\n \"GetByte\",\n \"Ed25519Verify\",\n \"Substring\",\n \"Extract\",\n \"Suffix\",\n \"SetBit\",\n \"SetByte\",\n \"NaryExpr\",\n \"And\",\n \"Or\",\n \"Concat\",\n \"WideRatio\",\n \"If\",\n \"Cond\",\n \"Seq\",\n \"Assert\",\n \"Err\",\n \"Return\",\n \"Approve\",\n \"Reject\",\n \"Subroutine\",\n \"SubroutineDefinition\",\n \"SubroutineDeclaration\",\n \"SubroutineCall\",\n \"ScratchSlot\",\n \"ScratchLoad\",\n 
\"ScratchStore\",\n \"ScratchStackStore\",\n \"ScratchVar\",\n \"MaybeValue\",\n \"BytesAdd\",\n \"BytesMinus\",\n \"BytesDiv\",\n \"BytesMul\",\n \"BytesMod\",\n \"BytesAnd\",\n \"BytesOr\",\n \"BytesXor\",\n \"BytesEq\",\n \"BytesNeq\",\n \"BytesLt\",\n \"BytesLe\",\n \"BytesGt\",\n \"BytesGe\",\n \"BytesNot\",\n \"BytesZero\",\n \"ExtractUint16\",\n \"ExtractUint32\",\n \"ExtractUint64\",\n \"Log\",\n \"While\",\n \"For\",\n \"Break\",\n \"Continue\",\n]\n"
},
{
"alpha_fraction": 0.6530864238739014,
"alphanum_fraction": 0.6654320955276489,
"avg_line_length": 27.928571701049805,
"blob_id": "5ace256f1252559264ff309cbcf1ce6bc53e12aa",
"content_id": "78893d60f6adbfa820214fb1cb68b41dafe882de",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 810,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 28,
"path": "/pyteal/ir/tealsimpleblock_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from .. import *\n\n\ndef test_constructor():\n block1 = TealSimpleBlock([])\n assert block1.ops == []\n assert block1.nextBlock is None\n\n block2 = TealSimpleBlock([TealOp(None, Op.int, 1)])\n assert block2.ops == [TealOp(None, Op.int, 1)]\n assert block2.nextBlock is None\n\n\ndef test_next_block():\n block = TealSimpleBlock([])\n block.setNextBlock(TealSimpleBlock([TealOp(None, Op.substring3)]))\n assert block.nextBlock == TealSimpleBlock([TealOp(None, Op.substring3)])\n\n\ndef test_outgoing():\n emptyBlock = TealSimpleBlock([])\n assert emptyBlock.getOutgoing() == []\n\n block = TealSimpleBlock([])\n block.setNextBlock(TealSimpleBlock([TealOp(None, Op.byte, '\"nextBlock\"')]))\n assert block.getOutgoing() == [\n TealSimpleBlock([TealOp(None, Op.byte, '\"nextBlock\"')])\n ]\n"
},
{
"alpha_fraction": 0.6124907732009888,
"alphanum_fraction": 0.6194246411323547,
"avg_line_length": 27.600563049316406,
"blob_id": "9da1f866d500c5845b2624ea7a5ae798a0ea495e",
"content_id": "c51b8833a1e233dc9143b76b1e396553b3a66f85",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 20335,
"license_type": "permissive",
"max_line_length": 68,
"num_lines": 711,
"path": "/pyteal/ast/asset_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\nteal2Options = CompileOptions()\nteal4Options = CompileOptions(version=4)\nteal5Options = CompileOptions(version=5)\n\n\ndef test_asset_holding_balance():\n args = Int(0), Int(17)\n expr = AssetHolding.balance(args[0], args[1])\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 0),\n TealOp(args[1], Op.int, 17),\n TealOp(expr, Op.asset_holding_get, \"AssetBalance\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_holding_balance_direct_ref():\n args = [Txn.sender(), Txn.assets[17]]\n expr = AssetHolding.balance(args[0], args[1])\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.txn, \"Sender\"),\n TealOp(args[1], Op.txna, \"Assets\", 17),\n TealOp(expr, Op.asset_holding_get, \"AssetBalance\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_holding_balance_invalid():\n with pytest.raises(TealTypeError):\n AssetHolding.balance(Txn.sender(), Bytes(\"100\"))\n\n with pytest.raises(TealTypeError):\n AssetHolding.balance(Int(0), Txn.receiver())\n\n\ndef test_asset_holding_frozen():\n args = [Int(0), Int(17)]\n expr = AssetHolding.frozen(args[0], args[1])\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 0),\n TealOp(args[1], Op.int, 17),\n TealOp(expr, Op.asset_holding_get, \"AssetFrozen\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_holding_frozen_direct_ref():\n args = [Txn.sender(), Txn.assets[17]]\n expr = AssetHolding.frozen(args[0], args[1])\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.txn, \"Sender\"),\n TealOp(args[1], Op.txna, \"Assets\", 17),\n TealOp(expr, Op.asset_holding_get, \"AssetFrozen\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_holding_frozen_invalid():\n with pytest.raises(TealTypeError):\n AssetHolding.frozen(Txn.sender(), Bytes(\"17\"))\n\n with pytest.raises(TealTypeError):\n AssetHolding.frozen(Int(0), Txn.receiver())\n\n\ndef test_asset_param_total():\n arg = Int(0)\n expr = AssetParam.total(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.uint64\n\n expected = 
TealSimpleBlock(\n [\n TealOp(arg, Op.int, 0),\n TealOp(expr, Op.asset_params_get, \"AssetTotal\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_total_direct_ref():\n arg = Txn.assets[0]\n expr = AssetParam.total(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.txna, \"Assets\", 0),\n TealOp(expr, Op.asset_params_get, \"AssetTotal\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_total_invalid():\n with pytest.raises(TealTypeError):\n AssetParam.total(Txn.sender())\n\n\ndef test_asset_param_decimals():\n arg = Int(0)\n expr = AssetParam.decimals(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 0),\n TealOp(expr, Op.asset_params_get, \"AssetDecimals\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_decimals_direct_ref():\n arg = Txn.assets[0]\n expr = AssetParam.decimals(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.txna, \"Assets\", 0),\n TealOp(expr, Op.asset_params_get, \"AssetDecimals\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_decimals_invalid():\n with pytest.raises(TealTypeError):\n AssetParam.decimals(Txn.sender())\n\n\ndef test_asset_param_default_frozen():\n arg = Int(0)\n expr = AssetParam.defaultFrozen(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 0),\n TealOp(expr, Op.asset_params_get, \"AssetDefaultFrozen\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_default_frozen_direct_ref():\n arg = Txn.assets[0]\n expr = AssetParam.defaultFrozen(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.txna, \"Assets\", 0),\n TealOp(expr, Op.asset_params_get, \"AssetDefaultFrozen\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with 
TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_default_frozen_invalid():\n with pytest.raises(TealTypeError):\n AssetParam.defaultFrozen(Txn.sender())\n\n\ndef test_asset_param_unit_name():\n arg = Int(0)\n expr = AssetParam.unitName(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 0),\n TealOp(expr, Op.asset_params_get, \"AssetUnitName\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_unit_name_direct_ref():\n arg = Txn.assets[0]\n expr = AssetParam.unitName(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.txna, \"Assets\", 0),\n TealOp(expr, Op.asset_params_get, \"AssetUnitName\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_unit_name_invalid():\n with pytest.raises(TealTypeError):\n AssetParam.unitName(Txn.sender())\n\n\ndef test_asset_param_name():\n arg = Int(0)\n expr = AssetParam.name(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 0),\n TealOp(expr, Op.asset_params_get, \"AssetName\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_name_direct_ref():\n arg = Txn.assets[0]\n expr = AssetParam.name(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.txna, \"Assets\", 0),\n TealOp(expr, Op.asset_params_get, \"AssetName\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_name_invalid():\n with pytest.raises(TealTypeError):\n AssetParam.name(Txn.sender())\n\n\ndef test_asset_param_url():\n arg = Int(0)\n expr = AssetParam.url(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 0),\n TealOp(expr, Op.asset_params_get, \"AssetURL\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_url_direct_ref():\n arg = Txn.assets[0]\n expr = AssetParam.url(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = 
TealSimpleBlock(\n [\n TealOp(arg, Op.txna, \"Assets\", 0),\n TealOp(expr, Op.asset_params_get, \"AssetURL\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_url_invalid():\n with pytest.raises(TealTypeError):\n AssetParam.url(Txn.sender())\n\n\ndef test_asset_param_metadata_hash():\n arg = Int(0)\n expr = AssetParam.metadataHash(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 0),\n TealOp(expr, Op.asset_params_get, \"AssetMetadataHash\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_metadata_hash_direct_ref():\n arg = Txn.assets[0]\n expr = AssetParam.metadataHash(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.txna, \"Assets\", 0),\n TealOp(expr, Op.asset_params_get, \"AssetMetadataHash\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_metadata_hash_invalid():\n with pytest.raises(TealTypeError):\n AssetParam.metadataHash(Txn.sender())\n\n\ndef test_asset_param_manager():\n arg = Int(0)\n expr = AssetParam.manager(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 0),\n TealOp(expr, Op.asset_params_get, \"AssetManager\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_manager_direct_ref():\n arg = Txn.assets[0]\n expr = AssetParam.manager(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.txna, \"Assets\", 0),\n TealOp(expr, Op.asset_params_get, \"AssetManager\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_manager_invalid():\n with pytest.raises(TealTypeError):\n AssetParam.manager(Txn.sender())\n\n\ndef test_asset_param_reserve():\n arg = Int(2)\n expr = AssetParam.reserve(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 2),\n TealOp(expr, Op.asset_params_get, \"AssetReserve\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = 
expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_reserve_direct_ref():\n arg = Txn.assets[2]\n expr = AssetParam.reserve(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.txna, \"Assets\", 2),\n TealOp(expr, Op.asset_params_get, \"AssetReserve\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_reserve_invalid():\n with pytest.raises(TealTypeError):\n AssetParam.reserve(Txn.sender())\n\n\ndef test_asset_param_freeze():\n arg = Int(0)\n expr = AssetParam.freeze(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 0),\n TealOp(expr, Op.asset_params_get, \"AssetFreeze\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_freeze_direct_ref():\n arg = Txn.assets[0]\n expr = AssetParam.freeze(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.txna, \"Assets\", 0),\n TealOp(expr, Op.asset_params_get, \"AssetFreeze\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_freeze_invalid():\n with pytest.raises(TealTypeError):\n AssetParam.freeze(Txn.sender())\n\n\ndef test_asset_param_clawback():\n arg = Int(1)\n expr = AssetParam.clawback(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 1),\n TealOp(expr, Op.asset_params_get, \"AssetClawback\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_clawback_direct_ref():\n arg = Txn.assets[1]\n expr = AssetParam.clawback(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.txna, \"Assets\", 1),\n TealOp(expr, Op.asset_params_get, \"AssetClawback\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_clawback_invalid():\n with pytest.raises(TealTypeError):\n AssetParam.clawback(Txn.sender())\n\n\ndef test_asset_param_creator_valid():\n arg = 
Int(1)\n expr = AssetParam.creator(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 1),\n TealOp(expr, Op.asset_params_get, \"AssetCreator\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_asset_param_creator_invalid():\n with pytest.raises(TealTypeError):\n AssetParam.creator(Txn.sender())\n"
},
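Editor's aside, not part of the dataset record above: the asset_test.py entry repeatedly checks the slotOk/slotValue store pattern, which reflects how AssetHolding/AssetParam getters return a MaybeValue that must be evaluated inside a Seq before its hasValue()/value() slots are read. A minimal, hypothetical usage sketch under that assumption (program logic is illustrative only):

from pyteal import *

# Hypothetical program: approve only if foreign asset 0 has a nonzero total supply.
total = AssetParam.total(Int(0))
program = Seq(
    total,                       # runs asset_params_get and fills the ok/value scratch slots
    Assert(total.hasValue()),    # the "slotOk" flag the tests above assert on
    Return(total.value() > Int(0)),
)

print(compileTeal(program, mode=Mode.Application, version=2))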
{
"alpha_fraction": 0.6215971112251282,
"alphanum_fraction": 0.6256805658340454,
"avg_line_length": 35.13114929199219,
"blob_id": "3dc19c734100b005db06eb69a131abd0b2f208f3",
"content_id": "f8716302a28e5acc6d42850c043519c29940905e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2204,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 61,
"path": "/pyteal/ast/gaid.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from typing import cast, Union, TYPE_CHECKING\n\nfrom ..types import TealType, require_type\nfrom ..ir import TealOp, Op, TealBlock\nfrom ..errors import TealInputError, verifyTealVersion\nfrom ..config import MAX_GROUP_SIZE\nfrom .expr import Expr\nfrom .leafexpr import LeafExpr\n\nif TYPE_CHECKING:\n from ..compiler import CompileOptions\n\n\nclass GeneratedID(LeafExpr):\n \"\"\"An expression to obtain the ID of an asset or application created by another transaction in the current group.\"\"\"\n\n def __init__(self, txnIndex: Union[int, Expr]) -> None:\n \"\"\"Create an expression to extract the created ID from a transaction in the current group.\n\n Requires TEAL version 4 or higher. This operation is only permitted in application mode.\n\n Args:\n txnIndex: The index of the transaction from which the created ID should be obtained.\n This index may be a Python int, or it may be a PyTeal expression that evaluates at\n runtime. If it's an expression, it must evaluate to a uint64. In all cases, the index\n must be less than the index of the current transaction.\n \"\"\"\n super().__init__()\n if type(txnIndex) is int:\n if txnIndex < 0 or txnIndex >= MAX_GROUP_SIZE:\n raise TealInputError(\n \"Invalid transaction index {}, shoud be in [0, {})\".format(\n txnIndex, MAX_GROUP_SIZE\n )\n )\n else:\n require_type(cast(Expr, txnIndex).type_of(), TealType.uint64)\n self.txnIndex = txnIndex\n\n def __str__(self):\n return \"(Gaid {})\".format(self.txnIndex)\n\n def __teal__(self, options: \"CompileOptions\"):\n verifyTealVersion(\n Op.gaid.min_version,\n options.version,\n \"TEAL version too low to use Gaid expression\",\n )\n\n if type(self.txnIndex) is int:\n op = TealOp(self, Op.gaid, self.txnIndex)\n return TealBlock.FromOp(options, op)\n\n op = TealOp(self, Op.gaids)\n return TealBlock.FromOp(options, op, cast(Expr, self.txnIndex))\n\n def type_of(self):\n return TealType.uint64\n\n\nGeneratedID.__module__ = \"pyteal\"\n"
},
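Editor's aside, not part of the dataset record above: to make the GeneratedID docstring concrete, here is a hedged sketch of both code paths in __teal__ (a Python int emits the gaid opcode, a uint64 expression emits gaids). It assumes a TEAL v4 application-mode program; the surrounding logic is hypothetical:

from pyteal import *

# Literal index: compiles to the `gaid` opcode.
created_by_first_txn = GeneratedID(0)

# Runtime index: compiles to `gaids`; the expression must evaluate to a uint64.
created_by_previous_txn = GeneratedID(Txn.group_index() - Int(1))

program = Return(created_by_first_txn == created_by_previous_txn)
print(compileTeal(program, mode=Mode.Application, version=4))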
{
"alpha_fraction": 0.6022904515266418,
"alphanum_fraction": 0.612962007522583,
"avg_line_length": 24.443708419799805,
"blob_id": "80a953e20375905a2987c9da1b97dbcb940b1aeb",
"content_id": "ff539a046a9a56ff3d2107124bff89b9bffcb8be",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3842,
"license_type": "permissive",
"max_line_length": 76,
"num_lines": 151,
"path": "/pyteal/ast/while_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\noptions = CompileOptions()\n\n\ndef test_while_compiles():\n\n i = ScratchVar()\n expr = While(Int(2)).Do(Seq([i.store(Int(0))]))\n assert expr.type_of() == TealType.none\n assert not expr.has_return()\n expr.__teal__(options)\n\n\ndef test_nested_whiles_compile():\n i = ScratchVar()\n expr = While(Int(2)).Do(Seq([While(Int(2)).Do(Seq([i.store(Int(0))]))]))\n assert expr.type_of() == TealType.none\n assert not expr.has_return()\n\n\ndef test_continue_break():\n expr = While(Int(0)).Do(Seq([If(Int(1), Break(), Continue())]))\n assert expr.type_of() == TealType.none\n assert not expr.has_return()\n expr.__teal__(options)\n\n\ndef test_while():\n i = ScratchVar()\n i.store(Int(0))\n items = [i.load() < Int(2), [i.store(i.load() + Int(1))]]\n expr = While(items[0]).Do(Seq(items[1]))\n assert expr.type_of() == TealType.none\n assert not expr.has_return()\n\n expected, condEnd = items[0].__teal__(options)\n do, doEnd = Seq(items[1]).__teal__(options)\n expectedBranch = TealConditionalBlock([])\n end = TealSimpleBlock([])\n\n expectedBranch.setTrueBlock(do)\n expectedBranch.setFalseBlock(end)\n condEnd.setNextBlock(expectedBranch)\n doEnd.setNextBlock(expected)\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_while_continue():\n i = ScratchVar()\n i.store(Int(0))\n items = [\n i.load() < Int(2),\n i.store(i.load() + Int(1)),\n If(i.load() == Int(1), Continue()),\n ]\n expr = While(items[0]).Do(Seq(items[1], items[2]))\n assert expr.type_of() == TealType.none\n assert not expr.has_return()\n\n options.enterLoop()\n\n expected, condEnd = items[0].__teal__(options)\n do, doEnd = Seq([items[1], items[2]]).__teal__(options)\n expectedBranch = TealConditionalBlock([])\n end = TealSimpleBlock([])\n\n expectedBranch.setTrueBlock(do)\n expectedBranch.setFalseBlock(end)\n condEnd.setNextBlock(expectedBranch)\n doEnd.setNextBlock(expected)\n\n _, continueBlocks = options.exitLoop()\n\n for block in continueBlocks:\n block.setNextBlock(do)\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_while_break():\n i = ScratchVar()\n i.store(Int(0))\n items = [\n i.load() < Int(2),\n i.store(i.load() + Int(1)),\n If(i.load() == Int(1), Break()),\n ]\n expr = While(items[0]).Do(Seq(items[1], items[2]))\n assert expr.type_of() == TealType.none\n assert not expr.has_return()\n\n options.enterLoop()\n\n expected, condEnd = items[0].__teal__(options)\n do, doEnd = Seq([items[1], items[2]]).__teal__(options)\n expectedBranch = TealConditionalBlock([])\n end = TealSimpleBlock([])\n\n expectedBranch.setTrueBlock(do)\n expectedBranch.setFalseBlock(end)\n condEnd.setNextBlock(expectedBranch)\n doEnd.setNextBlock(expected)\n\n breakBlocks, _ = options.exitLoop()\n\n for block in breakBlocks:\n block.setNextBlock(end)\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_while_invalid():\n\n with pytest.raises(TypeError):\n expr = While()\n\n with pytest.raises(TealCompileError):\n expr = While(Int(2))\n expr.type_of()\n\n with pytest.raises(TealCompileError):\n expr = While(Int(2))\n expr.__teal__(options)\n\n with pytest.raises(TealCompileError):\n expr = While(Int(2))\n expr.type_of()\n\n with pytest.raises(TealCompileError):\n expr = While(Int(2))\n expr.__str__()\n\n with pytest.raises(TealTypeError):\n expr = While(Int(2)).Do(Int(2))\n expr.__str__()\n\n with 
pytest.raises(TealCompileError):\n expr = While(Int(0)).Do(Continue()).Do(Continue())\n expr.__str__()\n"
},
{
"alpha_fraction": 0.6709070801734924,
"alphanum_fraction": 0.6792035102844238,
"avg_line_length": 34.45098114013672,
"blob_id": "b78b15242a12312f091ef26f8163d42677e3ecc8",
"content_id": "7139adc24ea72b44577f40d1642bbac377fc6393",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1808,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 51,
"path": "/pyteal/ir/tealconditionalblock_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from .. import *\n\n\ndef test_constructor():\n block1 = TealConditionalBlock([])\n assert block1.ops == []\n assert block1.trueBlock is None\n assert block1.falseBlock is None\n\n block2 = TealConditionalBlock([TealOp(None, Op.int, 1)])\n assert block2.ops == [TealOp(None, Op.int, 1)]\n assert block2.trueBlock is None\n assert block2.falseBlock is None\n\n\ndef test_true_block():\n block = TealConditionalBlock([])\n block.setTrueBlock(TealSimpleBlock([TealOp(None, Op.substring3)]))\n assert block.trueBlock == TealSimpleBlock([TealOp(None, Op.substring3)])\n assert block.getOutgoing() == [TealSimpleBlock([TealOp(None, Op.substring3)])]\n\n\ndef test_false_block():\n block = TealConditionalBlock([])\n block.setFalseBlock(TealSimpleBlock([TealOp(None, Op.substring3)]))\n assert block.falseBlock == TealSimpleBlock([TealOp(None, Op.substring3)])\n\n\ndef test_outgoing():\n emptyBlock = TealConditionalBlock([])\n assert emptyBlock.getOutgoing() == []\n\n trueBlock = TealConditionalBlock([])\n trueBlock.setTrueBlock(TealSimpleBlock([TealOp(None, Op.byte, '\"true\"')]))\n assert trueBlock.getOutgoing() == [\n TealSimpleBlock([TealOp(None, Op.byte, '\"true\"')])\n ]\n\n falseBlock = TealConditionalBlock([])\n falseBlock.setFalseBlock(TealSimpleBlock([TealOp(None, Op.byte, '\"false\"')]))\n assert falseBlock.getOutgoing() == [\n TealSimpleBlock([TealOp(None, Op.byte, '\"false\"')])\n ]\n\n bothBlock = TealConditionalBlock([])\n bothBlock.setTrueBlock(TealSimpleBlock([TealOp(None, Op.byte, '\"true\"')]))\n bothBlock.setFalseBlock(TealSimpleBlock([TealOp(None, Op.byte, '\"false\"')]))\n assert bothBlock.getOutgoing() == [\n TealSimpleBlock([TealOp(None, Op.byte, '\"true\"')]),\n TealSimpleBlock([TealOp(None, Op.byte, '\"false\"')]),\n ]\n"
},
{
"alpha_fraction": 0.5967759490013123,
"alphanum_fraction": 0.6075048446655273,
"avg_line_length": 26.31664276123047,
"blob_id": "27bbaa22459edf46c285cf191dc8fc1af48c3a5c",
"content_id": "95f6b4ac2cc2ec685a543e21c1db7717978d47bd",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 18548,
"license_type": "permissive",
"max_line_length": 80,
"num_lines": 679,
"path": "/pyteal/ast/app_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\noptions = CompileOptions()\nteal4Options = CompileOptions(version=4)\nteal5Options = CompileOptions(version=5)\n\n\ndef test_on_complete():\n assert OnComplete.NoOp.__teal__(options)[0] == TealSimpleBlock(\n [TealOp(OnComplete.NoOp, Op.int, \"NoOp\")]\n )\n\n assert OnComplete.OptIn.__teal__(options)[0] == TealSimpleBlock(\n [TealOp(OnComplete.OptIn, Op.int, \"OptIn\")]\n )\n\n assert OnComplete.CloseOut.__teal__(options)[0] == TealSimpleBlock(\n [TealOp(OnComplete.CloseOut, Op.int, \"CloseOut\")]\n )\n\n assert OnComplete.ClearState.__teal__(options)[0] == TealSimpleBlock(\n [TealOp(OnComplete.ClearState, Op.int, \"ClearState\")]\n )\n\n assert OnComplete.UpdateApplication.__teal__(options)[0] == TealSimpleBlock(\n [TealOp(OnComplete.UpdateApplication, Op.int, \"UpdateApplication\")]\n )\n\n assert OnComplete.DeleteApplication.__teal__(options)[0] == TealSimpleBlock(\n [TealOp(OnComplete.DeleteApplication, Op.int, \"DeleteApplication\")]\n )\n\n\ndef test_app_id():\n expr = App.id()\n assert expr.type_of() == TealType.uint64\n with TealComponent.Context.ignoreExprEquality():\n assert (\n expr.__teal__(options)[0]\n == Global.current_application_id().__teal__(options)[0]\n )\n\n\ndef test_opted_in():\n args = [Int(1), Int(12)]\n expr = App.optedIn(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 1),\n TealOp(args[1], Op.int, 12),\n TealOp(expr, Op.app_opted_in),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_opted_in_direct_ref():\n args = [Bytes(\"sender address\"), Int(100)]\n expr = App.optedIn(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, '\"sender address\"'),\n TealOp(args[1], Op.int, 100),\n TealOp(expr, Op.app_opted_in),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_opted_in_invalid():\n with pytest.raises(TealTypeError):\n App.optedIn(Bytes(\"sender\"), Bytes(\"100\"))\n\n with pytest.raises(TealTypeError):\n App.optedIn(Int(123456), Bytes(\"364\"))\n\n\ndef test_local_get():\n args = [Int(0), Bytes(\"key\")]\n expr = App.localGet(args[0], args[1])\n assert expr.type_of() == TealType.anytype\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 0),\n TealOp(args[1], Op.byte, '\"key\"'),\n TealOp(expr, Op.app_local_get),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_local_get_direct_ref():\n args = [Txn.sender(), Bytes(\"key\")]\n expr = App.localGet(args[0], args[1])\n assert expr.type_of() == TealType.anytype\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.txn, \"Sender\"),\n TealOp(args[1], Op.byte, '\"key\"'),\n TealOp(expr, Op.app_local_get),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_local_get_invalid():\n with pytest.raises(TealTypeError):\n App.localGet(Txn.sender(), Int(1337))\n\n with pytest.raises(TealTypeError):\n App.localGet(Int(0), Int(1))\n\n\ndef test_local_get_ex():\n args = [Int(0), Int(6), Bytes(\"key\")]\n 
expr = App.localGetEx(args[0], args[1], args[2])\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.anytype\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 0),\n TealOp(args[1], Op.int, 6),\n TealOp(args[2], Op.byte, '\"key\"'),\n TealOp(expr, Op.app_local_get_ex),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_local_get_ex_direct_ref():\n args = [Txn.sender(), Int(6), Bytes(\"key\")]\n expr = App.localGetEx(args[0], args[1], args[2])\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.anytype\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.txn, \"Sender\"),\n TealOp(args[1], Op.int, 6),\n TealOp(args[2], Op.byte, '\"key\"'),\n TealOp(expr, Op.app_local_get_ex),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_local_get_ex_invalid():\n with pytest.raises(TealTypeError):\n App.localGetEx(Txn.sender(), Int(0), Int(0x123456))\n\n with pytest.raises(TealTypeError):\n App.localGetEx(Int(0), Bytes(\"app\"), Bytes(\"key\"))\n\n\ndef test_global_get():\n arg = Bytes(\"key\")\n expr = App.globalGet(arg)\n assert expr.type_of() == TealType.anytype\n\n expected = TealSimpleBlock(\n [TealOp(arg, Op.byte, '\"key\"'), TealOp(expr, Op.app_global_get)]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_global_get_invalid():\n with pytest.raises(TealTypeError):\n App.globalGet(Int(7))\n\n\ndef test_global_get_ex():\n args = [Int(6), Bytes(\"key\")]\n expr = App.globalGetEx(args[0], args[1])\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.anytype\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 6),\n TealOp(args[1], Op.byte, '\"key\"'),\n TealOp(expr, Op.app_global_get_ex),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_global_get_ex_direct_ref():\n args = [Txn.applications[0], Bytes(\"key\")]\n expr = App.globalGetEx(args[0], args[1])\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.anytype\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.txna, \"Applications\", 0),\n TealOp(args[1], Op.byte, '\"key\"'),\n TealOp(expr, Op.app_global_get_ex),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_global_get_ex_invalid():\n with pytest.raises(TealTypeError):\n App.globalGetEx(Bytes(\"app\"), Int(12))\n\n with pytest.raises(TealTypeError):\n App.globalGetEx(Int(0), Int(1))\n\n\ndef test_local_put():\n args = [Int(0), Bytes(\"key\"), Int(5)]\n expr = App.localPut(args[0], 
args[1], args[2])\n assert expr.type_of() == TealType.none\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 0),\n TealOp(args[1], Op.byte, '\"key\"'),\n TealOp(args[2], Op.int, 5),\n TealOp(expr, Op.app_local_put),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_local_put_direct_ref():\n args = [Txn.sender(), Bytes(\"key\"), Int(5)]\n expr = App.localPut(args[0], args[1], args[2])\n assert expr.type_of() == TealType.none\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.txn, \"Sender\"),\n TealOp(args[1], Op.byte, '\"key\"'),\n TealOp(args[2], Op.int, 5),\n TealOp(expr, Op.app_local_put),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_local_put_invalid():\n with pytest.raises(TealTypeError):\n App.localPut(Txn.sender(), Int(55), Int(5))\n\n with pytest.raises(TealTypeError):\n App.localPut(Int(1), Int(0), Int(5))\n\n with pytest.raises(TealTypeError):\n App.localPut(Int(1), Bytes(\"key\"), Pop(Int(1)))\n\n\ndef test_global_put():\n args = [Bytes(\"key\"), Int(5)]\n expr = App.globalPut(args[0], args[1])\n assert expr.type_of() == TealType.none\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, '\"key\"'),\n TealOp(args[1], Op.int, 5),\n TealOp(expr, Op.app_global_put),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_global_put_invalid():\n with pytest.raises(TealTypeError):\n App.globalPut(Int(0), Int(5))\n\n with pytest.raises(TealTypeError):\n App.globalPut(Bytes(\"key\"), Pop(Int(1)))\n\n\ndef test_local_del():\n args = [Int(0), Bytes(\"key\")]\n expr = App.localDel(args[0], args[1])\n assert expr.type_of() == TealType.none\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 0),\n TealOp(args[1], Op.byte, '\"key\"'),\n TealOp(expr, Op.app_local_del),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_local_del_direct_ref():\n args = [Txn.sender(), Bytes(\"key\")]\n expr = App.localDel(args[0], args[1])\n assert expr.type_of() == TealType.none\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.txn, \"Sender\"),\n TealOp(args[1], Op.byte, '\"key\"'),\n TealOp(expr, Op.app_local_del),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_local_del_invalid():\n with pytest.raises(TealTypeError):\n App.localDel(Txn.sender(), Int(123))\n\n with pytest.raises(TealTypeError):\n App.localDel(Int(1), Int(2))\n\n\ndef test_global_del():\n arg = Bytes(\"key\")\n expr = App.globalDel(arg)\n assert expr.type_of() == TealType.none\n\n expected = TealSimpleBlock(\n [TealOp(arg, Op.byte, '\"key\"'), TealOp(expr, Op.app_global_del)]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_global_del_invalid():\n with pytest.raises(TealTypeError):\n App.globalDel(Int(2))\n\n\ndef test_app_param_approval_program_valid():\n arg = Int(1)\n expr = AppParam.approvalProgram(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 1),\n 
TealOp(expr, Op.app_params_get, \"AppApprovalProgram\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_app_param_approval_program_invalid():\n with pytest.raises(TealTypeError):\n AppParam.approvalProgram(Txn.sender())\n\n\ndef test_app_param_clear_state_program_valid():\n arg = Int(0)\n expr = AppParam.clearStateProgram(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 0),\n TealOp(expr, Op.app_params_get, \"AppClearStateProgram\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_app_param_clear_state_program_invalid():\n with pytest.raises(TealTypeError):\n AppParam.clearStateProgram(Txn.sender())\n\n\ndef test_app_param_global_num_unit_valid():\n arg = Int(1)\n expr = AppParam.globalNumUnit(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 1),\n TealOp(expr, Op.app_params_get, \"AppGlobalNumUnit\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_app_param_global_num_unit_invalid():\n with pytest.raises(TealTypeError):\n AppParam.globalNumUnit(Txn.sender())\n\n\ndef test_app_param_global_num_byte_slice_valid():\n arg = Int(1)\n expr = AppParam.globalNumByteSlice(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 1),\n TealOp(expr, Op.app_params_get, \"AppGlobalNumByteSlice\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_app_param_global_num_byte_slice_invalid():\n with pytest.raises(TealTypeError):\n AppParam.globalNumByteSlice(Txn.sender())\n\n\ndef test_app_param_local_num_unit_valid():\n arg = Int(1)\n expr = AppParam.localNumUnit(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 1),\n TealOp(expr, Op.app_params_get, \"AppLocalNumUnit\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_app_param_local_num_unit_invalid():\n with pytest.raises(TealTypeError):\n AppParam.localNumUnit(Txn.sender())\n\n\ndef test_app_param_local_num_byte_slice_valid():\n arg = Int(1)\n expr = AppParam.localNumByteSlice(arg)\n assert expr.type_of() == TealType.none\n assert 
expr.value().type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 1),\n TealOp(expr, Op.app_params_get, \"AppLocalNumByteSlice\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_app_param_local_num_byte_slice_invalid():\n with pytest.raises(TealTypeError):\n AppParam.localNumByteSlice(Txn.sender())\n\n\ndef test_app_param_extra_programs_page_valid():\n arg = Int(1)\n expr = AppParam.extraProgramPages(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 1),\n TealOp(expr, Op.app_params_get, \"AppExtraProgramPages\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_app_param_extra_program_pages_invalid():\n with pytest.raises(TealTypeError):\n AppParam.extraProgramPages(Txn.sender())\n\n\ndef test_app_param_creator_valid():\n arg = Int(1)\n expr = AppParam.creator(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 1),\n TealOp(expr, Op.app_params_get, \"AppCreator\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_app_param_creator_invalid():\n with pytest.raises(TealTypeError):\n AppParam.creator(Txn.sender())\n\n\ndef test_app_param_address_valid():\n arg = Int(1)\n expr = AppParam.address(arg)\n assert expr.type_of() == TealType.none\n assert expr.value().type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(arg, Op.int, 1),\n TealOp(expr, Op.app_params_get, \"AppAddress\"),\n TealOp(None, Op.store, expr.slotOk),\n TealOp(None, Op.store, expr.slotValue),\n ]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_app_param_address_invalid():\n with pytest.raises(TealTypeError):\n AppParam.address(Txn.sender())\n"
},
{
"alpha_fraction": 0.5987498760223389,
"alphanum_fraction": 0.61679607629776,
"avg_line_length": 28.876506805419922,
"blob_id": "47e6ad57048818ec962f3d1d2cbbc7db35cf681b",
"content_id": "b7157ab164ce9383e1fd82ed77bf47d7f7af43c6",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9919,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 332,
"path": "/pyteal/ast/subroutine_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from typing import List\nimport pytest\n\nfrom .. import *\nfrom .subroutine import evaluateSubroutine\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions, Return\n\noptions = CompileOptions(version=4)\n\n\ndef test_subroutine_definition():\n def fn0Args():\n return Return()\n\n def fn1Args(a1):\n return Return()\n\n def fn2Args(a1, a2):\n return Return()\n\n def fn10Args(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10):\n return Return()\n\n lam0Args = lambda: Return()\n lam1Args = lambda a1: Return()\n lam2Args = lambda a1, a2: Return()\n lam10Args = lambda a1, a2, a3, a4, a5, a6, a7, a8, a9, a10: Return()\n\n cases = (\n (fn0Args, 0, \"fn0Args\"),\n (fn1Args, 1, \"fn1Args\"),\n (fn2Args, 2, \"fn2Args\"),\n (fn10Args, 10, \"fn10Args\"),\n (lam0Args, 0, \"<lambda>\"),\n (lam1Args, 1, \"<lambda>\"),\n (lam2Args, 2, \"<lambda>\"),\n (lam10Args, 10, \"<lambda>\"),\n )\n\n for (fn, numArgs, name) in cases:\n definition = SubroutineDefinition(fn, TealType.none)\n assert definition.argumentCount() == numArgs\n assert definition.name() == name\n\n if numArgs > 0:\n with pytest.raises(TealInputError):\n definition.invoke([Int(1)] * (numArgs - 1))\n\n with pytest.raises(TealInputError):\n definition.invoke([Int(1)] * (numArgs + 1))\n\n if numArgs > 0:\n with pytest.raises(TealInputError):\n definition.invoke([1] * numArgs)\n\n args = [Int(1)] * numArgs\n invocation = definition.invoke(args)\n assert isinstance(invocation, SubroutineCall)\n assert invocation.subroutine is definition\n assert invocation.args == args\n\n\ndef test_subroutine_definition_invalid():\n def fnWithDefaults(a, b=None):\n return Return()\n\n def fnWithKeywordArgs(a, *, b):\n return Return()\n\n def fnWithVariableArgs(a, *b):\n return Return()\n\n cases = (\n 1,\n None,\n fnWithDefaults,\n fnWithKeywordArgs,\n fnWithVariableArgs,\n )\n\n for case in cases:\n with pytest.raises(TealInputError):\n SubroutineDefinition(case, TealType.none)\n\n\ndef test_subroutine_declaration():\n cases = (\n (TealType.none, Return()),\n (TealType.uint64, Return(Int(1))),\n (TealType.uint64, Int(1)),\n (TealType.bytes, Bytes(\"value\")),\n (TealType.anytype, App.globalGet(Bytes(\"key\"))),\n )\n\n for (returnType, value) in cases:\n\n def mySubroutine():\n return value\n\n definition = SubroutineDefinition(mySubroutine, returnType)\n\n declaration = SubroutineDeclaration(definition, value)\n assert declaration.type_of() == value.type_of()\n assert declaration.has_return() == value.has_return()\n\n options.currentSubroutine = definition\n assert declaration.__teal__(options) == value.__teal__(options)\n options.setSubroutine(None)\n\n\ndef test_subroutine_call():\n def mySubroutine():\n return Return()\n\n returnTypes = (TealType.uint64, TealType.bytes, TealType.anytype, TealType.none)\n\n argCases = (\n [],\n [Int(1)],\n [Int(1), Bytes(\"value\")],\n )\n\n for returnType in returnTypes:\n definition = SubroutineDefinition(mySubroutine, returnType)\n\n for args in argCases:\n expr = SubroutineCall(definition, args)\n\n assert expr.type_of() == returnType\n assert not expr.has_return()\n\n expected, _ = TealBlock.FromOp(\n options, TealOp(expr, Op.callsub, definition), *args\n )\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_decorator():\n assert callable(Subroutine)\n assert callable(Subroutine(TealType.anytype))\n\n @Subroutine(TealType.none)\n def mySubroutine(a):\n return Return()\n\n assert callable(mySubroutine)\n\n invocation = mySubroutine(Int(1))\n assert 
isinstance(invocation, SubroutineCall)\n\n with pytest.raises(TealInputError):\n mySubroutine()\n\n with pytest.raises(TealInputError):\n mySubroutine(Int(1), Int(2))\n\n with pytest.raises(TealInputError):\n mySubroutine(Pop(Int(1)))\n\n with pytest.raises(TealInputError):\n mySubroutine(1)\n\n with pytest.raises(TealInputError):\n mySubroutine(a=Int(1))\n\n\ndef test_evaluate_subroutine_no_args():\n cases = (\n (TealType.none, Return()),\n (TealType.uint64, Int(1) + Int(2)),\n (TealType.uint64, Return(Int(1) + Int(2))),\n (TealType.bytes, Bytes(\"value\")),\n (TealType.bytes, Return(Bytes(\"value\"))),\n )\n\n for (returnType, returnValue) in cases:\n\n def mySubroutine():\n return returnValue\n\n definition = SubroutineDefinition(mySubroutine, returnType)\n\n declaration = evaluateSubroutine(definition)\n assert isinstance(declaration, SubroutineDeclaration)\n assert declaration.subroutine is definition\n\n assert declaration.type_of() == returnValue.type_of()\n assert declaration.has_return() == returnValue.has_return()\n\n options.setSubroutine(definition)\n expected, _ = Seq([returnValue]).__teal__(options)\n\n actual, _ = declaration.__teal__(options)\n options.setSubroutine(None)\n assert actual == expected\n\n\ndef test_evaluate_subroutine_1_arg():\n cases = (\n (TealType.none, Return()),\n (TealType.uint64, Int(1) + Int(2)),\n (TealType.uint64, Return(Int(1) + Int(2))),\n (TealType.bytes, Bytes(\"value\")),\n (TealType.bytes, Return(Bytes(\"value\"))),\n )\n\n for (returnType, returnValue) in cases:\n argSlots: List[ScratchSlot] = []\n\n def mySubroutine(a1):\n assert isinstance(a1, ScratchLoad)\n argSlots.append(a1.slot)\n return returnValue\n\n definition = SubroutineDefinition(mySubroutine, returnType)\n\n declaration = evaluateSubroutine(definition)\n assert isinstance(declaration, SubroutineDeclaration)\n assert declaration.subroutine is definition\n\n assert declaration.type_of() == returnValue.type_of()\n assert declaration.has_return() == returnValue.has_return()\n\n assert isinstance(declaration.body, Seq)\n assert len(declaration.body.args) == 2\n\n assert isinstance(declaration.body.args[0], ScratchStackStore)\n\n assert declaration.body.args[0].slot is argSlots[-1]\n\n options.setSubroutine(definition)\n expected, _ = Seq([declaration.body.args[0], returnValue]).__teal__(options)\n\n actual, _ = declaration.__teal__(options)\n options.setSubroutine(None)\n assert actual == expected\n\n\ndef test_evaluate_subroutine_2_args():\n cases = (\n (TealType.none, Return()),\n (TealType.uint64, Int(1) + Int(2)),\n (TealType.uint64, Return(Int(1) + Int(2))),\n (TealType.bytes, Bytes(\"value\")),\n (TealType.bytes, Return(Bytes(\"value\"))),\n )\n\n for (returnType, returnValue) in cases:\n argSlots: List[ScratchSlot] = []\n\n def mySubroutine(a1, a2):\n assert isinstance(a1, ScratchLoad)\n argSlots.append(a1.slot)\n assert isinstance(a2, ScratchLoad)\n argSlots.append(a2.slot)\n return returnValue\n\n definition = SubroutineDefinition(mySubroutine, returnType)\n\n declaration = evaluateSubroutine(definition)\n assert isinstance(declaration, SubroutineDeclaration)\n assert declaration.subroutine is definition\n\n assert declaration.type_of() == returnValue.type_of()\n assert declaration.has_return() == returnValue.has_return()\n\n assert isinstance(declaration.body, Seq)\n assert len(declaration.body.args) == 3\n\n assert isinstance(declaration.body.args[0], ScratchStackStore)\n assert isinstance(declaration.body.args[1], ScratchStackStore)\n\n assert 
declaration.body.args[0].slot is argSlots[-1]\n assert declaration.body.args[1].slot is argSlots[-2]\n\n options.setSubroutine(definition)\n expected, _ = Seq(\n [declaration.body.args[0], declaration.body.args[1], returnValue]\n ).__teal__(options)\n\n actual, _ = declaration.__teal__(options)\n options.setSubroutine(None)\n assert actual == expected\n\n\ndef test_evaluate_subroutine_10_args():\n cases = (\n (TealType.none, Return()),\n (TealType.uint64, Int(1) + Int(2)),\n (TealType.uint64, Return(Int(1) + Int(2))),\n (TealType.bytes, Bytes(\"value\")),\n (TealType.bytes, Return(Bytes(\"value\"))),\n )\n\n for (returnType, returnValue) in cases:\n argSlots: List[ScratchSlot] = []\n\n def mySubroutine(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10):\n for a in (a1, a2, a3, a4, a5, a6, a7, a8, a9, a10):\n assert isinstance(a, ScratchLoad)\n argSlots.append(a.slot)\n return returnValue\n\n definition = SubroutineDefinition(mySubroutine, returnType)\n\n declaration = evaluateSubroutine(definition)\n assert isinstance(declaration, SubroutineDeclaration)\n assert declaration.subroutine is definition\n\n assert declaration.type_of() == returnValue.type_of()\n assert declaration.has_return() == returnValue.has_return()\n\n assert isinstance(declaration.body, Seq)\n assert len(declaration.body.args) == 11\n\n for i in range(10):\n assert isinstance(declaration.body.args[i], ScratchStackStore)\n\n for i in range(10):\n assert declaration.body.args[i].slot is argSlots[-i - 1]\n\n options.setSubroutine(definition)\n expected, _ = Seq(declaration.body.args[:10] + [returnValue]).__teal__(options)\n\n actual, _ = declaration.__teal__(options)\n options.setSubroutine(None)\n assert actual == expected\n"
},
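Editor's aside, not part of the dataset record above: subroutine_test.py exercises the Subroutine decorator and the callsub lowering piece by piece; for orientation, the same flow composes end to end as in this hypothetical sketch (names and logic are illustrative, TEAL v4 assumed since callsub requires it):

from pyteal import *

@Subroutine(TealType.uint64)
def add(a, b):
    # The body is an ordinary PyTeal expression; arguments arrive as scratch loads,
    # matching the ScratchStackStore/ScratchLoad assertions in the tests above.
    return a + b

program = Return(add(Int(2), Int(3)) == Int(5))
print(compileTeal(program, mode=Mode.Application, version=4))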
{
"alpha_fraction": 0.6150943636894226,
"alphanum_fraction": 0.6150943636894226,
"avg_line_length": 16.66666603088379,
"blob_id": "1333600ce6cccb1ac7803ab43cac27c6baaa4af5",
"content_id": "4b8c8c5b3b5e69964f1dd6deda464c9558196476",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 265,
"license_type": "permissive",
"max_line_length": 27,
"num_lines": 15,
"path": "/pyteal/compiler/__init__.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from .compiler import (\n MAX_TEAL_VERSION,\n MIN_TEAL_VERSION,\n DEFAULT_TEAL_VERSION,\n CompileOptions,\n compileTeal,\n)\n\n__all__ = [\n \"MAX_TEAL_VERSION\",\n \"MIN_TEAL_VERSION\",\n \"DEFAULT_TEAL_VERSION\",\n \"CompileOptions\",\n \"compileTeal\",\n]\n"
},
{
"alpha_fraction": 0.5467037558555603,
"alphanum_fraction": 0.5630544424057007,
"avg_line_length": 21.813491821289062,
"blob_id": "3c806e87bad8b4e2150be5ec510b9dc637eaa6df",
"content_id": "674c6d1193eba3656e83e744bc8aa6754a9050ce",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5749,
"license_type": "permissive",
"max_line_length": 63,
"num_lines": 252,
"path": "/pyteal/ast/naryexpr_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\noptions = CompileOptions()\n\n\ndef test_and_one():\n arg = Int(1)\n expr = And(arg)\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(arg, Op.int, 1)])\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_and_two():\n args = [Int(1), Int(2)]\n expr = And(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 1),\n TealOp(args[1], Op.int, 2),\n TealOp(expr, Op.logic_and),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_and_three():\n args = [Int(1), Int(2), Int(3)]\n expr = And(args[0], args[1], args[2])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 1),\n TealOp(args[1], Op.int, 2),\n TealOp(expr, Op.logic_and),\n TealOp(args[2], Op.int, 3),\n TealOp(expr, Op.logic_and),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_and_overload():\n args = [Int(1), Int(2)]\n expr = args[0].And(args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 1),\n TealOp(args[1], Op.int, 2),\n TealOp(expr, Op.logic_and),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_and_invalid():\n with pytest.raises(TealInputError):\n And()\n\n with pytest.raises(TealTypeError):\n And(Int(1), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n And(Txn.receiver(), Int(1))\n\n with pytest.raises(TealTypeError):\n And(Txn.receiver(), Txn.receiver())\n\n\ndef test_or_one():\n arg = Int(1)\n expr = Or(arg)\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock([TealOp(arg, Op.int, 1)])\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_or_two():\n args = [Int(1), Int(0)]\n expr = Or(args[0], args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 1),\n TealOp(args[1], Op.int, 0),\n TealOp(expr, Op.logic_or),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_or_three():\n args = [Int(0), Int(1), Int(2)]\n expr = Or(args[0], args[1], args[2])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 0),\n TealOp(args[1], Op.int, 1),\n TealOp(expr, Op.logic_or),\n TealOp(args[2], Op.int, 2),\n TealOp(expr, Op.logic_or),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_or_overload():\n args = [Int(1), Int(0)]\n expr = args[0].Or(args[1])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 1),\n TealOp(args[1], Op.int, 0),\n TealOp(expr, Op.logic_or),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_or_invalid():\n with pytest.raises(TealInputError):\n Or()\n\n with 
pytest.raises(TealTypeError):\n Or(Int(1), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n Or(Txn.receiver(), Int(1))\n\n with pytest.raises(TealTypeError):\n Or(Txn.receiver(), Txn.receiver())\n\n\ndef test_concat_one():\n arg = Bytes(\"a\")\n expr = Concat(arg)\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock([TealOp(arg, Op.byte, '\"a\"')])\n\n actual, _ = expr.__teal__(options)\n\n assert actual == expected\n\n\ndef test_concat_two():\n args = [Bytes(\"a\"), Bytes(\"b\")]\n expr = Concat(args[0], args[1])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, '\"a\"'),\n TealOp(args[1], Op.byte, '\"b\"'),\n TealOp(expr, Op.concat),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_concat_three():\n args = [Bytes(\"a\"), Bytes(\"b\"), Bytes(\"c\")]\n expr = Concat(args[0], args[1], args[2])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, '\"a\"'),\n TealOp(args[1], Op.byte, '\"b\"'),\n TealOp(expr, Op.concat),\n TealOp(args[2], Op.byte, '\"c\"'),\n TealOp(expr, Op.concat),\n ]\n )\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_concat_invalid():\n with pytest.raises(TealInputError):\n Concat()\n\n with pytest.raises(TealTypeError):\n Concat(Int(1), Txn.receiver())\n\n with pytest.raises(TealTypeError):\n Concat(Txn.receiver(), Int(1))\n\n with pytest.raises(TealTypeError):\n Concat(Int(1), Int(2))\n"
},
{
"alpha_fraction": 0.6652512550354004,
"alphanum_fraction": 0.6843594908714294,
"avg_line_length": 24.23214340209961,
"blob_id": "9ce7c19bd4cd31cbd19f42b15d29820313474367",
"content_id": "f8b344a31e294b8697abebb2c9b97c95e6a9353e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1413,
"license_type": "permissive",
"max_line_length": 84,
"num_lines": 56,
"path": "/pyteal/ast/gload_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import MAX_GROUP_SIZE, NUM_SLOTS, CompileOptions\n\nteal3Options = CompileOptions(version=3)\nteal4Options = CompileOptions(version=4)\n\n\ndef test_gload_teal_3():\n with pytest.raises(TealInputError):\n ImportScratchValue(0, 1).__teal__(teal3Options)\n\n with pytest.raises(TealInputError):\n ImportScratchValue(Int(0), 1).__teal__(teal3Options)\n\n\ndef test_gload():\n expr = ImportScratchValue(0, 1)\n assert expr.type_of() == TealType.anytype\n\n expected = TealSimpleBlock([TealOp(expr, Op.gload, 0, 1)])\n\n actual, _ = expr.__teal__(teal4Options)\n\n assert actual == expected\n\n\ndef test_gload_dynamic():\n arg = Int(1)\n expr = ImportScratchValue(arg, 0)\n assert expr.type_of() == TealType.anytype\n\n expected = TealSimpleBlock([TealOp(arg, Op.int, 1), TealOp(expr, Op.gloads, 0)])\n\n actual, _ = expr.__teal__(teal4Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_gload_invalid():\n with pytest.raises(TealInputError):\n ImportScratchValue(-1, 0)\n\n with pytest.raises(TealInputError):\n ImportScratchValue(MAX_GROUP_SIZE, 0)\n\n with pytest.raises(TealInputError):\n ImportScratchValue(0, -1)\n\n with pytest.raises(TealInputError):\n ImportScratchValue(0, NUM_SLOTS)\n"
},
{
"alpha_fraction": 0.6059957146644592,
"alphanum_fraction": 0.6059957146644592,
"avg_line_length": 22.74576187133789,
"blob_id": "6818ca8e779c2f837ccda4a4e83e7180901f66b4",
"content_id": "14f26d4a7d209cae4315f8589d504389502a5f76",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1401,
"license_type": "permissive",
"max_line_length": 88,
"num_lines": 59,
"path": "/pyteal/ir/tealcomponent.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from abc import ABC, abstractmethod\nfrom typing import List, Optional, TYPE_CHECKING\nfrom contextlib import AbstractContextManager\n\nif TYPE_CHECKING:\n from ..ast import Expr, ScratchSlot, SubroutineDefinition\n\n\nclass TealComponent(ABC):\n def __init__(self, expr: Optional[\"Expr\"]):\n self.expr = expr\n\n def getSlots(self) -> List[\"ScratchSlot\"]:\n return []\n\n def assignSlot(self, slot: \"ScratchSlot\", location: int) -> None:\n pass\n\n def getSubroutines(self) -> List[\"SubroutineDefinition\"]:\n return []\n\n def resolveSubroutine(self, subroutine: \"SubroutineDefinition\", label: str) -> None:\n pass\n\n @abstractmethod\n def assemble(self) -> str:\n pass\n\n @abstractmethod\n def __repr__(self) -> str:\n pass\n\n @abstractmethod\n def __hash__(self) -> int:\n pass\n\n @abstractmethod\n def __eq__(self, other: object) -> bool:\n pass\n\n class Context:\n\n checkExpr = True\n\n class EqualityContext(AbstractContextManager):\n def __enter__(self):\n TealComponent.Context.checkExpr = False\n return self\n\n def __exit__(self, *args):\n TealComponent.Context.checkExpr = True\n return None\n\n @classmethod\n def ignoreExprEquality(cls):\n return cls.EqualityContext()\n\n\nTealComponent.__module__ = \"pyteal\"\n"
},
{
"alpha_fraction": 0.614556610584259,
"alphanum_fraction": 0.614556610584259,
"avg_line_length": 24.337661743164062,
"blob_id": "55145b6700b1106be2f37fbd82721a43eeb4a358",
"content_id": "a32cb4d5ae5ed92b90567e3ee2dc42406f21cad9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1951,
"license_type": "permissive",
"max_line_length": 98,
"num_lines": 77,
"path": "/pyteal/errors.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from typing import List, Optional, TYPE_CHECKING\n\nif TYPE_CHECKING:\n from .ast import Expr\n\n\nclass TealInternalError(Exception):\n def __init__(self, message: str) -> None:\n self.message = message\n\n def __str__(self):\n return self.message\n\n\nTealInternalError.__module__ = \"pyteal\"\n\n\nclass TealTypeError(Exception):\n def __init__(self, actual, expected) -> None:\n self.message = \"{} while expected {} \".format(actual, expected)\n\n def __str__(self) -> str:\n return self.message\n\n\nTealTypeError.__module__ = \"pyteal\"\n\n\nclass TealInputError(Exception):\n def __init__(self, msg: str) -> None:\n self.message = msg\n\n def __str__(self) -> str:\n return self.message\n\n\nTealInputError.__module__ = \"pyteal\"\n\n\nclass TealCompileError(Exception):\n def __init__(self, msg: str, sourceExpr: Optional[\"Expr\"]) -> None:\n self.msg = msg\n self.sourceExpr = sourceExpr\n\n def __str__(self) -> str:\n if self.sourceExpr is None:\n return self.msg\n trace = self.sourceExpr.getDefinitionTrace()\n return (\n self.msg\n + \"\\nTraceback of origin expression (most recent call last):\\n\"\n + \"\".join(trace)\n )\n\n def __eq__(self, other) -> bool:\n if not isinstance(other, TealCompileError):\n return False\n return self.msg == other.msg and self.sourceExpr is other.sourceExpr\n\n\nTealCompileError.__module__ = \"pyteal\"\n\n\ndef verifyTealVersion(minVersion: int, version: int, msg: str):\n if minVersion > version:\n msg = \"{}. Minimum version needed is {}, but current version being compiled is {}\".format(\n msg, minVersion, version\n )\n raise TealInputError(msg)\n\n\ndef verifyFieldVersion(fieldName: str, fieldMinVersion: int, version: int):\n verifyTealVersion(\n fieldMinVersion,\n version,\n \"TEAL version too low to use field {}\".format(fieldName),\n )\n"
},
{
"alpha_fraction": 0.7050229907035828,
"alphanum_fraction": 0.7398227453231812,
"avg_line_length": 54.88990783691406,
"blob_id": "afe65c67cb3ac1f2817ffb5964d2bab25f097de0",
"content_id": "f9d09f38a988ec122c59c72efa4a31088334f209",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 6092,
"license_type": "permissive",
"max_line_length": 226,
"num_lines": 109,
"path": "/CHANGELOG.md",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "# 0.9.0\n\n## Added\n* Support for new TEAL 5 features:\n * `AppParam` expressions ([#107](https://github.com/algorand/pyteal/pull/107), [#123](https://github.com/algorand/pyteal/pull/123))\n * New `nonparticipation` transaction field ([#106](https://github.com/algorand/pyteal/pull/106))\n * Inner transactions, zero-element `Seq` expressions, dynamic transaction array access ([#115](https://github.com/algorand/pyteal/pull/115))\n * Logs, dynamic LogicSig argument indexes, single-element `NaryExpr`s, and creating `Bytes` from `bytes` and `bytearray` ([#117](https://github.com/algorand/pyteal/pull/117))\n * Extract expressions ([#118](https://github.com/algorand/pyteal/pull/118))\n * More efficient implementation of recursive subroutines in TEAL 5+ ([#114](https://github.com/algorand/pyteal/pull/114))\n* Add `WideRatio`, an expression which exposes `mulw` and `divmodw` ([#121](https://github.com/algorand/pyteal/pull/121), [#122](https://github.com/algorand/pyteal/pull/122))\n\n## Changed\n* **WARNING**: Due to code generation improvements, programs compiled with this version will likely\n produce different TEAL code than previous versions, but their functionality will be the same. Be\n aware that even small differences in generated TEAL code will change the address associated with\n escrow LogicSig contracts.\n* Some unnecessary branch conditions have been removed ([#120](https://github.com/algorand/pyteal/pull/120))\n\n# 0.8.0\n\n## Added\n* Support for new TEAL 4 features:\n * Basic ops ([#67](https://github.com/algorand/pyteal/pull/67))\n * Byteslice arithmetic ([#75](https://github.com/algorand/pyteal/pull/75))\n * Importing scratch slot values from previous app calls ([#79](https://github.com/algorand/pyteal/pull/79), [#83](https://github.com/algorand/pyteal/pull/83))\n * Direct reference support for applications/accounts/assets ([#90](https://github.com/algorand/pyteal/pull/90))\n * `While` and `For` loops ([#95](https://github.com/algorand/pyteal/pull/95))\n * Subroutines ([#99](https://github.com/algorand/pyteal/pull/99))\n* New logo ([#88](https://github.com/algorand/pyteal/pull/88), [#91](https://github.com/algorand/pyteal/pull/91))\n* Added the `assembleConstants` option to `compileTeal`. When enabled, the compiler will assemble\nint and byte constants in the most efficient way to reduce program size ([#57](https://github.com/algorand/pyteal/pull/57), [#61](https://github.com/algorand/pyteal/pull/61), [#66](https://github.com/algorand/pyteal/pull/66)).\n* Added an alternative syntax for constructing `If` statements ([#77](https://github.com/algorand/pyteal/pull/77), [#82](https://github.com/algorand/pyteal/pull/82)).\n* Align `Seq` with the rest of the API ([#96](https://github.com/algorand/pyteal/pull/96)).\n\n## Fixed\n* Fixed `NaryExpr.__str__` method ([#102](https://github.com/algorand/pyteal/pull/102)).\n\n## Changed\n* **WARNING**: Due to code generation changes required to support TEAL 4 loops and subroutines,\n programs compiled with this version will likely produce different TEAL code than previous\n versions, but their functionality will be the same. 
Be aware that even small differences in\n generated TEAL code will change the address associated with escrow LogicSig contracts.\n* Improved crypto cost docs ([#81](https://github.com/algorand/pyteal/pull/81)).\n* Applied code formatter ([#100](https://github.com/algorand/pyteal/pull/100)).\n\n# 0.7.0\n\n## Added\n* Support for new TEAL 3 features:\n * Bit/byte manipulation and new transaction and global fields ([#50](https://github.com/algorand/pyteal/pull/50)).\n * Dynamic `Gtxn` indexes ([#53](https://github.com/algorand/pyteal/pull/53)).\n * `MinBalance` expression ([#54](https://github.com/algorand/pyteal/pull/54)).\n * Documentation for new features ([#55](https://github.com/algorand/pyteal/pull/55)).\n* Added the ability to specify the TEAL version target when using `compileTeal` ([#45](https://github.com/algorand/pyteal/pull/45)).\n* Added `ScratchVar`, an interface for storing and loading values from scratch space ([#33](https://github.com/algorand/pyteal/pull/33)).\n* Added a warning when scratch slots are loaded before anything has been stored ([#47](https://github.com/algorand/pyteal/pull/47)).\n\n## Changed\n* Rewrote internal code generation to produce smaller programs and make future optimization easier\n([#26](https://github.com/algorand/pyteal/pull/26)). Programs compiled with this version will likely\nproduce different TEAL code than previous versions, but their functionality will be the same.\n\n# 0.6.2\n\n## Fixed\n* Corrected documentation and examples that incorrectly used the `Txn.accounts` array ([#42](https://github.com/algorand/pyteal/pull/42)).\n* Fixed improper base32 validation and allow the use of padding ([#34](https://github.com/algorand/pyteal/pull/34)\nand [#37](https://github.com/algorand/pyteal/pull/37)).\n\n# 0.6.1\n\n## Added\n* An application deployment example, `vote_deploy.py`.\n\n## Fixed\n* Internal modules no longer pollute the global namespace when importing with `from pyteal import *`\n([#29](https://github.com/algorand/pyteal/pull/29)).\n* Fixed several documentation typos.\n\n## Changed\n* Moved signature and application mode examples into separate folders.\n\n# 0.6.0\n\n## Added\n* TEAL v2 `Txn` and `Gtxn` fields\n* TEAL v2 `Global` fields\n* `TxnType` enum\n* `Pop` expression\n* `Not` expression\n* `BitwiseNot` expression\n* `BitwiseAnd` expression\n* `BitwiseOr` expression\n* `BitwiseXor` expression\n* `Neq` (not equal) expression\n* `Assert` expression\n* `AssetHolding` expressions\n* `AssetParam` expressions\n* State manipulation with `App` expressions\n* `Concat` expression\n* `Substring` expression\n* `Bytes` constructor now accepts UTF-8 strings\n* `If` expression now allows single branches\n\n## Changed\n* Compiling a PyTeal program must now be done with the `compileTeal(program, mode)` function. The `.teal()` method no longer exists.\n* The API for group transactions has changed from `Gtxn.field(transaction_index)` to `Gtxn[transaction_index].field()`.\n* `Tmpl` syntax has changed from `Type(Tmpl(\"TMPL_NAME\"))` to `Tmpl.Type(\"TMPL_NAME\")`.\n"
},
{
"alpha_fraction": 0.6265178918838501,
"alphanum_fraction": 0.6307843923568726,
"avg_line_length": 23.97541046142578,
"blob_id": "133cd7dfa4d4231834426bd6eb722720501f8adc",
"content_id": "8d8761ffcb5e30d043fd8c07cf9d55089191e06b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3047,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 122,
"path": "/pyteal/ast/return_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\noptions = CompileOptions(version=4)\n\n\ndef test_main_return():\n arg = Int(1)\n expr = Return(arg)\n assert expr.type_of() == TealType.none\n assert expr.has_return()\n\n expected = TealSimpleBlock([TealOp(arg, Op.int, 1), TealOp(expr, Op.return_)])\n\n actual, _ = expr.__teal__(options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_main_return_invalid():\n with pytest.raises(TealCompileError):\n Return(Txn.receiver()).__teal__(options)\n\n with pytest.raises(TealCompileError):\n Return().__teal__(options)\n\n\ndef test_subroutine_return_value():\n cases = (\n (TealType.uint64, Int(1), Op.int, 1),\n (TealType.bytes, Bytes(\"value\"), Op.byte, '\"value\"'),\n (TealType.anytype, Int(1), Op.int, 1),\n (TealType.anytype, Bytes(\"value\"), Op.byte, '\"value\"'),\n )\n\n for (tealType, value, op, opValue) in cases:\n expr = Return(value)\n\n def mySubroutine():\n return expr\n\n subroutine = SubroutineDefinition(mySubroutine, tealType)\n\n assert expr.type_of() == TealType.none\n assert expr.has_return()\n\n expected = TealSimpleBlock(\n [TealOp(value, op, opValue), TealOp(expr, Op.retsub)]\n )\n\n options.setSubroutine(subroutine)\n actual, _ = expr.__teal__(options)\n options.setSubroutine(None)\n\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_subroutine_return_value_invalid():\n cases = (\n (TealType.bytes, Int(1)),\n (TealType.uint64, Bytes(\"value\")),\n )\n\n for (tealType, value) in cases:\n expr = Return(value)\n\n def mySubroutine():\n return expr\n\n subroutine = SubroutineDefinition(mySubroutine, tealType)\n\n options.setSubroutine(subroutine)\n with pytest.raises(TealCompileError):\n expr.__teal__(options)\n options.setSubroutine(None)\n\n\ndef test_subroutine_return_none():\n expr = Return()\n\n def mySubroutine():\n return expr\n\n subroutine = SubroutineDefinition(mySubroutine, TealType.none)\n\n assert expr.type_of() == TealType.none\n assert expr.has_return()\n\n expected = TealSimpleBlock([TealOp(expr, Op.retsub)])\n\n options.setSubroutine(subroutine)\n actual, _ = expr.__teal__(options)\n options.setSubroutine(None)\n\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_subroutine_return_none_invalid():\n for value in (Int(1), Bytes(\"value\")):\n expr = Return(value)\n\n def mySubroutine():\n return expr\n\n subroutine = SubroutineDefinition(mySubroutine, TealType.none)\n\n options.setSubroutine(subroutine)\n with pytest.raises(TealCompileError):\n expr.__teal__(options)\n options.setSubroutine(None)\n"
},
{
"alpha_fraction": 0.6099700331687927,
"alphanum_fraction": 0.6161169409751892,
"avg_line_length": 36.262569427490234,
"blob_id": "0fdf8c162ba73c7d9efb089ff0b057ddf6066aec",
"content_id": "79ecd4410a491cfe1b6dbc77d594132b7f3452b4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 13340,
"license_type": "permissive",
"max_line_length": 96,
"num_lines": 358,
"path": "/pyteal/ast/app.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from typing import TYPE_CHECKING\nfrom enum import Enum\n\nfrom ..types import TealType, require_type\nfrom ..ir import TealOp, Op, TealBlock\nfrom .leafexpr import LeafExpr\nfrom .expr import Expr\nfrom .maybe import MaybeValue\nfrom .int import EnumInt\nfrom .global_ import Global\n\nif TYPE_CHECKING:\n from ..compiler import CompileOptions\n\n\nclass OnComplete:\n \"\"\"An enum of values that :any:`TxnObject.on_completion()` may return.\"\"\"\n\n NoOp = EnumInt(\"NoOp\")\n OptIn = EnumInt(\"OptIn\")\n CloseOut = EnumInt(\"CloseOut\")\n ClearState = EnumInt(\"ClearState\")\n UpdateApplication = EnumInt(\"UpdateApplication\")\n DeleteApplication = EnumInt(\"DeleteApplication\")\n\n\nOnComplete.__module__ = \"pyteal\"\n\n\nclass AppField(Enum):\n \"\"\"Enum of app fields used to create :any:`App` objects.\"\"\"\n\n optedIn = (Op.app_opted_in, TealType.uint64)\n localGet = (Op.app_local_get, TealType.anytype)\n localGetEx = (Op.app_local_get_ex, TealType.none)\n globalGet = (Op.app_global_get, TealType.anytype)\n globalGetEx = (Op.app_global_get_ex, TealType.none)\n localPut = (Op.app_local_put, TealType.none)\n globalPut = (Op.app_global_put, TealType.none)\n localDel = (Op.app_local_del, TealType.none)\n globalDel = (Op.app_global_del, TealType.none)\n\n def __init__(self, op: Op, type: TealType) -> None:\n self.op = op\n self.ret_type = type\n\n def get_op(self) -> Op:\n return self.op\n\n def type_of(self) -> TealType:\n return self.ret_type\n\n\nAppField.__module__ = \"pyteal\"\n\n\nclass App(LeafExpr):\n \"\"\"An expression related to applications.\"\"\"\n\n def __init__(self, field: AppField, args) -> None:\n super().__init__()\n self.field = field\n self.args = args\n\n def __str__(self):\n ret_str = \"({}\".format(self.field.get_op())\n for a in self.args:\n ret_str += \" \" + a.__str__()\n ret_str += \")\"\n return ret_str\n\n def __teal__(self, options: \"CompileOptions\"):\n return TealBlock.FromOp(options, TealOp(self, self.field.get_op()), *self.args)\n\n def type_of(self):\n return self.field.type_of()\n\n @classmethod\n def id(cls) -> Global:\n \"\"\"Get the ID of the current running application.\n\n This is the same as :any:`Global.current_application_id()`.\n \"\"\"\n return Global.current_application_id()\n\n @classmethod\n def optedIn(cls, account: Expr, app: Expr) -> \"App\":\n \"\"\"Check if an account has opted in for an application.\n\n Args:\n account: An index into Txn.Accounts that corresponds to the account to check,\n must be evaluated to uint64 (or, since v4, an account address that appears in\n Txn.Accounts or is Txn.Sender, must be evaluated to bytes).\n app: An index into Txn.ForeignApps that corresponds to the application to read from,\n must be evaluated to uint64 (or, since v4, an application id that appears in\n Txn.ForeignApps or is the CurrentApplicationID, must be evaluated to bytes).\n \"\"\"\n require_type(account.type_of(), TealType.anytype)\n require_type(app.type_of(), TealType.uint64)\n return cls(AppField.optedIn, [account, app])\n\n @classmethod\n def localGet(cls, account: Expr, key: Expr) -> \"App\":\n \"\"\"Read from an account's local state for the current application.\n\n Args:\n account: An index into Txn.Accounts that corresponds to the account to check,\n must be evaluated to uint64 (or, since v4, an account address that appears in\n Txn.Accounts or is Txn.Sender, must be evaluated to bytes).\n key: The key to read from the account's local state. 
Must evaluate to bytes.\n        \"\"\"\n        require_type(account.type_of(), TealType.anytype)\n        require_type(key.type_of(), TealType.bytes)\n        return cls(AppField.localGet, [account, key])\n\n    @classmethod\n    def localGetEx(cls, account: Expr, app: Expr, key: Expr) -> MaybeValue:\n        \"\"\"Read from an account's local state for an application.\n\n        Args:\n            account: An index into Txn.Accounts that corresponds to the account to check,\n                must be evaluated to uint64 (or, since v4, an account address that appears in\n                Txn.Accounts or is Txn.Sender, must be evaluated to bytes).\n            app: An index into Txn.ForeignApps that corresponds to the application to read from,\n                must be evaluated to uint64 (or, since v4, an application id that appears in\n                Txn.ForeignApps or is the CurrentApplicationID, must be evaluated to bytes).\n            key: The key to read from the account's local state. Must evaluate to bytes.\n        \"\"\"\n        require_type(account.type_of(), TealType.anytype)\n        require_type(app.type_of(), TealType.uint64)\n        require_type(key.type_of(), TealType.bytes)\n        return MaybeValue(\n            AppField.localGetEx.get_op(), TealType.anytype, args=[account, app, key]\n        )\n\n    @classmethod\n    def globalGet(cls, key: Expr) -> \"App\":\n        \"\"\"Read from the global state of the current application.\n\n        Args:\n            key: The key to read from the global application state. Must evaluate to bytes.\n        \"\"\"\n        require_type(key.type_of(), TealType.bytes)\n        return cls(AppField.globalGet, [key])\n\n    @classmethod\n    def globalGetEx(cls, app: Expr, key: Expr) -> MaybeValue:\n        \"\"\"Read from the global state of an application.\n\n        Args:\n            app: An index into Txn.ForeignApps that corresponds to the application to read from,\n                must be evaluated to uint64 (or, since v4, an application id that appears in\n                Txn.ForeignApps or is the CurrentApplicationID, must be evaluated to bytes).\n            key: The key to read from the global application state. Must evaluate to bytes.\n        \"\"\"\n        require_type(app.type_of(), TealType.uint64)\n        require_type(key.type_of(), TealType.bytes)\n        return MaybeValue(\n            AppField.globalGetEx.get_op(), TealType.anytype, args=[app, key]\n        )\n\n    @classmethod\n    def localPut(cls, account: Expr, key: Expr, value: Expr) -> \"App\":\n        \"\"\"Write to an account's local state for the current application.\n\n        Args:\n            account: An index into Txn.Accounts that corresponds to the account to check,\n                must be evaluated to uint64 (or, since v4, an account address that appears in\n                Txn.Accounts or is Txn.Sender, must be evaluated to bytes).\n            key: The key to write in the account's local state. Must evaluate to bytes.\n            value: The value to write in the account's local state. Can evaluate to any type.\n        \"\"\"\n        require_type(account.type_of(), TealType.anytype)\n        require_type(key.type_of(), TealType.bytes)\n        require_type(value.type_of(), TealType.anytype)\n        return cls(AppField.localPut, [account, key, value])\n\n    @classmethod\n    def globalPut(cls, key: Expr, value: Expr) -> \"App\":\n        \"\"\"Write to the global state of the current application.\n\n        Args:\n            key: The key to write in the global application state. Must evaluate to bytes.\n            value: The value to write in the global application state. 
Can evaluate to any type.\n        \"\"\"\n        require_type(key.type_of(), TealType.bytes)\n        require_type(value.type_of(), TealType.anytype)\n        return cls(AppField.globalPut, [key, value])\n\n    @classmethod\n    def localDel(cls, account: Expr, key: Expr) -> \"App\":\n        \"\"\"Delete a key from an account's local state for the current application.\n\n        Args:\n            account: An index into Txn.Accounts that corresponds to the account to check,\n                must be evaluated to uint64 (or, since v4, an account address that appears in\n                Txn.Accounts or is Txn.Sender, must be evaluated to bytes).\n            key: The key to delete from the account's local state. Must evaluate to bytes.\n        \"\"\"\n        require_type(account.type_of(), TealType.anytype)\n        require_type(key.type_of(), TealType.bytes)\n        return cls(AppField.localDel, [account, key])\n\n    @classmethod\n    def globalDel(cls, key: Expr) -> \"App\":\n        \"\"\"Delete a key from the global state of the current application.\n\n        Args:\n            key: The key to delete from the global application state. Must evaluate to bytes.\n        \"\"\"\n        require_type(key.type_of(), TealType.bytes)\n        return cls(AppField.globalDel, [key])\n\n\nApp.__module__ = \"pyteal\"\n\n\nclass AppParam:\n    @classmethod\n    def approvalProgram(cls, app: Expr) -> MaybeValue:\n        \"\"\"Get the bytecode of Approval Program for the application.\n\n        Args:\n            app: An index into Txn.ForeignApps that corresponds to the application to check.\n                Must evaluate to uint64.\n        \"\"\"\n        require_type(app.type_of(), TealType.uint64)\n        return MaybeValue(\n            Op.app_params_get,\n            TealType.bytes,\n            immediate_args=[\"AppApprovalProgram\"],\n            args=[app],\n        )\n\n    @classmethod\n    def clearStateProgram(cls, app: Expr) -> MaybeValue:\n        \"\"\"Get the bytecode of Clear State Program for the application.\n\n        Args:\n            app: An index into Txn.ForeignApps that corresponds to the application to check.\n                Must evaluate to uint64.\n        \"\"\"\n        require_type(app.type_of(), TealType.uint64)\n        return MaybeValue(\n            Op.app_params_get,\n            TealType.bytes,\n            immediate_args=[\"AppClearStateProgram\"],\n            args=[app],\n        )\n\n    @classmethod\n    def globalNumUnit(cls, app: Expr) -> MaybeValue:\n        \"\"\"Get the number of uint64 values allowed in Global State for the application.\n\n        Args:\n            app: An index into Txn.ForeignApps that corresponds to the application to check.\n                Must evaluate to uint64.\n        \"\"\"\n        require_type(app.type_of(), TealType.uint64)\n        return MaybeValue(\n            Op.app_params_get,\n            TealType.uint64,\n            immediate_args=[\"AppGlobalNumUnit\"],\n            args=[app],\n        )\n\n    @classmethod\n    def globalNumByteSlice(cls, app: Expr) -> MaybeValue:\n        \"\"\"Get the number of byte array values allowed in Global State for the application.\n\n        Args:\n            app: An index into Txn.ForeignApps that corresponds to the application to check.\n                Must evaluate to uint64.\n        \"\"\"\n        require_type(app.type_of(), TealType.uint64)\n        return MaybeValue(\n            Op.app_params_get,\n            TealType.uint64,\n            immediate_args=[\"AppGlobalNumByteSlice\"],\n            args=[app],\n        )\n\n    @classmethod\n    def localNumUnit(cls, app: Expr) -> MaybeValue:\n        \"\"\"Get the number of uint64 values allowed in Local State for the application.\n\n        Args:\n            app: An index into Txn.ForeignApps that corresponds to the application to check.\n                Must evaluate to uint64.\n        \"\"\"\n        require_type(app.type_of(), TealType.uint64)\n        return MaybeValue(\n            Op.app_params_get,\n            TealType.uint64,\n            immediate_args=[\"AppLocalNumUnit\"],\n            args=[app],\n        )\n\n    @classmethod\n    def localNumByteSlice(cls, app: Expr) -> MaybeValue:\n        \"\"\"Get the number of byte array values allowed in Local State for the 
application.\n\n        Args:\n            app: An index into Txn.ForeignApps that corresponds to the application to check.\n                Must evaluate to uint64.\n        \"\"\"\n        require_type(app.type_of(), TealType.uint64)\n        return MaybeValue(\n            Op.app_params_get,\n            TealType.uint64,\n            immediate_args=[\"AppLocalNumByteSlice\"],\n            args=[app],\n        )\n\n    @classmethod\n    def extraProgramPages(cls, app: Expr) -> MaybeValue:\n        \"\"\"Get the number of Extra Program Pages of code space for the application.\n\n        Args:\n            app: An index into Txn.ForeignApps that corresponds to the application to check.\n                Must evaluate to uint64.\n        \"\"\"\n        require_type(app.type_of(), TealType.uint64)\n        return MaybeValue(\n            Op.app_params_get,\n            TealType.uint64,\n            immediate_args=[\"AppExtraProgramPages\"],\n            args=[app],\n        )\n\n    @classmethod\n    def creator(cls, app: Expr) -> MaybeValue:\n        \"\"\"Get the creator address for the application.\n\n        Args:\n            app: An index into Txn.ForeignApps that corresponds to the application to check.\n                Must evaluate to uint64.\n        \"\"\"\n        require_type(app.type_of(), TealType.uint64)\n        return MaybeValue(\n            Op.app_params_get, TealType.bytes, immediate_args=[\"AppCreator\"], args=[app]\n        )\n\n    @classmethod\n    def address(cls, app: Expr) -> MaybeValue:\n        \"\"\"Get the escrow address for the application.\n\n        Args:\n            app: An index into Txn.ForeignApps that corresponds to the application to check.\n                Must evaluate to uint64.\n        \"\"\"\n        require_type(app.type_of(), TealType.uint64)\n        return MaybeValue(\n            Op.app_params_get, TealType.bytes, immediate_args=[\"AppAddress\"], args=[app]\n        )\n\n\nAppParam.__module__ = \"pyteal\"\n"
},
{
"alpha_fraction": 0.6736292243003845,
"alphanum_fraction": 0.6858137249946594,
"avg_line_length": 24.53333282470703,
"blob_id": "fee7b0485e9176012c0f5378432ecb119f4e0eb7",
"content_id": "dbce8bbd9bd33dee51ae04e2eef1ec754dd97e9a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1149,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 45,
"path": "/pyteal/ast/assert_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\nteal2Options = CompileOptions(version=2)\nteal3Options = CompileOptions(version=3)\n\n\ndef test_teal_2_assert():\n arg = Int(1)\n expr = Assert(arg)\n assert expr.type_of() == TealType.none\n\n expected, _ = arg.__teal__(teal2Options)\n expectedBranch = TealConditionalBlock([])\n expectedBranch.setTrueBlock(TealSimpleBlock([]))\n expectedBranch.setFalseBlock(Err().__teal__(teal2Options)[0])\n expected.setNextBlock(expectedBranch)\n\n actual, _ = expr.__teal__(teal2Options)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n\ndef test_teal_3_assert():\n arg = Int(1)\n expr = Assert(arg)\n assert expr.type_of() == TealType.none\n\n expected = TealSimpleBlock([TealOp(arg, Op.int, 1), TealOp(expr, Op.assert_)])\n\n actual, _ = expr.__teal__(teal3Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_assert_invalid():\n with pytest.raises(TealTypeError):\n Assert(Txn.receiver())\n"
},
{
"alpha_fraction": 0.5593491792678833,
"alphanum_fraction": 0.5950919985771179,
"avg_line_length": 25.77857208251953,
"blob_id": "400ba2368348e6d309231f3fcda45afb4eb0ebc2",
"content_id": "22b4817bdd551eb2b5b0e1f19bb072917d4583d1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3749,
"license_type": "permissive",
"max_line_length": 64,
"num_lines": 140,
"path": "/pyteal/ast/ternaryexpr_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\nteal2Options = CompileOptions(version=2)\nteal3Options = CompileOptions(version=3)\nteal4Options = CompileOptions(version=4)\nteal5Options = CompileOptions(version=5)\n\n\ndef test_ed25519verify():\n args = [Bytes(\"data\"), Bytes(\"sig\"), Bytes(\"key\")]\n expr = Ed25519Verify(args[0], args[1], args[2])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, '\"data\"'),\n TealOp(args[1], Op.byte, '\"sig\"'),\n TealOp(args[2], Op.byte, '\"key\"'),\n TealOp(expr, Op.ed25519verify),\n ]\n )\n\n actual, _ = expr.__teal__(teal2Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n\ndef test_ed25519verify_invalid():\n with pytest.raises(TealTypeError):\n Ed25519Verify(Int(0), Bytes(\"sig\"), Bytes(\"key\"))\n\n with pytest.raises(TealTypeError):\n Ed25519Verify(Bytes(\"data\"), Int(0), Bytes(\"key\"))\n\n with pytest.raises(TealTypeError):\n Ed25519Verify(Bytes(\"data\"), Bytes(\"sig\"), Int(0))\n\n\ndef test_set_bit_int():\n args = [Int(0), Int(2), Int(1)]\n expr = SetBit(args[0], args[1], args[2])\n assert expr.type_of() == TealType.uint64\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.int, 0),\n TealOp(args[1], Op.int, 2),\n TealOp(args[2], Op.int, 1),\n TealOp(expr, Op.setbit),\n ]\n )\n\n actual, _ = expr.__teal__(teal3Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal2Options)\n\n\ndef test_set_bit_bytes():\n args = [Bytes(\"base16\", \"0x0000\"), Int(0), Int(1)]\n expr = SetBit(args[0], args[1], args[2])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, \"0x0000\"),\n TealOp(args[1], Op.int, 0),\n TealOp(args[2], Op.int, 1),\n TealOp(expr, Op.setbit),\n ]\n )\n\n actual, _ = expr.__teal__(teal3Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal2Options)\n\n\ndef test_set_bit_invalid():\n with pytest.raises(TealTypeError):\n SetBit(Int(3), Bytes(\"index\"), Int(1))\n\n with pytest.raises(TealTypeError):\n SetBit(Int(3), Int(0), Bytes(\"one\"))\n\n with pytest.raises(TealTypeError):\n SetBit(Bytes(\"base16\", \"0xFF\"), Bytes(\"index\"), Int(1))\n\n with pytest.raises(TealTypeError):\n SetBit(Bytes(\"base16\", \"0xFF\"), Int(0), Bytes(\"one\"))\n\n\ndef test_set_byte():\n args = [Bytes(\"base16\", \"0xFF\"), Int(0), Int(3)]\n expr = SetByte(args[0], args[1], args[2])\n assert expr.type_of() == TealType.bytes\n\n expected = TealSimpleBlock(\n [\n TealOp(args[0], Op.byte, \"0xFF\"),\n TealOp(args[1], Op.int, 0),\n TealOp(args[2], Op.int, 3),\n TealOp(expr, Op.setbyte),\n ]\n )\n\n actual, _ = expr.__teal__(teal3Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal2Options)\n\n\ndef test_set_byte_invalid():\n with pytest.raises(TealTypeError):\n SetByte(Int(3), Int(0), Int(1))\n\n with pytest.raises(TealTypeError):\n SetByte(Bytes(\"base16\", \"0xFF\"), Bytes(\"index\"), Int(1))\n\n with pytest.raises(TealTypeError):\n SetByte(Bytes(\"base16\", \"0xFF\"), Int(0), Bytes(\"one\"))\n"
},
{
"alpha_fraction": 0.5932551026344299,
"alphanum_fraction": 0.5994135141372681,
"avg_line_length": 28.39655113220215,
"blob_id": "ab2feb1a64e13b6ea5311323264d178937c2280f",
"content_id": "454fcef2bb9b04c2a28385eda067dee1fc4744d2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3410,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 116,
"path": "/pyteal/ast/itxn_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\nfrom ..types import types_match\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import MAX_GROUP_SIZE, CompileOptions\n\nteal4Options = CompileOptions(version=4)\nteal5Options = CompileOptions(version=5)\n\n\ndef test_InnerTxnBuilder_Begin():\n expr = InnerTxnBuilder.Begin()\n assert expr.type_of() == TealType.none\n assert not expr.has_return()\n\n expected = TealSimpleBlock([TealOp(expr, Op.itxn_begin)])\n\n actual, _ = expr.__teal__(teal5Options)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal4Options)\n\n\ndef test_InnerTxnBuilder_Submit():\n expr = InnerTxnBuilder.Submit()\n assert expr.type_of() == TealType.none\n assert not expr.has_return()\n\n expected = TealSimpleBlock([TealOp(expr, Op.itxn_submit)])\n\n actual, _ = expr.__teal__(teal5Options)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal4Options)\n\n\ndef test_InnerTxnBuilder_SetField():\n for field in TxnField:\n if field.is_array:\n with pytest.raises(TealInputError):\n InnerTxnBuilder.SetField(field, Int(0))\n continue\n\n for value, opArgs in (\n (Int(0), (Op.int, 0)),\n (Bytes(\"value\"), (Op.byte, '\"value\"')),\n ):\n assert field.type_of() in (TealType.uint64, TealType.bytes)\n\n if not types_match(field.type_of(), value.type_of()):\n with pytest.raises(TealTypeError):\n InnerTxnBuilder.SetField(field, value)\n continue\n\n expr = InnerTxnBuilder.SetField(field, value)\n assert expr.type_of() == TealType.none\n assert not expr.has_return()\n\n expected = TealSimpleBlock(\n [TealOp(value, *opArgs), TealOp(expr, Op.itxn_field, field.arg_name)]\n )\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal4Options)\n\n\ndef test_InnerTxnBuilder_SetFields():\n cases = (\n ({}, Seq()),\n ({TxnField.amount: Int(5)}, InnerTxnBuilder.SetField(TxnField.amount, Int(5))),\n (\n {\n TxnField.type_enum: TxnType.Payment,\n TxnField.close_remainder_to: Txn.sender(),\n },\n Seq(\n InnerTxnBuilder.SetField(TxnField.type_enum, TxnType.Payment),\n InnerTxnBuilder.SetField(TxnField.close_remainder_to, Txn.sender()),\n ),\n ),\n )\n\n for fields, expectedExpr in cases:\n expr = InnerTxnBuilder.SetFields(fields)\n assert expr.type_of() == TealType.none\n assert not expr.has_return()\n\n expected, _ = expectedExpr.__teal__(teal5Options)\n expected.addIncoming()\n expected = TealBlock.NormalizeBlocks(expected)\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n with TealComponent.Context.ignoreExprEquality():\n assert actual == expected\n\n if len(fields) != 0:\n with pytest.raises(TealInputError):\n expr.__teal__(teal4Options)\n\n\n# txn_test.py performs additional testing\n"
},
{
"alpha_fraction": 0.6352253556251526,
"alphanum_fraction": 0.6502504348754883,
"avg_line_length": 22.490196228027344,
"blob_id": "46a5ba1b73e13bfd71e0c4c19f0763f38888234e",
"content_id": "7d622ddb786af737d884ee4f3efc706c64fb5c37",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1198,
"license_type": "permissive",
"max_line_length": 77,
"num_lines": 51,
"path": "/pyteal/ast/arg_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom .. import CompileOptions\n\nteal2Options = CompileOptions(version=2)\nteal4Options = CompileOptions(version=4)\nteal5Options = CompileOptions(version=5)\n\n\ndef test_arg_static():\n for i in range(256):\n expr = Arg(i)\n assert expr.type_of() == TealType.bytes\n assert not expr.has_return()\n\n expected = TealSimpleBlock([TealOp(expr, Op.arg, i)])\n\n actual, _ = expr.__teal__(teal2Options)\n assert actual == expected\n\n\ndef test_arg_dynamic():\n i = Int(7)\n expr = Arg(i)\n assert expr.type_of() == TealType.bytes\n assert not expr.has_return()\n\n expected = TealSimpleBlock([TealOp(i, Op.int, 7), TealOp(expr, Op.args)])\n\n actual, _ = expr.__teal__(teal5Options)\n actual.addIncoming()\n actual = TealBlock.NormalizeBlocks(actual)\n\n assert actual == expected\n\n with pytest.raises(TealInputError):\n expr.__teal__(teal4Options)\n\n\ndef test_arg_invalid():\n with pytest.raises(TealTypeError):\n Arg(Bytes(\"k\"))\n\n with pytest.raises(TealInputError):\n Arg(-1)\n\n with pytest.raises(TealInputError):\n Arg(256)\n"
},
{
"alpha_fraction": 0.5940136313438416,
"alphanum_fraction": 0.5960224866867065,
"avg_line_length": 27.609195709228516,
"blob_id": "17af6b61c74ef66b83f8abc055df88e0d40c6fdb",
"content_id": "7efae9d5fa3d81b840ef9e52b90d87ce7a3556c5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4978,
"license_type": "permissive",
"max_line_length": 98,
"num_lines": 174,
"path": "/pyteal/ast/scratch.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "from typing import TYPE_CHECKING\n\nfrom ..types import TealType\nfrom ..config import NUM_SLOTS\nfrom ..errors import TealInputError\nfrom .expr import Expr\n\nif TYPE_CHECKING:\n from ..compiler import CompileOptions\n\n\nclass ScratchSlot:\n \"\"\"Represents the allocation of a scratch space slot.\"\"\"\n\n # Unique identifier for the compiler to automatically assign slots\n # The id field is used by the compiler to map to an actual slot in the source code\n # Slot ids under 256 are manually reserved slots\n nextSlotId = NUM_SLOTS\n\n def __init__(self, requestedSlotId: int = None):\n \"\"\"Initializes a scratch slot with a particular id\n\n Args:\n requestedSlotId (optional): A scratch slot id that the compiler must store the value.\n This id may be a Python int in the range [0-256).\n \"\"\"\n if requestedSlotId is None:\n self.id = ScratchSlot.nextSlotId\n ScratchSlot.nextSlotId += 1\n self.isReservedSlot = False\n else:\n if requestedSlotId < 0 or requestedSlotId >= NUM_SLOTS:\n raise TealInputError(\n \"Invalid slot ID {}, shoud be in [0, {})\".format(\n requestedSlotId, NUM_SLOTS\n )\n )\n self.id = requestedSlotId\n self.isReservedSlot = True\n\n def store(self, value: Expr = None) -> Expr:\n \"\"\"Get an expression to store a value in this slot.\n\n Args:\n value (optional): The value to store in this slot. If not included, the last value on\n the stack will be stored. NOTE: storing the last value on the stack breaks the typical\n semantics of PyTeal, only use if you know what you're doing.\n \"\"\"\n if value is not None:\n return ScratchStore(self, value)\n return ScratchStackStore(self)\n\n def load(self, type: TealType = TealType.anytype) -> \"ScratchLoad\":\n \"\"\"Get an expression to load a value from this slot.\n\n Args:\n type (optional): The type being loaded from this slot, if known. Defaults to\n TealType.anytype.\n \"\"\"\n return ScratchLoad(self, type)\n\n def __repr__(self):\n return \"ScratchSlot({})\".format(self.id)\n\n def __str__(self):\n return \"slot#{}\".format(self.id)\n\n def __hash__(self):\n return hash(self.id)\n\n\nScratchSlot.__module__ = \"pyteal\"\n\n\nclass ScratchLoad(Expr):\n \"\"\"Expression to load a value from scratch space.\"\"\"\n\n def __init__(self, slot: ScratchSlot, type: TealType = TealType.anytype):\n \"\"\"Create a new ScratchLoad expression.\n\n Args:\n slot: The slot to load the value from.\n type (optional): The type being loaded from this slot, if known. 
Defaults to\n TealType.anytype.\n \"\"\"\n super().__init__()\n self.slot = slot\n self.type = type\n\n def __str__(self):\n return \"(Load {})\".format(self.slot)\n\n def __teal__(self, options: \"CompileOptions\"):\n from ..ir import TealOp, Op, TealBlock\n\n op = TealOp(self, Op.load, self.slot)\n return TealBlock.FromOp(options, op)\n\n def type_of(self):\n return self.type\n\n def has_return(self):\n return False\n\n\nScratchLoad.__module__ = \"pyteal\"\n\n\nclass ScratchStore(Expr):\n \"\"\"Expression to store a value in scratch space.\"\"\"\n\n def __init__(self, slot: ScratchSlot, value: Expr):\n \"\"\"Create a new ScratchStore expression.\n\n Args:\n slot: The slot to store the value in.\n value: The value to store.\n \"\"\"\n super().__init__()\n self.slot = slot\n self.value = value\n\n def __str__(self):\n return \"(Store {} {})\".format(self.slot, self.value)\n\n def __teal__(self, options: \"CompileOptions\"):\n from ..ir import TealOp, Op, TealBlock\n\n op = TealOp(self, Op.store, self.slot)\n return TealBlock.FromOp(options, op, self.value)\n\n def type_of(self):\n return TealType.none\n\n def has_return(self):\n return False\n\n\nScratchStore.__module__ = \"pyteal\"\n\n\nclass ScratchStackStore(Expr):\n \"\"\"Expression to store a value from the stack in scratch space.\n\n NOTE: This expression breaks the typical semantics of PyTeal, only use if you know what you're\n doing.\n \"\"\"\n\n def __init__(self, slot: ScratchSlot):\n \"\"\"Create a new ScratchStackStore expression.\n\n Args:\n slot: The slot to store the value in.\n \"\"\"\n super().__init__()\n self.slot = slot\n\n def __str__(self):\n return \"(StackStore {})\".format(self.slot)\n\n def __teal__(self, options: \"CompileOptions\"):\n from ..ir import TealOp, Op, TealBlock\n\n op = TealOp(self, Op.store, self.slot)\n return TealBlock.FromOp(options, op)\n\n def type_of(self):\n return TealType.none\n\n def has_return(self):\n return False\n\n\nScratchStackStore.__module__ = \"pyteal\"\n"
},
{
"alpha_fraction": 0.5437811613082886,
"alphanum_fraction": 0.5820996165275574,
"avg_line_length": 15.796542167663574,
"blob_id": "b8450d41cf1696068fb3f2abab13d70a112f0e0b",
"content_id": "3679be27b5744da29f9bfa6498b0a20f7d43d845",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 25262,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 1504,
"path": "/pyteal/compiler/compiler_test.py",
"repo_name": "harsh9200/pyteal",
"src_encoding": "UTF-8",
"text": "import pytest\n\nfrom .. import *\n\n# this is not necessary but mypy complains if it's not included\nfrom ..ast import *\n\n\ndef test_compile_single():\n expr = Int(1)\n\n expected = \"\"\"\n#pragma version 2\nint 1\nreturn\n\"\"\".strip()\n actual_application = compileTeal(expr, Mode.Application)\n actual_signature = compileTeal(expr, Mode.Signature)\n\n assert actual_application == actual_signature\n assert actual_application == expected\n\n\ndef test_compile_sequence():\n expr = Seq([Pop(Int(1)), Pop(Int(2)), Int(3) + Int(4)])\n\n expected = \"\"\"\n#pragma version 2\nint 1\npop\nint 2\npop\nint 3\nint 4\n+\nreturn\n\"\"\".strip()\n actual_application = compileTeal(expr, Mode.Application)\n actual_signature = compileTeal(expr, Mode.Signature)\n\n assert actual_application == actual_signature\n assert actual_application == expected\n\n\ndef test_compile_branch():\n expr = If(Int(1)).Then(Int(2)).Else(Int(3))\n\n expected = \"\"\"\n#pragma version 2\nint 1\nbnz main_l2\nint 3\nb main_l3\nmain_l2:\nint 2\nmain_l3:\nreturn\n\"\"\".strip()\n actual_application = compileTeal(expr, Mode.Application)\n actual_signature = compileTeal(expr, Mode.Signature)\n\n assert actual_application == actual_signature\n assert actual_application == expected\n\n\ndef test_compile_branch_multiple():\n expr = If(Int(1)).Then(Int(2)).ElseIf(Int(3)).Then(Int(4)).Else(Int(5))\n\n expected = \"\"\"\n#pragma version 2\nint 1\nbnz main_l4\nint 3\nbnz main_l3\nint 5\nb main_l5\nmain_l3:\nint 4\nb main_l5\nmain_l4:\nint 2\nmain_l5:\nreturn\n\"\"\".strip()\n actual_application = compileTeal(expr, Mode.Application)\n actual_signature = compileTeal(expr, Mode.Signature)\n\n assert actual_application == actual_signature\n assert actual_application == expected\n\n\ndef test_empty_branch():\n program = Seq(\n [\n If(Txn.application_id() == Int(0)).Then(Seq()),\n Approve(),\n ]\n )\n\n expected = \"\"\"#pragma version 5\ntxn ApplicationID\nint 0\n==\nbnz main_l1\nmain_l1:\nint 1\nreturn\n \"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=5, assembleConstants=False)\n assert actual == expected\n\n\ndef test_compile_mode():\n expr = App.globalGet(Bytes(\"key\"))\n\n expected = \"\"\"\n#pragma version 2\nbyte \"key\"\napp_global_get\nreturn\n\"\"\".strip()\n actual_application = compileTeal(expr, Mode.Application)\n\n assert actual_application == expected\n\n with pytest.raises(TealInputError):\n compileTeal(expr, Mode.Signature)\n\n\ndef test_compile_version_invalid():\n expr = Int(1)\n\n with pytest.raises(TealInputError):\n compileTeal(expr, Mode.Signature, version=1) # too small\n\n with pytest.raises(TealInputError):\n compileTeal(expr, Mode.Signature, version=6) # too large\n\n with pytest.raises(TealInputError):\n compileTeal(expr, Mode.Signature, version=2.0) # decimal\n\n\ndef test_compile_version_2():\n expr = Int(1)\n\n expected = \"\"\"\n#pragma version 2\nint 1\nreturn\n\"\"\".strip()\n actual = compileTeal(expr, Mode.Signature, version=2)\n assert actual == expected\n\n\ndef test_compile_version_default():\n expr = Int(1)\n\n actual_default = compileTeal(expr, Mode.Signature)\n actual_version_2 = compileTeal(expr, Mode.Signature, version=2)\n assert actual_default == actual_version_2\n\n\ndef test_compile_version_3():\n expr = Int(1)\n\n expected = \"\"\"\n#pragma version 3\nint 1\nreturn\n\"\"\".strip()\n actual = compileTeal(expr, Mode.Signature, version=3)\n assert actual == expected\n\n\ndef test_compile_version_4():\n expr = Int(1)\n\n expected = \"\"\"\n#pragma version 4\nint 
1\nreturn\n\"\"\".strip()\n actual = compileTeal(expr, Mode.Signature, version=4)\n assert actual == expected\n\n\ndef test_compile_version_5():\n expr = Int(1)\n expected = \"\"\"\n#pragma version 5\nint 1\nreturn\n\"\"\".strip()\n actual = compileTeal(expr, Mode.Signature, version=5)\n assert actual == expected\n\n\ndef test_slot_load_before_store():\n\n program = AssetHolding.balance(Int(0), Int(0)).value()\n with pytest.raises(TealInternalError):\n compileTeal(program, Mode.Application, version=2)\n\n program = AssetHolding.balance(Int(0), Int(0)).hasValue()\n with pytest.raises(TealInternalError):\n compileTeal(program, Mode.Application, version=2)\n\n program = App.globalGetEx(Int(0), Bytes(\"key\")).value()\n with pytest.raises(TealInternalError):\n compileTeal(program, Mode.Application, version=2)\n\n program = App.globalGetEx(Int(0), Bytes(\"key\")).hasValue()\n with pytest.raises(TealInternalError):\n compileTeal(program, Mode.Application, version=2)\n\n program = ScratchVar().load()\n with pytest.raises(TealInternalError):\n compileTeal(program, Mode.Application, version=2)\n\n\ndef test_assign_scratch_slots():\n myScratch = ScratchVar(TealType.uint64)\n otherScratch = ScratchVar(TealType.uint64, 1)\n anotherScratch = ScratchVar(TealType.uint64, 0)\n lastScratch = ScratchVar(TealType.uint64)\n prog = Seq(\n [\n myScratch.store(Int(5)), # Slot 2\n otherScratch.store(Int(0)), # Slot 1\n anotherScratch.store(Int(7)), # Slot 0\n lastScratch.store(Int(9)), # Slot 3\n Approve(),\n ]\n )\n\n expected = \"\"\"\n#pragma version 4\nint 5\nstore 2\nint 0\nstore 1\nint 7\nstore 0\nint 9\nstore 3\nint 1\nreturn\n\"\"\".strip()\n actual = compileTeal(prog, mode=Mode.Signature, version=4)\n assert actual == expected\n\n\ndef test_scratchvar_double_assign_invalid():\n myvar = ScratchVar(TealType.uint64, 10)\n otherVar = ScratchVar(TealType.uint64, 10)\n prog = Seq([myvar.store(Int(5)), otherVar.store(Int(0)), Approve()])\n with pytest.raises(TealInternalError):\n compileTeal(prog, mode=Mode.Signature, version=4)\n\n\ndef test_assembleConstants():\n program = Itob(Int(1) + Int(1) + Tmpl.Int(\"TMPL_VAR\")) == Concat(\n Bytes(\"test\"), Bytes(\"test\"), Bytes(\"test2\")\n )\n\n expectedNoAssemble = \"\"\"\n#pragma version 3\nint 1\nint 1\n+\nint TMPL_VAR\n+\nitob\nbyte \"test\"\nbyte \"test\"\nconcat\nbyte \"test2\"\nconcat\n==\nreturn\n\"\"\".strip()\n actualNoAssemble = compileTeal(\n program, Mode.Application, version=3, assembleConstants=False\n )\n assert expectedNoAssemble == actualNoAssemble\n\n expectedAssemble = \"\"\"\n#pragma version 3\nintcblock 1\nbytecblock 0x74657374\nintc_0 // 1\nintc_0 // 1\n+\npushint TMPL_VAR // TMPL_VAR\n+\nitob\nbytec_0 // \"test\"\nbytec_0 // \"test\"\nconcat\npushbytes 0x7465737432 // \"test2\"\nconcat\n==\nreturn\n\"\"\".strip()\n actualAssemble = compileTeal(\n program, Mode.Application, version=3, assembleConstants=True\n )\n assert expectedAssemble == actualAssemble\n\n with pytest.raises(TealInternalError):\n compileTeal(program, Mode.Application, version=2, assembleConstants=True)\n\n\ndef test_compile_while():\n i = ScratchVar()\n program = Seq(\n [\n i.store(Int(0)),\n While(i.load() < Int(2)).Do(Seq([i.store(i.load() + Int(1))])),\n Approve(),\n ]\n )\n\n expected = \"\"\"\n #pragma version 4\nint 0\nstore 0\nmain_l1:\nload 0\nint 2\n<\nbz main_l3\nload 0\nint 1\n+\nstore 0\nb main_l1\nmain_l3:\nint 1\nreturn\n \"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=4, assembleConstants=False)\n assert expected == actual\n\n # 
nested\n i = ScratchVar()\n j = ScratchVar()\n\n program = Seq(\n [\n i.store(Int(0)),\n While(i.load() < Int(2)).Do(\n Seq(\n [\n j.store(Int(0)),\n While(j.load() < Int(5)).Do(Seq([j.store(j.load() + Int(1))])),\n i.store(i.load() + Int(1)),\n ]\n )\n ),\n Approve(),\n ]\n )\n\n expected = \"\"\"#pragma version 4\nint 0\nstore 0\nmain_l1:\nload 0\nint 2\n<\nbz main_l6\nint 0\nstore 1\nmain_l3:\nload 1\nint 5\n<\nbnz main_l5\nload 0\nint 1\n+\nstore 0\nb main_l1\nmain_l5:\nload 1\nint 1\n+\nstore 1\nb main_l3\nmain_l6:\nint 1\nreturn\n \"\"\".strip()\n\n actual = compileTeal(program, Mode.Application, version=4, assembleConstants=False)\n assert expected == actual\n\n\ndef test_compile_for():\n i = ScratchVar()\n program = Seq(\n [\n For(i.store(Int(0)), i.load() < Int(10), i.store(i.load() + Int(1))).Do(\n Seq([App.globalPut(Itob(i.load()), i.load() * Int(2))])\n ),\n Approve(),\n ]\n )\n\n expected = \"\"\"\n #pragma version 4\nint 0\nstore 0\nmain_l1:\nload 0\nint 10\n<\nbz main_l3\nload 0\nitob\nload 0\nint 2\n*\napp_global_put\nload 0\nint 1\n+\nstore 0\nb main_l1\nmain_l3:\nint 1\nreturn\n \"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=4, assembleConstants=False)\n assert expected == actual\n\n # nested\n i = ScratchVar()\n j = ScratchVar()\n program = Seq(\n [\n For(i.store(Int(0)), i.load() < Int(10), i.store(i.load() + Int(1))).Do(\n Seq(\n [\n For(\n j.store(Int(0)),\n j.load() < Int(4),\n j.store(j.load() + Int(2)),\n ).Do(Seq([App.globalPut(Itob(j.load()), j.load() * Int(2))]))\n ]\n )\n ),\n Approve(),\n ]\n )\n\n expected = \"\"\"\n #pragma version 4\nint 0\nstore 0\nmain_l1:\nload 0\nint 10\n<\nbz main_l6\nint 0\nstore 1\nmain_l3:\nload 1\nint 4\n<\nbnz main_l5\nload 0\nint 1\n+\nstore 0\nb main_l1\nmain_l5:\nload 1\nitob\nload 1\nint 2\n*\napp_global_put\nload 1\nint 2\n+\nstore 1\nb main_l3\nmain_l6:\nint 1\nreturn\n \"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=4, assembleConstants=False)\n assert expected == actual\n\n\ndef test_compile_break():\n\n # While\n i = ScratchVar()\n program = Seq(\n [\n i.store(Int(0)),\n While(i.load() < Int(3)).Do(\n Seq([If(i.load() == Int(2), Break()), i.store(i.load() + Int(1))])\n ),\n Approve(),\n ]\n )\n\n expected = \"\"\"#pragma version 4\nint 0\nstore 0\nmain_l1:\nload 0\nint 3\n<\nbz main_l4\nload 0\nint 2\n==\nbnz main_l4\nload 0\nint 1\n+\nstore 0\nb main_l1\nmain_l4:\nint 1\nreturn\n \"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=4, assembleConstants=False)\n assert expected == actual\n\n # For\n i = ScratchVar()\n program = Seq(\n [\n For(i.store(Int(0)), i.load() < Int(10), i.store(i.load() + Int(1))).Do(\n Seq(\n [\n If(i.load() == Int(4), Break()),\n App.globalPut(Itob(i.load()), i.load() * Int(2)),\n ]\n )\n ),\n Approve(),\n ]\n )\n\n expected = \"\"\"#pragma version 4\nint 0\nstore 0\nmain_l1:\nload 0\nint 10\n<\nbz main_l4\nload 0\nint 4\n==\nbnz main_l4\nload 0\nitob\nload 0\nint 2\n*\napp_global_put\nload 0\nint 1\n+\nstore 0\nb main_l1\nmain_l4:\nint 1\nreturn\n \"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=4, assembleConstants=False)\n assert expected == actual\n\n\ndef test_compile_continue():\n # While\n i = ScratchVar()\n program = Seq(\n [\n i.store(Int(0)),\n While(i.load() < Int(3)).Do(\n Seq([If(i.load() == Int(2), Continue()), i.store(i.load() + Int(1))])\n ),\n Approve(),\n ]\n )\n\n expected = \"\"\"#pragma version 4\nint 0\nstore 0\nmain_l1:\nload 0\nint 3\n<\nbz main_l4\nmain_l2:\nload 0\nint 
2\n==\nbnz main_l2\nload 0\nint 1\n+\nstore 0\nb main_l1\nmain_l4:\nint 1\nreturn\n \"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=4, assembleConstants=False)\n assert expected == actual\n\n # For\n i = ScratchVar()\n program = Seq(\n [\n For(i.store(Int(0)), i.load() < Int(10), i.store(i.load() + Int(1))).Do(\n Seq(\n [\n If(i.load() == Int(4), Continue()),\n App.globalPut(Itob(i.load()), i.load() * Int(2)),\n ]\n )\n ),\n Approve(),\n ]\n )\n\n expected = \"\"\"#pragma version 4\nint 0\nstore 0\nmain_l1:\nload 0\nint 10\n<\nbz main_l5\nload 0\nint 4\n==\nbnz main_l4\nload 0\nitob\nload 0\nint 2\n*\napp_global_put\nmain_l4:\nload 0\nint 1\n+\nstore 0\nb main_l1\nmain_l5:\nint 1\nreturn\n \"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=4, assembleConstants=False)\n assert expected == actual\n\n\ndef test_compile_continue_break_nested():\n\n i = ScratchVar()\n program = Seq(\n [\n i.store(Int(0)),\n While(i.load() < Int(10)).Do(\n Seq(\n [\n i.store(i.load() + Int(1)),\n If(i.load() < Int(4), Continue(), Break()),\n ]\n )\n ),\n Approve(),\n ]\n )\n\n expected = \"\"\"#pragma version 4\nint 0\nstore 0\nload 0\nint 10\n<\nbz main_l2\nmain_l1:\nload 0\nint 1\n+\nstore 0\nload 0\nint 4\n<\nbnz main_l1\nmain_l2:\nint 1\nreturn\n \"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=4, assembleConstants=False)\n assert expected == actual\n\n i = ScratchVar()\n program = Seq(\n [\n i.store(Int(0)),\n While(i.load() < Int(10)).Do(\n Seq(\n [\n If(i.load() == Int(8), Break()),\n While(i.load() < Int(6)).Do(\n Seq(\n [\n If(i.load() == Int(3), Break()),\n i.store(i.load() + Int(1)),\n ]\n )\n ),\n If(i.load() < Int(5), Continue()),\n i.store(i.load() + Int(1)),\n ]\n )\n ),\n Approve(),\n ]\n )\n\n expected = \"\"\"#pragma version 4\nint 0\nstore 0\nmain_l1:\nload 0\nint 10\n<\nbz main_l8\nmain_l2:\nload 0\nint 8\n==\nbnz main_l8\nmain_l3:\nload 0\nint 6\n<\nbnz main_l6\nmain_l4:\nload 0\nint 5\n<\nbnz main_l2\nload 0\nint 1\n+\nstore 0\nb main_l1\nmain_l6:\nload 0\nint 3\n==\nbnz main_l4\nload 0\nint 1\n+\nstore 0\nb main_l3\nmain_l8:\nint 1\nreturn\n\"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=4, assembleConstants=False)\n assert expected == actual\n\n\ndef test_compile_subroutine_unsupported():\n @Subroutine(TealType.none)\n def storeValue(value: Expr) -> Expr:\n return App.globalPut(Bytes(\"key\"), value)\n\n program = Seq(\n [\n If(Txn.sender() == Global.creator_address()).Then(\n storeValue(Txn.application_args[0])\n ),\n Approve(),\n ]\n )\n\n with pytest.raises(TealInputError):\n compileTeal(program, Mode.Application, version=3)\n\n\ndef test_compile_subroutine_no_return():\n @Subroutine(TealType.none)\n def storeValue(value: Expr) -> Expr:\n return App.globalPut(Bytes(\"key\"), value)\n\n program = Seq(\n [\n If(Txn.sender() == Global.creator_address()).Then(\n storeValue(Txn.application_args[0])\n ),\n Approve(),\n ]\n )\n\n expected = \"\"\"#pragma version 4\ntxn Sender\nglobal CreatorAddress\n==\nbz main_l2\ntxna ApplicationArgs 0\ncallsub sub0\nmain_l2:\nint 1\nreturn\nsub0: // storeValue\nstore 0\nbyte \"key\"\nload 0\napp_global_put\nretsub\n \"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=4, assembleConstants=False)\n assert actual == expected\n\n\ndef test_compile_subroutine_with_return():\n @Subroutine(TealType.none)\n def storeValue(value: Expr) -> Expr:\n return App.globalPut(Bytes(\"key\"), value)\n\n @Subroutine(TealType.bytes)\n def getValue() -> Expr:\n 
return App.globalGet(Bytes(\"key\"))\n\n program = Seq(\n [\n If(Txn.sender() == Global.creator_address()).Then(\n storeValue(Txn.application_args[0])\n ),\n If(getValue() == Bytes(\"fail\")).Then(Reject()),\n Approve(),\n ]\n )\n\n expected = \"\"\"#pragma version 4\ntxn Sender\nglobal CreatorAddress\n==\nbnz main_l3\nmain_l1:\ncallsub sub1\nbyte \"fail\"\n==\nbz main_l4\nint 0\nreturn\nmain_l3:\ntxna ApplicationArgs 0\ncallsub sub0\nb main_l1\nmain_l4:\nint 1\nreturn\nsub0: // storeValue\nstore 0\nbyte \"key\"\nload 0\napp_global_put\nretsub\nsub1: // getValue\nbyte \"key\"\napp_global_get\nretsub\n \"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=4, assembleConstants=False)\n assert actual == expected\n\n\ndef test_compile_subroutine_many_args():\n @Subroutine(TealType.uint64)\n def calculateSum(\n a1: Expr, a2: Expr, a3: Expr, a4: Expr, a5: Expr, a6: Expr\n ) -> Expr:\n return a1 + a2 + a3 + a4 + a5 + a6\n\n program = Return(\n calculateSum(Int(1), Int(2), Int(3), Int(4), Int(5), Int(6))\n == Int(1 + 2 + 3 + 4 + 5 + 6)\n )\n\n expected = \"\"\"#pragma version 4\nint 1\nint 2\nint 3\nint 4\nint 5\nint 6\ncallsub sub0\nint 21\n==\nreturn\nsub0: // calculateSum\nstore 5\nstore 4\nstore 3\nstore 2\nstore 1\nstore 0\nload 0\nload 1\n+\nload 2\n+\nload 3\n+\nload 4\n+\nload 5\n+\nretsub\n \"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=4, assembleConstants=False)\n assert actual == expected\n\n\ndef test_compile_subroutine_recursive():\n @Subroutine(TealType.uint64)\n def isEven(i: Expr) -> Expr:\n return (\n If(i == Int(0))\n .Then(Int(1))\n .ElseIf(i == Int(1))\n .Then(Int(0))\n .Else(isEven(i - Int(2)))\n )\n\n program = Return(isEven(Int(6)))\n\n expected = \"\"\"#pragma version 4\nint 6\ncallsub sub0\nreturn\nsub0: // isEven\nstore 0\nload 0\nint 0\n==\nbnz sub0_l4\nload 0\nint 1\n==\nbnz sub0_l3\nload 0\nint 2\n-\nload 0\ndig 1\ncallsub sub0\nswap\nstore 0\nswap\npop\nb sub0_l5\nsub0_l3:\nint 0\nb sub0_l5\nsub0_l4:\nint 1\nsub0_l5:\nretsub\n \"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=4, assembleConstants=False)\n assert actual == expected\n\n\ndef test_compile_subroutine_recursive_5():\n @Subroutine(TealType.uint64)\n def isEven(i: Expr) -> Expr:\n return (\n If(i == Int(0))\n .Then(Int(1))\n .ElseIf(i == Int(1))\n .Then(Int(0))\n .Else(isEven(i - Int(2)))\n )\n\n program = Return(isEven(Int(6)))\n\n expected = \"\"\"#pragma version 5\nint 6\ncallsub sub0\nreturn\nsub0: // isEven\nstore 0\nload 0\nint 0\n==\nbnz sub0_l4\nload 0\nint 1\n==\nbnz sub0_l3\nload 0\nint 2\n-\nload 0\nswap\ncallsub sub0\nswap\nstore 0\nb sub0_l5\nsub0_l3:\nint 0\nb sub0_l5\nsub0_l4:\nint 1\nsub0_l5:\nretsub\n \"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=5, assembleConstants=False)\n assert actual == expected\n\n\ndef test_compile_subroutine_recursive_multiple_args():\n @Subroutine(TealType.uint64)\n def multiplyByAdding(a, b):\n return (\n If(a == Int(0))\n .Then(Return(Int(0)))\n .Else(Return(b + multiplyByAdding(a - Int(1), b)))\n )\n\n program = Return(multiplyByAdding(Int(3), Int(5)))\n\n expected = \"\"\"#pragma version 4\nint 3\nint 5\ncallsub sub0\nreturn\nsub0: // multiplyByAdding\nstore 1\nstore 0\nload 0\nint 0\n==\nbnz sub0_l2\nload 1\nload 0\nint 1\n-\nload 1\nload 0\nload 1\ndig 3\ndig 3\ncallsub sub0\nstore 0\nstore 1\nload 0\nswap\nstore 0\nswap\npop\nswap\npop\n+\nretsub\nsub0_l2:\nint 0\nretsub\n \"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=4, 
assembleConstants=False)\n assert actual == expected\n\n\ndef test_compile_subroutine_recursive_multiple_args_5():\n @Subroutine(TealType.uint64)\n def multiplyByAdding(a, b):\n return (\n If(a == Int(0))\n .Then(Return(Int(0)))\n .Else(Return(b + multiplyByAdding(a - Int(1), b)))\n )\n\n program = Return(multiplyByAdding(Int(3), Int(5)))\n\n expected = \"\"\"#pragma version 5\nint 3\nint 5\ncallsub sub0\nreturn\nsub0: // multiplyByAdding\nstore 1\nstore 0\nload 0\nint 0\n==\nbnz sub0_l2\nload 1\nload 0\nint 1\n-\nload 1\nload 0\nload 1\nuncover 3\nuncover 3\ncallsub sub0\ncover 2\nstore 1\nstore 0\n+\nretsub\nsub0_l2:\nint 0\nretsub\n \"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=5, assembleConstants=False)\n assert actual == expected\n\n\ndef test_compile_subroutine_mutually_recursive():\n @Subroutine(TealType.uint64)\n def isEven(i: Expr) -> Expr:\n return If(i == Int(0), Int(1), Not(isOdd(i - Int(1))))\n\n @Subroutine(TealType.uint64)\n def isOdd(i: Expr) -> Expr:\n return If(i == Int(0), Int(0), Not(isEven(i - Int(1))))\n\n program = Return(isEven(Int(6)))\n\n expected = \"\"\"#pragma version 4\nint 6\ncallsub sub0\nreturn\nsub0: // isEven\nstore 0\nload 0\nint 0\n==\nbnz sub0_l2\nload 0\nint 1\n-\nload 0\ndig 1\ncallsub sub1\nswap\nstore 0\nswap\npop\n!\nb sub0_l3\nsub0_l2:\nint 1\nsub0_l3:\nretsub\nsub1: // isOdd\nstore 1\nload 1\nint 0\n==\nbnz sub1_l2\nload 1\nint 1\n-\nload 1\ndig 1\ncallsub sub0\nswap\nstore 1\nswap\npop\n!\nb sub1_l3\nsub1_l2:\nint 0\nsub1_l3:\nretsub\n \"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=4, assembleConstants=False)\n assert actual == expected\n\n\ndef test_compile_subroutine_mutually_recursive_5():\n @Subroutine(TealType.uint64)\n def isEven(i: Expr) -> Expr:\n return If(i == Int(0), Int(1), Not(isOdd(i - Int(1))))\n\n @Subroutine(TealType.uint64)\n def isOdd(i: Expr) -> Expr:\n return If(i == Int(0), Int(0), Not(isEven(i - Int(1))))\n\n program = Return(isEven(Int(6)))\n\n expected = \"\"\"#pragma version 5\nint 6\ncallsub sub0\nreturn\nsub0: // isEven\nstore 0\nload 0\nint 0\n==\nbnz sub0_l2\nload 0\nint 1\n-\nload 0\nswap\ncallsub sub1\nswap\nstore 0\n!\nb sub0_l3\nsub0_l2:\nint 1\nsub0_l3:\nretsub\nsub1: // isOdd\nstore 1\nload 1\nint 0\n==\nbnz sub1_l2\nload 1\nint 1\n-\nload 1\nswap\ncallsub sub0\nswap\nstore 1\n!\nb sub1_l3\nsub1_l2:\nint 0\nsub1_l3:\nretsub\n \"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=5, assembleConstants=False)\n assert actual == expected\n\n\ndef test_compile_loop_in_subroutine():\n @Subroutine(TealType.none)\n def setState(value: Expr) -> Expr:\n i = ScratchVar()\n return For(i.store(Int(0)), i.load() < Int(10), i.store(i.load() + Int(1))).Do(\n App.globalPut(Itob(i.load()), value)\n )\n\n program = Seq([setState(Bytes(\"value\")), Approve()])\n\n expected = \"\"\"#pragma version 4\nbyte \"value\"\ncallsub sub0\nint 1\nreturn\nsub0: // setState\nstore 0\nint 0\nstore 1\nsub0_l1:\nload 1\nint 10\n<\nbz sub0_l3\nload 1\nitob\nload 0\napp_global_put\nload 1\nint 1\n+\nstore 1\nb sub0_l1\nsub0_l3:\nretsub\n \"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=4, assembleConstants=False)\n assert actual == expected\n\n\ndef test_compile_subroutine_assemble_constants():\n @Subroutine(TealType.none)\n def storeValue(key: Expr, t1: Expr, t2: Expr, t3: Expr) -> Expr:\n return App.globalPut(key, t1 + t2 + t3 + Int(10))\n\n program = Seq(\n [\n If(Txn.application_id() == Int(0)).Then(\n storeValue(\n Concat(Bytes(\"test\"), 
Bytes(\"test\"), Bytes(\"a\")),\n Int(1),\n Int(1),\n Int(3),\n )\n ),\n Approve(),\n ]\n )\n\n expected = \"\"\"#pragma version 4\nintcblock 1\nbytecblock 0x74657374\ntxn ApplicationID\npushint 0 // 0\n==\nbz main_l2\nbytec_0 // \"test\"\nbytec_0 // \"test\"\nconcat\npushbytes 0x61 // \"a\"\nconcat\nintc_0 // 1\nintc_0 // 1\npushint 3 // 3\ncallsub sub0\nmain_l2:\nintc_0 // 1\nreturn\nsub0: // storeValue\nstore 3\nstore 2\nstore 1\nstore 0\nload 0\nload 1\nload 2\n+\nload 3\n+\npushint 10 // 10\n+\napp_global_put\nretsub\n \"\"\".strip()\n actual = compileTeal(program, Mode.Application, version=4, assembleConstants=True)\n assert actual == expected\n\n\ndef test_compile_wide_ratio():\n cases = (\n (\n WideRatio([Int(2), Int(100)], [Int(5)]),\n \"\"\"#pragma version 5\nint 2\nint 100\nmulw\nint 0\nint 5\ndivmodw\npop\npop\nswap\n!\nassert\nreturn\n\"\"\",\n ),\n (\n WideRatio([Int(2), Int(100)], [Int(10), Int(5)]),\n \"\"\"#pragma version 5\nint 2\nint 100\nmulw\nint 10\nint 5\nmulw\ndivmodw\npop\npop\nswap\n!\nassert\nreturn\n\"\"\",\n ),\n (\n WideRatio([Int(2), Int(100), Int(3)], [Int(10), Int(5)]),\n \"\"\"#pragma version 5\nint 2\nint 100\nmulw\nint 3\nuncover 2\ndig 1\n*\ncover 2\nmulw\ncover 2\n+\nswap\nint 10\nint 5\nmulw\ndivmodw\npop\npop\nswap\n!\nassert\nreturn\n\"\"\",\n ),\n (\n WideRatio([Int(2), Int(100), Int(3)], [Int(10), Int(5), Int(6)]),\n \"\"\"#pragma version 5\nint 2\nint 100\nmulw\nint 3\nuncover 2\ndig 1\n*\ncover 2\nmulw\ncover 2\n+\nswap\nint 10\nint 5\nmulw\nint 6\nuncover 2\ndig 1\n*\ncover 2\nmulw\ncover 2\n+\nswap\ndivmodw\npop\npop\nswap\n!\nassert\nreturn\n\"\"\",\n ),\n (\n WideRatio([Int(2), Int(100), Int(3), Int(4)], [Int(10), Int(5), Int(6)]),\n \"\"\"#pragma version 5\nint 2\nint 100\nmulw\nint 3\nuncover 2\ndig 1\n*\ncover 2\nmulw\ncover 2\n+\nswap\nint 4\nuncover 2\ndig 1\n*\ncover 2\nmulw\ncover 2\n+\nswap\nint 10\nint 5\nmulw\nint 6\nuncover 2\ndig 1\n*\ncover 2\nmulw\ncover 2\n+\nswap\ndivmodw\npop\npop\nswap\n!\nassert\nreturn\n\"\"\",\n ),\n )\n\n for program, expected in cases:\n actual = compileTeal(\n program, Mode.Application, version=5, assembleConstants=False\n )\n assert actual == expected.strip()\n"
}
] | 74 |
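The compiler tests in the record above all follow one pattern: build a PyTeal expression tree, call `compileTeal`, and compare against expected TEAL assembly. As a minimal standalone sketch of that same API (assuming the `pyteal` package is installed; the names mirror the test suite's own imports):

```python
# Minimal sketch of the compileTeal API exercised by the tests above.
# Assumes the pyteal package; constructs mirror the test imports.
from pyteal import App, Approve, Bytes, Int, Mode, Seq, compileTeal

# A tiny application: write a constant to global state, then approve.
program = Seq([
    App.globalPut(Bytes("key"), Int(42)),
    Approve(),
])

# Compile to TEAL source for Application mode, as the tests do.
print(compileTeal(program, Mode.Application, version=5))  # starts with "#pragma version 5"
```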
dzambon/sjf-data-viz | https://github.com/dzambon/sjf-data-viz | 7891d5dc47f7ef8d8b27341dacfe45e79c673c2d | ca1a86fe308f03fbb5cb3bc61234ff3e42bbcf44 | d0d18d57063f0e19df2d8f951f8d98aa060890a5 | refs/heads/master | 2022-12-10T01:33:23.935784 | 2020-09-11T07:06:20 | 2020-09-11T07:06:20 | 291,998,054 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7520710229873657,
"alphanum_fraction": 0.7556213140487671,
"avg_line_length": 29.178571701049805,
"blob_id": "9ec6d62a849930a91238df035f31f422e52bab73",
"content_id": "3f78887553681a91f16b31e6b7ae181a626bb1e1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1691,
"license_type": "no_license",
"max_line_length": 275,
"num_lines": 56,
"path": "/README.md",
"repo_name": "dzambon/sjf-data-viz",
"src_encoding": "UTF-8",
"text": "# Machine learning for large datasets visualization\n\nStudy Week -- Fascinating Informatics -- [www.sjf.ch](www.sjf.ch) \nSeptember 2020\n\nSamuel Baumgartner (Gymnasium Oberwil) \nMichelle Lebo (Gymnasium NMS Bern) \nSupervised by Daniele Zambon (Università della Svizzera italiana) \n\n**Abstract**: \nCan we perceive dimensions beyond the third? Can we, as aspiring data scientists, make visual representations of large and multi-dimensional data sets, that seem too complex for humans to have a sense of?\nIn this project, we implement machine learning algorithms to collections of images, non-numeric entities like words, or data from social networks, and we let the machine find patterns that allow us to draw intuitive visualizations and to navigate through such huge data sets.\n\n## Road map\n\n**Preliminaries**\n\n- Data visualization in Python\n\n**Music representation and visualization**\n\n- Download data from Spotify through the API\n- Data cleaning\n- 2D and 3D representations\n- Visualization of the representations\n\n**Playlist creation**\n\n- Explore the dataset \n- Create a playlist from walks\n- Upload the playlist on Spotify through the API\n\n\n## Software setup\n\n### Python\n```\npy -m pip install virtualenv\npy -m virtualenv venv\nsource venv/bin/activate\n```\n\n### Git repository of the project\n```\ngit clone https://github.com/dzambon/sjf-data-viz\npy -m pip install -e .\n```\n\n### PyCharm IDE (Optional)\nDownload and open the project. Select the created virtual environment in the interpreter settings.\n\n### FFMPEG (Optional)\nDownload `ffmpeg` from [https://ffmpeg.zeranoe.com/builds/](https://ffmpeg.zeranoe.com/builds/) and add the bin folder to `PATH` variable:\n```\nexport PATH=\"...ffmpeg.folder.../bin:$PATH\"\n```\n"
},
{
"alpha_fraction": 0.6348074078559875,
"alphanum_fraction": 0.660485029220581,
"avg_line_length": 24.035715103149414,
"blob_id": "cdd2aea508a70b934cf080d1740274c6cf3a56bb",
"content_id": "d60b190dfb977f558a93c01a80f36c29bdeb8e8d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 701,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 28,
"path": "/digits/create_representations.py",
"repo_name": "dzambon/sjf-data-viz",
"src_encoding": "UTF-8",
"text": "import matplotlib.pyplot as plt\nfrom sjf_data_viz.utils.visualization import *\nfrom sklearn import datasets\nimport numpy as np\n\nn_components=3\nonly_method=\"t-SNE\"\n\nds = datasets.load_digits(n_class=10)\nX = ds.data\ny = ds.target\nn_neighbors = 30\n\nz_list = draw_all(X=X, color=y, n_neighbors=n_neighbors, n_components=n_components,\n only_method=only_method, base_folder=\"./results\")\n\nif only_method is not None:\n pp = np.random.permutation(X.shape[0])[::10]\n imgs = X[pp].reshape(-1, 8, 8)/16.\n z = z_list[0][1][pp]\n\n if n_components == 3:\n _ = ImageAnnotations3D(z, imgs)\n else:\n ImageAnnotations2D(z, imgs)\n\nplt.savefig(\"./results/digits.pdf\")\nplt.show()\n"
},
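The script above delegates the actual embedding work to the project's `draw_all` utility. For orientation, a minimal sketch of what its t-SNE branch computes on the same digits data (scikit-learn and matplotlib assumed; the `init="pca"` and `random_state=0` settings are the ones `draw_all` uses):

```python
# Minimal sketch of the t-SNE embedding that create_representations.py
# requests via draw_all(only_method="t-SNE"); assumes scikit-learn.
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.manifold import TSNE

ds = datasets.load_digits(n_class=10)
# Same settings as draw_all's t-SNE entry: PCA init, fixed seed.
z = TSNE(n_components=2, init="pca", random_state=0).fit_transform(ds.data)

plt.scatter(z[:, 0], z[:, 1], c=ds.target, marker=".")
plt.colorbar(label="digit")
plt.show()
```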
{
"alpha_fraction": 0.6548672318458557,
"alphanum_fraction": 0.6601769924163818,
"avg_line_length": 32.17647171020508,
"blob_id": "612fc1ec42cf70b587ef30db20f86ecc8307625d",
"content_id": "7e13b68f94d5645bdb46ef9758d83ce3fe827764",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 565,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 17,
"path": "/setup.py",
"repo_name": "dzambon/sjf-data-viz",
"src_encoding": "UTF-8",
"text": "from setuptools import setup\nimport os\n\ndef read(fname):\n return open(os.path.join(os.path.dirname(__file__), fname)).read()\n\nsetup(\n name='sjf_data_viz', # Required\n version='0.1.0', # Required\n description='SJF Fascinating Informatics -- Data visualization project', # Optional\n long_description=read('README.md'),\n url='https://github.com/dzambon/sjf-data-viz', # Optional\n author='D. Zambon',\n author_email='[email protected]', # Optional\n packages=['sjf_data_viz'],\n install_requires=['matplotlib', 'sklearn', 'umap-learn', 'spotipy']\n)\n\n"
},
{
"alpha_fraction": 0.5968706011772156,
"alphanum_fraction": 0.603281557559967,
"avg_line_length": 33.59774398803711,
"blob_id": "dbef4f02e6668b05078b04204d0fec9d3d6c41c7",
"content_id": "506a7198f7db1064b9de026a3ec348e57fad4cc8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9203,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 266,
"path": "/sjf_data_viz/spotify.py",
"repo_name": "dzambon/sjf-data-viz",
"src_encoding": "UTF-8",
"text": "import numpy as np\nfrom matplotlib import pyplot as plt\nimport spotipy\nfrom .config import USERNAME, CID, SECRET, PERSONAL_PLAYLIST, REDIRECT_URI\nSPOTIFY_CONNECTION = None\n\ndef login_to_spotify():\n global SPOTIFY_CONNECTION, USERNAME, CID, SECRET, REDIRECT_URI\n\n if SPOTIFY_CONNECTION is not None:\n return SPOTIFY_CONNECTION\n\n #for avaliable scopes see https://developer.spotify.com/web-api/using-scopes/\n scope = 'user-library-read playlist-modify-public playlist-read-private playlist-modify-private'\n\n client_credentials_manager = spotipy.oauth2.SpotifyClientCredentials(client_id=CID, client_secret=SECRET)\n sp = spotipy.Spotify(client_credentials_manager=client_credentials_manager)\n token = spotipy.util.prompt_for_user_token(USERNAME, scope, CID, SECRET, REDIRECT_URI)\n\n if token:\n print(\"Got token for\", USERNAME)\n SPOTIFY_CONNECTION = spotipy.Spotify(auth=token)\n return SPOTIFY_CONNECTION\n else:\n print(\"Can't get token for\", USERNAME)\n return None\n\ndef get_spotify_playlist(pl_id):\n global SPOTIFY_CONNECTION, USERNAME\n \n if SPOTIFY_CONNECTION is None:\n SPOTIFY_CONNECTION = login_to_spotify()\n \n pl_ = SPOTIFY_CONNECTION.user_playlist(USERNAME, pl_id)\n \n return pl_\n\ndef get_spotify_features(song_id):\n global SPOTIFY_CONNECTION\n if SPOTIFY_CONNECTION is None:\n SPOTIFY_CONNECTION = login_to_spotify()\n\n feat_ = SPOTIFY_CONNECTION.audio_features(song_id)\n assert len(feat_) == 1\n return feat_[0]\n\ndef upload_playlist(song_ids):\n # Create a new playlist for tracks to add - you may also add these tracks to your source playlist and proceed\n # playlist_recs = sp.user_playlist_create(username, name='proj0_pl_out', public=False)\n # Take existing\n global SPOTIFY_CONNECTION, USERNAME, PERSONAL_PLAYLIST\n\n if SPOTIFY_CONNECTION is None:\n SPOTIFY_CONNECTION = login_to_spotify()\n\n playlist_recs = SPOTIFY_CONNECTION.user_playlist(USERNAME, PERSONAL_PLAYLIST)\n print(playlist_recs['id'])\n # # Add tracks to the playlist\n # sp.user_playlist_add_tracks(username, playlist_recs['id'], selected_df['id'].values.tolist());\n # Replace all tracks in the playlist\n SPOTIFY_CONNECTION.user_playlist_replace_tracks(USERNAME, playlist_recs['id'], song_ids);\n\ndef get_integer_labels(str_labels):\n from sklearn import preprocessing\n le = preprocessing.LabelEncoder()\n int_labels = le.fit_transform(str_labels)\n return int_labels, le\n\ndef improve_data_representation(X, feature_names, extra_info, scale=False,\n use_playlist=-1, aug_ica=False, aug_tsne=False, aug_umap=False):\n\n # y, playlist_names = pd.factorize(extra_info[\"original_playlist\"])\n #\n y, le = get_integer_labels(extra_info[\"original_playlist\"])\n playlist_names = le.classes_\n cols = feature_names\n \n if use_playlist > 0:\n oh = np.zeros((y.shape[0], y.max() + 1))\n oh[np.arange(y.shape[0]), y] = 1\n X = np.concatenate([X, oh], axis=1)\n playlist_oh_columns = np.array(list(range(len(cols), len(cols)+len(playlist_names))))\n cols += list(playlist_names)\n \n if aug_ica:\n from sklearn.decomposition import FastICA\n n_components = 3\n ica = FastICA(n_components=n_components)\n x_ = ica.fit(X).transform(X)\n X = np.concatenate([X, x_], axis=1)\n cols += [\"ica{}\".format(i + 1) for i in range(n_components)]\n \n if aug_tsne:\n from sklearn.manifold import TSNE\n n_components = 3\n tsne = TSNE(n_components=n_components)\n x_ = tsne.fit_transform(X)\n X = np.concatenate([X, x_], axis=1)\n cols += [\"tsne{}\".format(i + 1) for i in range(n_components)]\n \n if aug_umap:\n n_components = 3\n 
n_neighbors = 10\n from umap import UMAP\n umap = UMAP(n_components=n_components, n_neighbors=n_neighbors)\n x_ = umap.fit_transform(X)\n X = np.concatenate([X, x_], axis=1)\n cols += [\"umap{}\".format(i + 1) for i in range(n_components)]\n \n if scale:\n # scaler = MinMaxScaler()\n from sklearn.preprocessing import StandardScaler\n scaler = StandardScaler()\n scaler.fit(X)\n X = scaler.transform(X)\n \n if use_playlist > 0:\n X[:, playlist_oh_columns] = X[:, playlist_oh_columns] * use_playlist\n \n return X, cols, scaler\n\ndef visualize_representations(z, extra_info=None, with_click=False):\n y, le = get_integer_labels(extra_info[\"original_playlist\"])\n playlist_names = le.classes_\n \n fig, ax = plt.subplots(figsize=(8, 8))\n scatter = plt.scatter(z[:, 0], z[:, 1], c=y, picker=5, marker=\".\")\n legend1 = ax.legend(scatter.legend_elements()[0], playlist_names,\n loc=\"lower left\", title=\"Classes\")\n ax.add_artist(legend1)\n ax.axis(\"equal\")\n plt.legend()\n\n if with_click:\n current_label = ax.annotate(\"None\", xy=z[0])\n \n def onpick(event):\n for i in event.ind:\n print(\"[{}] {} | {}\\nhttps://open.spotify.com/track/{}\".format(i,\n extra_info[\"song_artist\"][i],\n extra_info[\"song_name\"][i],\n extra_info[\"id\"][i]))\n current_label.set_text(\"{}_{}\".format(str(i), extra_info[\"song_name\"][i]))\n current_label.set_x(z[i, 0])\n current_label.set_y(z[i, 1])\n current_label.figure.canvas.draw()\n \n fig.canvas.mpl_connect('pick_event', onpick)\n\ndef extract_song_ids(pl_info):\n # Michelle and Samuel\n playlist_song_ids = []\n for i in range(len(pl_info[\"tracks\"][\"items\"])):\n playlist_song_ids.append(pl_info[\"tracks\"][\"items\"][i][\"track\"][\"id\"])\n return playlist_song_ids\n\n\ndef extract_song_artists(pl_info):\n # Michelle\n playlist_song_artists = []\n for i in range(len(pl_info[\"tracks\"][\"items\"])):\n playlist_song_artists.append(pl_info[\"tracks\"][\"items\"][i][\"track\"][\"artists\"][0][\"name\"])\n return playlist_song_artists\n\ndef extract_song_names(pl_info):\n # Samuel\n playlist_song_names = []\n for i in range(len(pl_info[\"tracks\"][\"items\"])):\n playlist_song_names.append(pl_info[\"tracks\"][\"items\"][i][\"track\"][\"name\"])\n return playlist_song_names\n\ndef get_feature_list(song_list):\n # Michelle\n song_feature_list = []\n for song_id in song_list:\n song_features = get_spotify_features(song_id)\n song_feature_list.append(song_features)\n return song_feature_list\n\ndef create_data_matrix(feature_list):\n # Samuel\n desired_features = ['danceability', 'energy', 'key', 'loudness', \n 'mode', 'speechiness', 'acousticness', 'instrumentalness', 'liveness', \n 'valence', 'tempo',\n # 'type', 'id', 'uri', 'track_href', 'analysis_url',\n 'duration_ms', 'time_signature', \n # 'track_name', 'playlist', 'track_artist'\n ]\n\n X = np.zeros((len(feature_list), len(desired_features)))\n for s in range(len(feature_list)):\n for f in range(len(desired_features)):\n X[s, f] = feature_list[s][desired_features[f]]\n return X, desired_features\n\ndef distance(z_i, z_j):\n assert z_i.ndim == 1\n assert z_j.ndim == 1\n return np.linalg.norm(z_i - z_j)\n\ndef create_ordered_playlist(z, s0, consider_s0=True, drift=False):\n # Michelle and Samuel\n num_of_songs = z.shape[0]\n D = np.zeros((num_of_songs, num_of_songs))\n \n playlist = [s0]\n \n # precompute all distances\n for i in range(num_of_songs):\n for j in range(num_of_songs):\n D[i, j] = distance(z[i], z[j])\n \n tot_songs_in_playlist = 20\n for current_songs_in_playlist in 
range(tot_songs_in_playlist):\n \n # sort the corresponding row\n scores = 0\n if consider_s0:\n scores += D[s0]\n if drift:\n scores += D[playlist[-1]]\n\n sorted_indices = np.argsort(scores)\n \n # find the next song\n l = 0\n while sorted_indices[l] in playlist:\n l = l + 1\n playlist.append(sorted_indices[l])\n \n print(playlist)\n return np.array(playlist)\n\n\ndef create_random_walk_playlist(z, s0, k=5):\n # Michelle and Samuel\n num_of_songs = z.shape[0]\n D = np.zeros((num_of_songs, num_of_songs))\n \n playlist = [s0]\n \n # precompute all distances\n for i in range(num_of_songs):\n for j in range(num_of_songs):\n D[i, j] = distance(z[i], z[j])\n \n tot_songs_in_playlist = 20\n for current_songs_in_playlist in range(tot_songs_in_playlist):\n \n # sort the corresponding row\n sorted_indices = np.argsort(D[s0])\n \n # find the next song\n l = 0\n closest_song_list = []\n while len(closest_song_list) < k:\n if sorted_indices[l] in playlist:\n l = l + 1\n else:\n closest_song_list.append(l)\n l = l + 1\n next_song = np.random.choice(closest_song_list)\n playlist.append(sorted_indices[next_song])\n \n print(playlist)\n return np.array(playlist)\n"
},
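A usage sketch for the playlist builders defined in this module; the 2-D coordinates below are synthetic stand-ins for the learned song embedding (in the project, `z` comes from t-SNE over Spotify audio features):

```python
# Usage sketch for create_random_walk_playlist on toy embeddings.
# The coordinates are made up, purely for illustration.
import numpy as np
from sjf_data_viz.spotify import create_random_walk_playlist

rng = np.random.default_rng(0)
z = rng.normal(size=(50, 2))                   # 50 "songs" in 2-D
playlist = create_random_walk_playlist(z, s0=0, k=5)
print(playlist)                                 # 21 song indices, s0 first
```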
{
"alpha_fraction": 0.5192531943321228,
"alphanum_fraction": 0.5301439166069031,
"avg_line_length": 39.82539749145508,
"blob_id": "5efaa2adbc9acc33a514f65429c655d873d41875",
"content_id": "cd56c49ee7cc8dea54af6a2469a413e5a6117012",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2571,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 63,
"path": "/spotify/dz_template.py",
"repo_name": "dzambon/sjf-data-viz",
"src_encoding": "UTF-8",
"text": "from sjf_data_viz.spotify import *\n\nsp = login_to_spotify()\n\n# -----------------------------------------------------\n# Collect some data from spotify and save them\n# -----------------------------------------------------\n# 1) Create a dictionary of playlist IDs like the following\n# >>> playlist_ids = {\n# >>> \"punk\": \"37i9dQZF1DX3LDIBRoaCDQ\",\n# >>> \"reggae\": \"37i9dQZF1DXa8n42306eJB\",\n# >>> }\n\n# 2) Download the playlist information for each one of the above playlists.\n# hint:\n# - Make a for loop and iterate through the playlist ids. you can follow this\n# example https://www.tutorialspoint.com/How-to-iterate-through-a-dictionary-in-Python\n# - Use the following function to download the information\n# >>> playlist_information = get_spotify_playlist(\"37i9dQZF1DX3LDIBRoaCDQ\")\n\n# 3) Save to files the objects with the playlist information.\n# hints:\n# - create a folder `downloaded_playlists`\n# - save each playlist info on a different file, so do another for loop.\n# - use `pickle` package to save the playlist information to file. A simple example\n# https://wiki.python.org/moin/UsingPickle.\n# For example, the punk playlist can be saved as follows:\n# >>> pickle.dump(playlist_information, open(\"downloaded_playlists/playlist_info_{}.p\".format(\"punk\"), \"wb\" ))\n\n# 4) Create a function to extract a list of song IDs from the playlist information (only the IDs!)\n# hints:\n# - playlist_information is a dictionary. Explore it and find the list of IDs\n# - store them in list:\n# >>> playlist_songs = []\n# >>> for song_id in playlist_information.........:\n# >>> playlist_songs.append(song_id)\n\n# 5) Iterate the song IDs and download the spotify features. Store all the information.\n# hints:\n# >>> song_feature = get_spotify_features(song_id)\n\n# -----------------------------------------------------\n# Create 2D representation\n# -----------------------------------------------------\n# X = music\n# z = tsne.fit_transform(X)\n\n# -----------------------------------------------------\n# Visualize it\n# -----------------------------------------------------\n# plt.plot(z)\n\n# -----------------------------------------------------\n# Explore it to create the Playlist\n# -----------------------------------------------------\n# ref_song = \"...\"\n# num_songs = 10\n# playlist = create_playlist(ref_song, num_songs)\n\n# -----------------------------------------------------\n# Upload playlist\n# -----------------------------------------------------\n# upload_playlist(playlist)"
},
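main.py below implements the full pipeline; as a smaller illustration, steps 1-3 of this template (download each playlist once, then cache it with pickle) can look like the following sketch. The playlist IDs and file naming are the ones the template's hints already suggest:

```python
# Sketch of template steps 1-3: download each playlist once and cache
# it with pickle, using the folder/file naming from the hints above.
import os
import pickle
from sjf_data_viz.spotify import get_spotify_playlist

playlist_ids = {
    "punk": "37i9dQZF1DX3LDIBRoaCDQ",
    "reggae": "37i9dQZF1DXa8n42306eJB",
}

os.makedirs("downloaded_playlists", exist_ok=True)
for name, pl_id in playlist_ids.items():
    path = "downloaded_playlists/playlist_info_{}.p".format(name)
    if not os.path.isfile(path):                # step 3: cache on disk
        info = get_spotify_playlist(pl_id)      # step 2: download
        pickle.dump(info, open(path, "wb"))
```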
{
"alpha_fraction": 0.5718843936920166,
"alphanum_fraction": 0.5912162065505981,
"avg_line_length": 37.330936431884766,
"blob_id": "789d1e65a2bcc0477b64863558274aa987cf2018",
"content_id": "411e2450076ad56c0aea66f703bc9ed3b3d78e90",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5328,
"license_type": "no_license",
"max_line_length": 129,
"num_lines": 139,
"path": "/spotify/main.py",
"repo_name": "dzambon/sjf-data-viz",
"src_encoding": "UTF-8",
"text": "import os\nimport pickle\nfrom datetime import datetime\nfrom sjf_data_viz.spotify import *\n\nstarting_song = 234\nrecompute_representation = False\n\n# -----------------------------------------------------\n# Collect some data from spotify and save them\n# -----------------------------------------------------\n# 1) Create a dictionary of playlist IDs like the following\nplaylist_ids = {\n \"punk1\": \"37i9dQZF1DX3LDIBRoaCDQ\",\n \"punk2\": \"37i9dQZF1DXa9wYJr1oMFq\",\n \"reggae\": \"37i9dQZF1DXa8n42306eJB\",\n \"rap\": \"3fxpDkHyW6Y2aR5FFRLOtO\",\n \"classical\": \"5tXCRZAUKp2uqtmJZNkQxY\",\n \"classic\": \"37i9dQZF1DX9OZisIoJQhG\",\n \"rock-classic\": \"37i9dQZF1DWXRqgorJj26U\",\n \"heavy metal\": \"37i9dQZF1DX9qNs32fujYe\",\n \"jazz\": \"37i9dQZF1DX4wta20PHgwo\",\n \"funk\": \"4xFSdiuP4gpR4wq2OghlOs\",\n \"hip hop\": \"37i9dQZF1DX0XUsuxWHRQd\",\n \"electronic\": \"37i9dQZF1DWSFNWN7fsnAm\",\n \"indie\": \"37i9dQZF1DX8hcTuUCeYxa\"}\n\nX_list = [] # it will contain all the matrices X of each playlist\nextra_info = {\"id\": [],\n \"song_artist\": [],\n \"song_name\": [],\n \"original_playlist\": []}\nfor pl_name, pl_id in playlist_ids.items():\n print(pl_id + \":\\t\" + pl_name + \"\\t\\t\", end=\"\")\n \n #check whether or not we already have the playlist information\n filename = \"downloaded_playlists/pl_info_\" + pl_name + \".pickle\"\n if not os.path.isfile(filename):\n\n # 2) Download the playlist information for each one of the above playlists.\n playlist_information = get_spotify_playlist(pl_id)\n\n # 3) Save to files the objects with the playlist information.\n pickle.dump(playlist_information, open(\"downloaded_playlists/pl_info_\" + pl_name + \".pickle\", \"wb\"))\n\n else:\n playlist_information = pickle.load(open(filename, \"rb\"))\n\n # 4) Create a function to extract a list of song IDs from the playlist information (only the IDs!)\n song_list = extract_song_ids(playlist_information)\n\n #check whether or not we already have the playlist information\n filename = \"downloaded_playlists/pl_feat_\" + pl_name + \".pickle\"\n if not os.path.isfile(filename):\n # 5) Iterate the song IDs and download the spotify features. 
Store all the information.\n        song_feature_list = get_feature_list(song_list)\n        pickle.dump(song_feature_list, open(filename, \"wb\"))\n    else:\n        song_feature_list = pickle.load(open(filename, \"rb\"))\n\n    # 6) Create data matrix X\n    X_current_playlist, feature_names = create_data_matrix(song_feature_list)\n    X_list.append(X_current_playlist)\n    extra_info[\"id\"] += song_list\n    extra_info[\"song_name\"] += extract_song_names(playlist_information)\n    extra_info[\"song_artist\"] += extract_song_artists(playlist_information)\n    extra_info[\"original_playlist\"] += [pl_name]*len(song_list)\n\n    print(\"with \" + str(len(song_list)) + \" songs\")\n\n# 7) Concatenate all matrices X\nX = np.concatenate(X_list, axis=0)\nprint(X.shape)\n\n# 8) Improve data representation\nplaylist_relevance = .4\nX, feature_names, _ = improve_data_representation(X, feature_names, extra_info, scale=True,\n                                                  use_playlist=playlist_relevance, aug_ica=False, aug_tsne=False, aug_umap=False)\n\n# -----------------------------------------------------\n# Create 2D representation\n# -----------------------------------------------------\nfrom sklearn.manifold import Isomap, TSNE, MDS\nfrom umap import UMAP\nman = TSNE(perplexity=50)\nfilename = \"downloaded_playlists/z_\" + str(man) + str(playlist_relevance) + \".pickle\"\nif recompute_representation or not os.path.isfile(filename):\n    z = man.fit_transform(X)\n    pickle.dump(z, open(filename, \"wb\"))\nelse:\n    z = pickle.load(open(filename, \"rb\"))\nprint(z.shape)\n\n# -----------------------------------------------------\n# Visualize it\n# -----------------------------------------------------\n# visualize_representations(z, extra_info, with_click=True)\n# plt.title(man)\n# plt.show()\n\n\n# -----------------------------------------------------\n# Explore it to create the Playlist\n# -----------------------------------------------------\ns0 = starting_song\n# playlist = create_ordered_playlist(z, s0)\n# playlist = create_ordered_playlist(z, s0, drift=True, consider_s0=False)\nplaylist = create_random_walk_playlist(z, s0)\n\nfor song_index in playlist:\n    print(extra_info[\"id\"][song_index])\n\nfor song_index in playlist:\n    print(extra_info[\"song_name\"][song_index] + \" | \" + extra_info[\"song_artist\"][song_index])\n\nsong_ids = []\nfor song_index in playlist:\n    song_ids.append(extra_info[\"id\"][song_index])\n\npickle.dump(song_ids, open(\"created_playlists/\" + datetime.now().strftime(\"%y%m%d_%H%M\") + \".pickle\", \"wb\"))\n\n# -----------------------------------------------------\n# Visualize playlist\n# -----------------------------------------------------\nvisualize_representations(z, extra_info, with_click=True)\nplt.scatter(z[s0, 0], z[s0, 1], c=\"g\", marker=\"+\", label=\"start\", s=200)\nplt.plot(z[playlist, 0], z[playlist, 1], c=\"r\", marker=\"\", label=\"playlist\")\n\n# plt.plot(z[song_ids, ])\nplt.title(man)\nplt.savefig(\"playlist.pdf\")\nplt.show()\n\n# -----------------------------------------------------\n# Upload playlist\n# -----------------------------------------------------\nres = input(\"Do you want to upload it? yes/[no]\\n\")\nif res == \"yes\":\n    upload_playlist(song_ids)\n"
},
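main.py repeats the same compute-or-load idiom three times (playlist info, audio features, and the embedding). A hypothetical helper capturing that pattern, shown purely for illustration; `compute_or_load` is not part of sjf_data_viz:

```python
# Hypothetical helper capturing main.py's repeated caching idiom.
import os
import pickle

def compute_or_load(path, compute, recompute=False):
    """Return the pickled result at `path`, or run compute() and cache it."""
    if not recompute and os.path.isfile(path):
        return pickle.load(open(path, "rb"))
    result = compute()
    pickle.dump(result, open(path, "wb"))
    return result

# e.g. z = compute_or_load(filename, lambda: man.fit_transform(X),
#                          recompute=recompute_representation)
```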
{
"alpha_fraction": 0.7012726068496704,
"alphanum_fraction": 0.7227059602737427,
"avg_line_length": 27.169811248779297,
"blob_id": "807d2aa56247fbf824a96c219a61c7594de208bc",
"content_id": "6685eaf0bdd59ede74ffe8e098e6273ccdab43b7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1493,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 53,
"path": "/digits/visualize_learning_process.py",
"repo_name": "dzambon/sjf-data-viz",
"src_encoding": "UTF-8",
"text": "\"\"\"\nCheckout also these:\n- https://distill.pub/2016/misread-tsne/\n\"\"\"\n\nfrom sklearn import datasets\nfrom sjf_data_viz.utils.representations import JumpingTSNE\n\n# Create representation with our modified TSNE\nn_components=2\n\nds = datasets.load_digits(n_class=10)\nX = ds.data#[:100]\ny = ds.target#[:100]\nn_neighbors = 30\n\njtsne = JumpingTSNE(jump_size=6, n_components=n_components, init='random', random_state=0)\n\nprint(\"Learning the representations\")\nz = jtsne.fit_transform(X)\nz_list = jtsne.X_embedded_jumps\nn_exploration, n_finetuning = jtsne.num_jumps_per_type()\n\n\n# Create animation\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport matplotlib.animation as animation\nfrom matplotlib import cm\n\nfig, ax = plt.subplots()\nprint(\"Preparing animation...\")\n\n#base scatter plot\nscatter = ax.scatter(z[:, 0], z[:, 1], c=y)\nlegend1 = ax.legend(*scatter.legend_elements(), loc=\"lower left\", title=\"Digit\")\nax.add_artist(legend1)\n\n#function to edit scatter plot\ndef animate(i):\n scatter.set_offsets(z_list[i])\n ax.set_title(\"Exploration {iexp}/{exptot} | Fine tuning {ift}/{fttot}\".format(\n iexp=min([i, n_exploration]), exptot=n_exploration, \n ift=max([i-n_exploration, 0]), fttot=n_finetuning))\n return scatter\n\n#animation\nani = animation.FuncAnimation(\n fig, animate, frames=len(z_list), interval=10, blit=False, save_count=50) #, init_func=init)\nwriter = animation.FFMpegWriter(fps=5, bitrate=1800)\nani.save(\"results/viz_learning.mp4\", writer=writer)\n\nplt.show()\n"
},
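The animation mechanics above reduce to one idea: `FuncAnimation` calls `animate(i)` once per frame, and `set_offsets` moves every scatter point to that frame's coordinates. A self-contained sketch with synthetic frames (the data here is made up for illustration):

```python
# Self-contained sketch of the FuncAnimation pattern used above.
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation

rng = np.random.default_rng(0)
z_list = [rng.normal(size=(100, 2)) * (1 + 0.1 * i) for i in range(30)]

fig, ax = plt.subplots()
scatter = ax.scatter(z_list[0][:, 0], z_list[0][:, 1])
ax.set_xlim(-6, 6)
ax.set_ylim(-6, 6)

def animate(i):
    scatter.set_offsets(z_list[i])  # move all points to frame i
    return (scatter,)

ani = animation.FuncAnimation(fig, animate, frames=len(z_list), interval=50)
plt.show()
```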
{
"alpha_fraction": 0.5357142686843872,
"alphanum_fraction": 0.5399159789085388,
"avg_line_length": 46.599998474121094,
"blob_id": "44154e12ec163568532bf937e8cd66f580f8028c",
"content_id": "1b08ead8a5c3541682221de63afbd1a042f82593",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 476,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 10,
"path": "/sjf_data_viz/utils/others.py",
"repo_name": "dzambon/sjf-data-viz",
"src_encoding": "UTF-8",
"text": "def dict_to_string(d, indent=0, output=\"\", only_type=True):\n \"\"\" Nice dictionary visualization \"\"\"\n if isinstance(d, dict):\n for k, v in d.items():\n val = type(v).__name__ if only_type else v\n msg = \"{ind}{key}: \\t{val}\".format(ind=\"\".join([\" \"]*indent), key=k, val=val)\n # print(msg)\n output += msg + \"\\n\"\n output = dict_to_string(v, indent=indent + 2, output=output, only_type=only_type)\n return output\n"
},
{
"alpha_fraction": 0.5372173190116882,
"alphanum_fraction": 0.5524936318397522,
"avg_line_length": 35.09189224243164,
"blob_id": "72cf9234f8533d0d834773016048e26efa789362",
"content_id": "ac29bc6668f506fa4d3bc42c035aebdc544e5bf3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6677,
"license_type": "no_license",
"max_line_length": 92,
"num_lines": 185,
"path": "/sjf_data_viz/utils/visualization.py",
"repo_name": "dzambon/sjf-data-viz",
"src_encoding": "UTF-8",
"text": "from sklearn import manifold\nimport umap\n\nimport matplotlib.pyplot as plt\nfrom matplotlib.ticker import NullFormatter\nfrom mpl_toolkits.mplot3d import Axes3D\n\nimport os\nfrom functools import partial\nfrom collections import OrderedDict\nimport pickle\nfrom time import time\nimport numpy as np\n\ndef draw_all(X, color, n_neighbors=10, only_method=None, base_folder=\".\", n_components=2):\n \n LLE = partial(manifold.LocallyLinearEmbedding,\n n_neighbors, n_components, eigen_solver='auto')\n\n methods = OrderedDict()\n methods['LLE'] = LLE(method='standard')\n methods['LTSA'] = LLE(method='ltsa')\n methods['Hessian LLE'] = LLE(method='hessian')\n methods['Modified LLE'] = LLE(method='modified')\n methods['Isomap'] = manifold.Isomap(n_neighbors, n_components)\n methods['MDS'] = manifold.MDS(n_components, max_iter=100, n_init=1)\n methods['SE'] = manifold.SpectralEmbedding(n_components=n_components,\n n_neighbors=n_neighbors)\n methods['t-SNE'] = manifold.TSNE(n_components=n_components, init='pca',\n random_state=0)\n methods['UMAP'] = umap.UMAP(n_components=n_components, n_neighbors=n_neighbors)\n\n if only_method is not None:\n m = OrderedDict()\n m[only_method] = methods[only_method]\n methods = m\n\n # Create figure\n fig = plt.figure(figsize=(12, 12))\n\n # Plot results\n z_list = []\n for i, (method_name, method) in enumerate(methods.items()):\n\n # Init the correct subplot grid and axes\n if only_method is None:\n if n_components == 3:\n ax = fig.add_subplot(3, 3, i+1, projection=Axes3D.name)\n else: \n ax = fig.add_subplot(3, 3, i+1)\n else:\n if n_components == 3:\n ax = fig.add_subplot(111, projection=Axes3D.name)\n else: \n ax = fig.add_subplot(111)\n \n # Compute or load data\n fname = base_folder + \"/\" + method_name + \".pkl\"\n if os.path.isfile(fname):\n #load\n with open(fname, 'rb') as f:\n [z, dt] = pickle.load(f)\n else:\n #compute\n try:\n t0 = time()\n print(\"fitting the \" + str(method) + \"...\")\n z = method.fit_transform(X)\n t1 = time()\n dt = t1 - t0\n except Exception as e:\n import traceback\n traceback.print_exc()\n dt, z = -1, None\n if dt > 0:\n #save\n with open(fname, 'wb') as f: pickle.dump([z, dt], f)\n\n # Draw representations\n print(\"{}: {:.2g} sec\".format(method_name, dt))\n scatter = ax.scatter(*[z[:, i] for i in range(z.shape[1])], c=color, marker=\".\")\n legend1 = ax.legend(*scatter.legend_elements(), title=\"Digit\")\n ax.add_artist(legend1)\n ax.set_title(\"{} ({:.2g} sec)\".format(method_name, dt))\n ax.axis('tight')\n if only_method is None:\n ax.xaxis.set_major_formatter(NullFormatter())\n ax.yaxis.set_major_formatter(NullFormatter())\n\n # Store representations to be returned\n z_list.append((method_name, z))\n\n return z_list\n\nfrom mpl_toolkits.mplot3d import proj3d\nfrom matplotlib import offsetbox\n\nclass ImageAnnotations2D(object):\n \"\"\" Displays images directly in the plot. 
\"\"\"\n\n def __init__(self, xy, imgs, ax2d=None, cmap=plt.get_cmap(\"Greys\")):\n\n # store attributes\n self.imgs = imgs\n self.ax2d = plt.gca() if ax2d is None else ax2d \n self.cmap = cmap\n self.xy = xy\n\n # draw images\n self.annot = []\n for s,im in zip(self.xy, self.imgs):\n x, y = s\n self.annot.append(self.image(im,[x,y]))\n # ab_list.append(add_image(x=X[p], z=z_list[0][1][p], ax=ax))\n\n def image(self, img, xy):\n \"\"\" Draw image to location xy \"\"\"\n imagebox = offsetbox.OffsetImage(img, zoom=2, cmap=self.cmap)\n imagebox.image.axes = self.ax2d\n ab = offsetbox.AnnotationBbox(imagebox, xy, xycoords='data', pad=0.0, frameon=True )\n self.ax2d.add_artist(ab)\n return ab\n \n\nclass ImageAnnotations3D(object):\n \"\"\" Displays images directly in the plot. \"\"\"\n\n def __init__(self, xyz, imgs, ax3d=None, ax2d=None, cmap=plt.get_cmap(\"Greys\")):\n\n # store attributes\n self.xyz = xyz\n self.imgs = imgs\n self.cmap = cmap\n self.ax3d = plt.gca() if ax3d is None else ax3d \n if ax2d is None:\n ax2d = plt.gcf().add_subplot(111,frame_on=False) \n ax2d.axis(\"off\")\n self.ax2d = ax2d\n\n # draw images\n self.annot = []\n for s,im in zip(self.xyz, self.imgs):\n x,y = self.proj(s)\n self.annot.append(self.image(im,[x,y]))\n self.lim = self.ax3d.get_w_lims()\n self.rot = self.ax3d.get_proj()\n self.cid = self.ax3d.figure.canvas.mpl_connect(\"draw_event\",self.update)\n\n # register events\n self.funcmap = {\"button_press_event\" : self.ax3d._button_press,\n \"motion_notify_event\" : self.ax3d._on_move,\n \"button_release_event\" : self.ax3d._button_release}\n self.cfs = [self.ax3d.figure.canvas.mpl_connect(kind, self.cb) \\\n for kind in self.funcmap.keys()]\n\n def cb(self, event):\n event.inaxes = self.ax3d\n self.funcmap[event.name](event)\n\n def proj(self, X):\n \"\"\" From a 3D point in axes ax1, calculate position in 2D in ax2 \"\"\"\n x, y, z = X\n x2, y2, _ = proj3d.proj_transform(x,y,z, self.ax3d.get_proj())\n tr = self.ax3d.transData.transform((x2, y2))\n return self.ax2d.transData.inverted().transform(tr)\n\n def image(self,arr,xy):\n \"\"\" Place an image (arr) as annotation at position xy \"\"\"\n im = offsetbox.OffsetImage(arr, zoom=2, cmap=self.cmap)\n im.image.axes = self.ax3d\n ab = offsetbox.AnnotationBbox(im, xy, #xybox=(-30., 30.),\n xycoords='data', boxcoords=\"offset points\",\n pad=0.0,) # arrowprops=dict(arrowstyle=\"->\"))\n self.ax2d.add_artist(ab)\n\n return ab\n\n def update(self,event):\n \"\"\" Update drawing \"\"\"\n if np.any(self.ax3d.get_w_lims() != self.lim) or \\\n np.any(self.ax3d.get_proj() != self.rot):\n self.lim = self.ax3d.get_w_lims()\n self.rot = self.ax3d.get_proj()\n for s,ab in zip(self.xyz, self.annot):\n ab.xy = self.proj(s)\n"
},
{
"alpha_fraction": 0.5351710915565491,
"alphanum_fraction": 0.5420151948928833,
"avg_line_length": 44.35344696044922,
"blob_id": "b5fdc0a4d7a386058fc9e58a99b82c27caba3337",
"content_id": "212010d6042bbec04726d9840253c9d8de6fb319",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5260,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 116,
"path": "/sjf_data_viz/utils/representations.py",
"repo_name": "dzambon/sjf-data-viz",
"src_encoding": "UTF-8",
"text": "from sklearn.manifold._t_sne import TSNE, _kl_divergence, _kl_divergence_bh, _openmp_effective_n_threads, \\\n _gradient_descent\n\nclass JumpingTSNE(TSNE):\n\n def __init__(self, jump_size=None, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.jump_size = jump_size\n self.X_embedded_jumps = []\n self.momentum_jumps = []\n\n def num_jumps_per_type(self):\n from collections import Counter\n cnt = Counter(self.momentum_jumps)\n assert len(cnt.keys()) == 2\n return cnt[min(cnt.keys())], cnt[max(cnt.keys())]\n\n def _tsne(self, P, degrees_of_freedom, n_samples, X_embedded,\n neighbors=None, skip_num_points=0):\n \"\"\"Runs t-SNE.\"\"\"\n # t-SNE minimizes the Kullback-Leiber divergence of the Gaussians P\n # and the Student's t-distributions Q. The optimization algorithm that\n # we use is batch gradient descent with two stages:\n # * initial optimization with early exaggeration and momentum at 0.5\n # * final optimization with momentum at 0.8\n params = X_embedded.ravel()\n\n opt_args = {\n \"it\": 0,\n \"n_iter_check\": self._N_ITER_CHECK,\n \"min_grad_norm\": self.min_grad_norm,\n \"learning_rate\": self.learning_rate,\n \"verbose\": self.verbose,\n \"kwargs\": dict(skip_num_points=skip_num_points),\n \"args\": [P, degrees_of_freedom, n_samples, self.n_components],\n \"n_iter_without_progress\": self._EXPLORATION_N_ITER,\n \"n_iter\": self._EXPLORATION_N_ITER,\n \"momentum\": 0.5,\n }\n if self.method == 'barnes_hut':\n obj_func = _kl_divergence_bh\n opt_args['kwargs']['angle'] = self.angle\n # Repeat verbose argument for _kl_divergence_bh\n opt_args['kwargs']['verbose'] = self.verbose\n # Get the number of threads for gradient computation here to\n # avoid recomputing it at each iteration.\n opt_args['kwargs']['num_threads'] = _openmp_effective_n_threads()\n else:\n obj_func = _kl_divergence\n\n # Learning schedule (part 1): do 250 iteration with lower momentum but\n # higher learning rate controlled via the early exaggeration parameter\n P *= self.early_exaggeration\n self.jump_size = self.jump_size//2\n params, kl_divergence, it = self.jumped_gradient_descent(obj_func, params,\n **opt_args)\n self.jump_size = self.jump_size*2\n if self.verbose:\n print(\"[t-SNE] KL divergence after %d iterations with early \"\n \"exaggeration: %f\" % (it + 1, kl_divergence))\n\n # Learning schedule (part 2): disable early exaggeration and finish\n # optimization with a higher momentum at 0.8\n P /= self.early_exaggeration\n remaining = self.n_iter - self._EXPLORATION_N_ITER\n if it < self._EXPLORATION_N_ITER or remaining > 0:\n opt_args['n_iter'] = self.n_iter\n opt_args['it'] = it + 1\n opt_args['momentum'] = 0.8\n opt_args['n_iter_without_progress'] = self.n_iter_without_progress\n params, kl_divergence, it = self.jumped_gradient_descent(obj_func, params,\n **opt_args)\n\n # Save the final number of iterations\n self.n_iter_ = it\n\n if self.verbose:\n print(\"[t-SNE] KL divergence after %d iterations: %f\"\n % (it + 1, kl_divergence))\n\n X_embedded = params.reshape(n_samples, self.n_components)\n self.kl_divergence_ = kl_divergence\n\n return X_embedded\n\n\n def jumped_gradient_descent(self, obj_func, params, **opt_args):\n it = opt_args['it'] - 1\n\n n_jump_without_progress = opt_args[\"n_iter_without_progress\"] // self.jump_size + 1\n remaining = opt_args['n_iter'] - it\n n_jumps = remaining // self.jump_size + 1\n ct_no_improvement = 0\n kl_divergence_best = None\n \n new_opt_args = opt_args.copy()\n for jump in range(n_jumps):\n new_opt_args['it'] = it + 1\n 
new_opt_args['n_iter'] = it + self.jump_size\n params, kl_divergence, it = _gradient_descent(obj_func, params,\n **new_opt_args)\n self.X_embedded_jumps.append(params.reshape(-1, self.n_components))\n self.momentum_jumps.append(opt_args['momentum'])\n if jump > 0:\n print(\"\\rJump {}/{}: best_kl={:.6f}\\t current_kl={:.6f}\".format(jump + 1, n_jumps,\n kl_divergence_best,\n kl_divergence), end=\"\", flush=True)\n if kl_divergence_best is None or kl_divergence < kl_divergence_best:\n kl_divergence_best = kl_divergence\n ct_no_improvement = 0\n else:\n ct_no_improvement += 1\n if ct_no_improvement >= n_jump_without_progress:\n break\n print()\n return params, kl_divergence, it"
}
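Stripped of the sklearn plumbing, `jumped_gradient_descent` above is a generic control-flow pattern: run the optimizer in short bursts, snapshot after each burst, and stop once a patience budget passes without improvement. A sketch of that shape with hypothetical names (`step_chunk` stands in for sklearn's `_gradient_descent` run for `jump_size` iterations):

```python
# Generic shape of JumpingTSNE.jumped_gradient_descent; names here
# are illustrative only, not part of the project's API.
def chunked_descent(params, step_chunk, n_chunks, patience):
    best_loss, snapshots, stalled = None, [], 0
    for _ in range(n_chunks):
        params, loss = step_chunk(params)   # one burst of iterations
        snapshots.append(params.copy())     # keep a frame for the animation
        if best_loss is None or loss < best_loss:
            best_loss, stalled = loss, 0
        else:
            stalled += 1
            if stalled >= patience:         # no progress for a while: stop
                break
    return params, best_loss, snapshots
```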
] | 10 |
DiorChoppa/CPiscine | https://github.com/DiorChoppa/CPiscine | 169f6c967495acc6a12394dbd93ed9f759b9e4d3 | 9cef17654ec8643a03687dd6292673ae37a5083c | ad391cb7d9f8f15d6bd5fa6c87cf1da32026fcd0 | refs/heads/main | 2023-07-15T23:06:47.539688 | 2021-08-31T14:00:54 | 2021-08-31T14:00:54 | 401,705,284 | 3 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.4047267436981201,
"alphanum_fraction": 0.4176514148712158,
"avg_line_length": 22.34482765197754,
"blob_id": "2656d03f5fadb332088844b02e3fae415c8f2acf",
"content_id": "812799db94dfe6b2d52a4bbff24bfae20a041e31",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2708,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 116,
"path": "/Final Project/srcs/solution.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* solution.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: fyusuf-a <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/19 12:24:45 by fyusuf-a #+# #+# */\n/* Updated: 2019/08/21 20:08:58 by fyusuf-a ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include \"bsq.h\"\n#include \"solution.h\"\n#include \"utilities.h\"\n\nt_bool\t\t\tfind_solution(t_solution *sol, const t_grid *grid)\n{\n\tUINT\t\ti;\n\tUINT\t\tj;\n\tt_solution\tintent;\n\n\ti = 0;\n\tsol->size = 0;\n\tintent.size = 1;\n\twhile (i < grid->h - sol->size)\n\t{\n\t\tj = 0;\n\t\twhile (j < grid->w - sol->size)\n\t\t{\n\t\t\tintent.x = j;\n\t\t\tintent.y = i;\n\t\t\tactualize_sol(sol, &intent, grid);\n\t\t\tj++;\n\t\t}\n\t\ti++;\n\t}\n\treturn (sol->size ? true : false);\n}\n\nstatic void\t\tactualize_sol(t_solution *sol, t_solution *intent,\n\t\t\t\t\tconst t_grid *grid)\n{\n\tif (!fits(intent, grid))\n\t\treturn ;\n\tsol->x = intent->x;\n\tsol->y = intent->y;\n\tsol->size++;\n\tintent->size++;\n\twhile (fits_succ(intent, grid))\n\t{\n\t\tsol->size++;\n\t\tintent->size++;\n\t}\n}\n\nstatic t_bool\tfits(const t_solution *intent, const t_grid *grid)\n{\n\tint i;\n\tint i_max;\n\tint j;\n\tint j_max;\n\n\tif (dont_fit_basic(intent, grid))\n\t\treturn (false);\n\ti = intent->y;\n\ti_max = i + intent->size;\n\tj_max = intent->x + intent->size;\n\twhile (i < i_max)\n\t{\n\t\tj = intent->x;\n\t\twhile (j < j_max)\n\t\t{\n\t\t\tif (grid->map[i][j] == grid->translate[obstacle])\n\t\t\t\treturn (false);\n\t\t\tj++;\n\t\t}\n\t\ti++;\n\t}\n\treturn (true);\n}\n\nstatic t_bool\tfits_succ(const t_solution *intent, const t_grid *grid)\n{\n\tint i;\n\tint j;\n\tint i_max;\n\n\tif (dont_fit_basic(intent, grid))\n\t\treturn (false);\n\ti = intent->x;\n\ti_max = i + intent->size;\n\tj = intent->y + intent->size - 1;\n\twhile (i < i_max)\n\t{\n\t\tif (grid->map[j][i] == grid->translate[obstacle])\n\t\t\treturn (false);\n\t\ti++;\n\t}\n\ti = intent->y;\n\ti_max = i + intent->size - 1;\n\tj = intent->x + intent->size - 1;\n\twhile (i < i_max)\n\t{\n\t\tif (grid->map[i][j] == grid->translate[obstacle])\n\t\t\treturn (false);\n\t\ti++;\n\t}\n\treturn (true);\n}\n\nstatic t_bool\tdont_fit_basic(const t_solution *intent, const t_grid *grid)\n{\n\treturn (intent->x + intent->size > grid->w\n\t\t\t|| intent->y + intent->size > grid->h);\n}\n"
},
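For readers who don't follow C, the scan in solution.c re-expressed in Python (the C version is faster in practice because `fits_succ` only re-checks the newly added row and column when growing a candidate, while this sketch re-checks the whole square):

```python
# Python re-expression of find_solution/fits from solution.c: try each
# top-left corner, grow the candidate square while it stays obstacle-free,
# and keep the largest (earliest-found) square.
def find_largest_square(grid, obstacle="o"):
    h, w = len(grid), len(grid[0])

    def fits(x, y, size):
        if x + size > w or y + size > h:
            return False
        return all(grid[i][j] != obstacle
                   for i in range(y, y + size)
                   for j in range(x, x + size))

    best = (0, 0, 0)  # (x, y, size); size 0 means no square found
    for y in range(h):
        for x in range(w):
            size = best[2] + 1          # only try to beat the current best
            while fits(x, y, size):
                best = (x, y, size)
                size += 1
    return best
```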
{
"alpha_fraction": 0.2521856129169464,
"alphanum_fraction": 0.27841290831565857,
"avg_line_length": 27.056604385375977,
"blob_id": "eb0d2d5fcc7eec5c719d8b380b17f4ec64908fa4",
"content_id": "078d160aa4f20412b86e240ff99d12188a8db720",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1487,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 53,
"path": "/C11/ex03/ft_count_if_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_count_if.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/16 11:59:11 by ecaceres #+# #+# */\n/* Updated: 2019/08/16 11:59:12 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdio.h>\n#include <stdlib.h>\n\nint\tft_count_if(char **tab, int length, int (*f)(char*))\n{\n\tint\tindex;\n\tint\tcount;\n\n\tindex = 0;\n\tcount = 0;\n\twhile (index < length)\n\t{\n\t\tif ((*f)(tab[index]) != 0)\n\t\t\tcount++;\n\t\tindex++;\n\t}\n\treturn (count);\n}\n\nint\tft_is_first_x(char *str)\n{\n\tif (*str == 'X')\n\t\treturn (1);\n\treturn (0);\n}\n\nint\tmain(void)\n{\n\tint\t\tlength;\n\tchar\t**array;\n\n\tlength = 5;\n\tarray = malloc(length * sizeof(char *));\n\tarray[0] = \"O\";\n\tarray[1] = \"X\";\n\tarray[2] = \"O\";\n\tarray[3] = \"X\";\n\tarray[4] = \"O\";\n\tprintf(\"ft_count_if(...) = %d\\n\",\n\t\tft_count_if(array, length, &ft_is_first_x));\n}\n"
},
{
"alpha_fraction": 0.544280469417572,
"alphanum_fraction": 0.5553505420684814,
"avg_line_length": 14.485713958740234,
"blob_id": "87e87619bed5779181fb38f5c8232c64860f4b8d",
"content_id": "8144809b361cb4e70ed25d542179bcbd23f860c5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 542,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 35,
"path": "/Rush00/ex00/rush00.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdbool.h>\n\nvoid\tft_putchar(char c);\n\nvoid\tprint_if(bool condition, char if_true, char if_false)\n{\n\tif (condition)\n\t\tft_putchar(if_true);\n\telse\n\t\tft_putchar(if_false);\n}\n\nvoid\trush(int x, int y)\n{\n\tint\t\tline;\n\tint\t\tcolumn;\n\tbool\tlast_column;\n\n\tline = 0;\n\twhile (line < y)\n\t{\n\t\tcolumn = 0;\n\t\twhile (column < x)\n\t\t{\n\t\t\tlast_column = column == 0 || column == x - 1;\n\t\t\tif (line == 0 || line == y - 1)\n\t\t\t\tprint_if(last_column, 'o', '-');\n\t\t\telse\n\t\t\t\tprint_if(last_column, '|', ' ');\n\t\t\tcolumn++;\n\t\t}\n\t\tft_putchar('\\n');\n\t\tline++;\n\t}\n}\n"
},
{
"alpha_fraction": 0.6035254597663879,
"alphanum_fraction": 0.6168158650398254,
"avg_line_length": 55.28346633911133,
"blob_id": "2e3ce73c2986d10fd3d93f1fd8be9864df6db988",
"content_id": "01a94abefb0f14c7dce766373bcbb231e6e54f38",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 7148,
"license_type": "no_license",
"max_line_length": 197,
"num_lines": 127,
"path": "/C10/ex02/Makefile",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "C_YELLOW_B=\\033[1;33m\nC_LIGHT_RED_B=\\033[0;91m\nC_WHITE=\\033[0;97m\nC_RESET=\\033[0;39m\n\nSRCS\t= ./srcs/ft_abs.c ./srcs/ft_args_parser.c ./srcs/ft_atoi.c ./srcs/ft_console_io.c ./srcs/ft_is.c ./srcs/ft_strncpy.c ./srcs/ft_tail.c ./srcs/ft_tail_out.c ./srcs/ft_tail_utils.c ./srcs/main.c \nOBJS\t= ${SRCS:.c=.o}\nINCS\t= includes\nNAME\t= ft_tail\nCC\t\t= gcc\nRM\t\t= rm -f\nCFLAGS\t= -Wall -Wextra -Werror\n\n.c.o :\n\t${CC} ${CFLAGS} -c $< -o ${<:.c=.o} -I${INCS}\n\n${NAME} : ${OBJS}\n\t${CC} ${CFLAGS} ${OBJS} -o ${NAME}\n\nall : ${NAME}\n\nclean :\n\t${RM} ${OBJS}\n\nfclean : clean\n\t${RM} ${NAME}\n\nnorm :\n\tnorminette -R CheckForbiddenSourceHeader */*.[ch]\n\nre : fclean all\n\ntest : re\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)Single file: ./srcs/ft_abs.c$(C_RESET)\" ;\n\t@tail ./srcs/ft_abs.c > result-off.txt ;\n\t@./ft_tail ./srcs/ft_abs.c > result-usr.txt ;\n\t@git --no-pager diff --no-index --text result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)Multiple file: ./srcs/ft_abs.c ./srcs/ft_abs.c$(C_RESET)\" ;\n\t@tail ./srcs/ft_abs.c ./srcs/ft_abs.c > result-off.txt ;\n\t@./ft_tail ./srcs/ft_abs.c ./srcs/ft_abs.c > result-usr.txt ;\n\t@git --no-pager diff --no-index --text result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)From STDIN: x < ./Makefile$(C_RESET)\" ;\n\t@tail < ./Makefile > result-off.txt ;\n\t@./ft_tail < ./Makefile > result-usr.txt ;\n\t@git --no-pager diff --no-index --text result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)From STDIN: x < ./srcs/ft_abs.c$(C_RESET)\" ;\n\t@tail < ./srcs/ft_abs.c > result-off.txt ;\n\t@./ft_tail < ./srcs/ft_abs.c > result-usr.txt ;\n\t@git --no-pager diff --no-index --text result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)From file and from STDIN (priority test): x ./srcs/ft_abs.c < ./srcs/ft_abs.c$(C_RESET)\" ;\n\t@tail ./srcs/ft_abs.c < ./srcs/ft_abs.c > result-off.txt ;\n\t@./ft_tail ./srcs/ft_abs.c < ./srcs/ft_abs.c > result-usr.txt ;\n\t@git --no-pager diff --no-index --text result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)Multiple file with -c 25: ./srcs/ft_abs.c ./srcs/ft_abs.c$(C_RESET)\" ;\n\t@tail -c 25 ./srcs/ft_abs.c ./srcs/ft_abs.c > result-off.txt ;\n\t@./ft_tail -c 25 ./srcs/ft_abs.c ./srcs/ft_abs.c > result-usr.txt ;\n\t@git --no-pager diff --no-index --text result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)With directory: ./includes/$(C_RESET)\" ;\n\t@tail ./includes/ > result-off.txt ;\n\t@./ft_tail ./includes/ > result-usr.txt ;\n\t@git --no-pager diff --no-index --text result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)On 2 directory: ./srcs/ ./srcs/$(C_RESET)\" ;\n\t@tail ./srcs/ ./srcs/ > result-off.txt || true ;\n\t@./ft_tail ./srcs/ ./srcs/ > result-usr.txt || true ;\n\t@git --no-pager diff --no-index --text result-usr.txt result-off.txt 
|| true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)On a random binary file: ./tests/random.bin$(C_RESET)\" ;\n\t@tail ./tests/random.bin > result-off.txt || true ;\n\t@./ft_tail ./tests/random.bin > result-usr.txt || true ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)On a random binary file: ./tests/random2.bin$(C_RESET)\" ;\n\t@tail ./tests/random2.bin > result-off.txt || true ;\n\t@./ft_tail ./tests/random2.bin > result-usr.txt || true ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)On a random binary file with -c 200: ./tests/random.bin$(C_RESET)\" ;\n\t@tail -c 200 ./tests/random.bin > result-off.txt || true ;\n\t@./ft_tail -c 200 ./tests/random.bin > result-usr.txt || true ;\n\t@git --no-pager diff --no-index --text result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)On a random binary file with -c 200: ./tests/random.bin2$(C_RESET)\" ;\n\t@tail -c 200 ./tests/random2.bin > result-off.txt || true ;\n\t@./ft_tail -c 200 ./tests/random2.bin > result-usr.txt || true ;\n\t@git --no-pager diff --no-index --text result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)On a empty file: ./tests/empty$(C_RESET)\" ;\n\t@touch ./tests/empty\n\t@tail ./tests/empty > out 2>result-off.txt || true ;\n\t@./ft_tail ./tests/empty > out 2>result-usr.txt || true ;\n\t@git --no-pager diff --no-index --text result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)On a empty file with STDIN: ./tests/empty$(C_RESET)\" ;\n\t@tail < ./tests/empty > out 2>result-off.txt || true ;\n\t@./ft_tail < ./tests/empty > out 2>result-usr.txt || true ;\n\t@git --no-pager diff --no-index --text result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ MUST HAVE DIFF ]$(C_RESET) $(C_WHITE)Error -> double -c: -c 25 -c 26$(C_RESET)\" ;\n\t@tail -c 25 -c 26 > out 2>result-off.txt || true ;\n\t@./ft_tail -c 25 -c 26 > out 2>result-usr.txt || true ;\n\t@git --no-pager diff --no-index --text result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ MUST HAVE DIFF ]$(C_RESET) $(C_WHITE)Error -> double -c but not completed: -c 25$(C_RESET)\" ;\n\t@tail -c 25 -c > out 2>result-off.txt || true ;\n\t@./ft_tail -c 25 -c > out 2>result-usr.txt || true ;\n\t@git --no-pager diff --no-index --text result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ MUST HAVE DIFF ]$(C_RESET) $(C_WHITE)Error -> illegal -c value: -c 25a$(C_RESET)\" ;\n\t@tail -c 25a > out 2>result-off.txt || true ;\n\t@./ft_tail -c 25a > out 2>result-usr.txt || true ;\n\t@git --no-pager diff --no-index --text result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ MUST HAVE DIFF ]$(C_RESET) $(C_WHITE)Multiple file with invalid: ./srcs/ft_abs.c a ./srcs/ft_abs.c$(C_RESET)\" ;\n\t@tail -c 25 ./srcs/ft_abs.c a ./srcs/ft_abs.c > out 
2>result-off.txt || true ;\n\t@./ft_tail -c 25 ./srcs/ft_abs.c a ./srcs/ft_abs.c > out 2>result-usr.txt || true ;\n\t@git --no-pager diff --no-index --text result-usr.txt result-off.txt || true ;\n\t@##\n\t@rm result-off.txt result-usr.txt out ./tests/empty ;\n\n.PHONY: all clean fclean re .c.o test\n"
},
{
"alpha_fraction": 0.6248989701271057,
"alphanum_fraction": 0.6329830288887024,
"avg_line_length": 16.671428680419922,
"blob_id": "61ad7ab7ceab4cc01e3ec782a8ef3b887897cced",
"content_id": "b324d05087093ea8f69a8dae8d22f3ff629d2f1b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1237,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 70,
"path": "/C10/ex01/srcs/cat.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <libgen.h>\n#include <fcntl.h>\n#include <unistd.h>\n#include <string.h>\n#include <errno.h>\n#include <stdbool.h>\n\n#include \"cat.h\"\n\nvoid\tshow_error(char *executable, char *path)\n{\n\tif (errno == 0)\n\t\treturn ;\n\tstr_write(ERR, basename(executable));\n\tstr_write(ERR, \": \");\n\tstr_write(ERR, path);\n\tstr_write(ERR, \": \");\n\tstr_write(ERR, strerror(errno));\n\tstr_write(ERR, \"\\n\");\n\terrno = 0;\n}\n\nvoid\tdo_cat(char *executable, char *path, int file_descriptor)\n{\n\tunsigned int\tbyte_read;\n\tchar\t\t\tbuffer[BUFFER_SIZE];\n\n\twhile ((byte_read = read(file_descriptor, buffer, BUFFER_SIZE)) > 0)\n\t{\n\t\tif (errno != 0)\n\t\t{\n\t\t\tshow_error(executable, path);\n\t\t\tbreak ;\n\t\t}\n\t\twrite(OUT, &buffer[0], byte_read);\n\t}\n\tif (file_descriptor > 2)\n\t\tclose(file_descriptor);\n}\n\nvoid\tstdin_loop(void)\n{\n\tdo_cat(NULL, NULL, 0);\n}\n\nbool\tcat(char **paths, int count, int offset)\n{\n\tbool\tresult;\n\tint\t\tfile_descriptor;\n\tint\t\tindex;\n\tchar\t*path;\n\n\tresult = true;\n\tindex = offset;\n\twhile (index < count + offset)\n\t{\n\t\tpath = paths[index];\n\t\tfile_descriptor = open(path, O_RDONLY);\n\t\tif (file_descriptor < 0)\n\t\t{\n\t\t\tshow_error(paths[0], path);\n\t\t\tresult = false;\n\t\t}\n\t\telse\n\t\t\tdo_cat(paths[0], path, file_descriptor);\n\t\tindex++;\n\t}\n\treturn (result);\n}\n"
},
{
"alpha_fraction": 0.2834506928920746,
"alphanum_fraction": 0.31690141558647156,
"avg_line_length": 24.432836532592773,
"blob_id": "59aeaa1ff5c414e1fef9774df25c193d2dd71f23",
"content_id": "7baa52764f073e6fdc9bf3c79f4e8cacf53a7d19",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1704,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 67,
"path": "/C05/ex06/ft_is_prime_dev_mine_only.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_is_prime.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/06 13:50:09 by ecaceres #+# #+# */\n/* Updated: 2019/08/06 13:50:11 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdio.h>\n\nint\tft_sqrt(int nb)\n{\n\tunsigned int\t\tsqrt;\n\tunsigned int\t\tindex;\n\n\tif (nb < 0)\n\t\treturn (0);\n\tif (nb <= 1)\n\t\treturn (nb);\n\tindex = 0;\n\twhile ((sqrt = index * index) <= (unsigned int)nb)\n\t\tindex++;\n\tindex -= 1;\n\treturn (index);\n}\n\nint\tft_is_prime(int nb)\n{\n\tint\tindex;\n\tint\tsqrt;\n\n\tif (nb <= 1)\n\t\treturn (0);\n\tif (nb <= 3)\n\t\treturn (1);\n\tif (nb % 2 == 0 || nb % 3 == 0)\n\t\treturn (0);\n\tindex = 2;\n\tsqrt = ft_sqrt(nb);\n\twhile ((index <= sqrt) && (nb % index != 0))\n\t\tindex++;\n\treturn (index > sqrt);\n}\n\nint\tmain(void)\n{\n\tint\tnumber;\n\tint\tresult;\n\tint\tcount;\n\n\tnumber = 0;\n\tcount = 0;\n\twhile (number <= 50000)\n\t{\n\t\tresult = ft_is_prime(number);\n\t\tprintf(\"is_prime(%d) = %d\\n\", number, result);\n\t\tnumber++;\n\t\tif (result)\n\t\t\tcount++;\n\t}\n\tprintf(\"first 50'000 number, %d prime\\n\", count);\n\treturn (0);\n}\n"
},
{
"alpha_fraction": 0.3333333432674408,
"alphanum_fraction": 0.3333333432674408,
"avg_line_length": 32,
"blob_id": "33e950dc548bf9421967988e1cd0c526726c1cd2",
"content_id": "96ceb8de8fb015b9e729a6621742a73f598e83be",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 33,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 1,
"path": "/Shell01/ex05/command.sh",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "touch \"\\\"\\\\?\\$*'MaRViN'*\\$?\\\\\\\"\"\n"
},
{
"alpha_fraction": 0.637499988079071,
"alphanum_fraction": 0.643750011920929,
"avg_line_length": 19,
"blob_id": "47affa6da0373b4c2e6b1bc75a9a24483e6e3a5c",
"content_id": "42b541154551b6454b4b4425fe16ece8b658ea96",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 160,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 8,
"path": "/C12/ex03/ft_list_last.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include \"ft_list.h\"\n\nt_list\t*ft_list_last(t_list *begin_list)\n{\n\tif (begin_list->next == 0)\n\t\treturn (begin_list);\n\treturn (ft_list_last(begin_list->next));\n}\n"
},
{
"alpha_fraction": 0.3608984053134918,
"alphanum_fraction": 0.3808065354824066,
"avg_line_length": 25.472972869873047,
"blob_id": "55c52113476e205e5c6c9c1e8bd065237f575d83",
"content_id": "8fb40bf070eb5d1dc1bb99e0f09173363686762d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1959,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 74,
"path": "/Rush01/ex00/bruteforce_search.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* bruteforce_search.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/10 19:30:27 by ecaceres #+# #+# */\n/* Updated: 2019/08/10 19:30:29 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include \"boolean.h\"\n#include \"constraint.h\"\n#include \"checker.h\"\n#include \"maths.h\"\n\nt_bool\tfill_grid_row(int *row, int size, int offset)\n{\n\tint\tindex;\n\tint\tjndex;\n\tint\tpower;\n\n\tindex = 0;\n\tpower = 1;\n\twhile (index < size)\n\t{\n\t\trow[size - 1 - index] = (offset / power) % size + 1;\n\t\tpower *= size;\n\t\tindex++;\n\t}\n\tindex = 0;\n\twhile (index < size)\n\t{\n\t\tjndex = index + 1;\n\t\twhile (jndex < size)\n\t\t{\n\t\t\tif (row[index] == row[jndex])\n\t\t\t\treturn (false);\n\t\t\tjndex++;\n\t\t}\n\t\tindex++;\n\t}\n\treturn (true);\n}\n\nt_bool\trecursive_search(int **grid, t_constr cnstr[4], int comb, int row)\n{\n\tint\tsize;\n\tint\toffset;\n\n\tsize = cnstr[UP].size;\n\tif (row >= size)\n\t\treturn (check_grid_validity(grid, size, cnstr));\n\toffset = 0;\n\twhile (offset < comb)\n\t{\n\t\tif (fill_grid_row(grid[row], size, offset))\n\t\t{\n\t\t\tif (recursive_search(grid, cnstr, comb, row + 1))\n\t\t\t\treturn (true);\n\t\t}\n\t\toffset++;\n\t}\n\treturn (false);\n}\n\nt_bool\tsearch_solution(int **grid, int size, t_constr cnstr[4])\n{\n\tint\tcomb_count;\n\n\tcomb_count = ft_power(size, size);\n\treturn (recursive_search(grid, cnstr, comb_count, 0));\n}\n"
},
{
"alpha_fraction": 0.6666666865348816,
"alphanum_fraction": 0.677570104598999,
"avg_line_length": 19.70967674255371,
"blob_id": "25909aa7d40274c5a6831f421f8f5d910623f0b3",
"content_id": "dfec2b9d78d0f9f5c21daf1f876bbc737182c881",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 642,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 31,
"path": "/C10/ex03/srcs/ft_hexdump.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdbool.h>\n#include <errno.h>\n\n#include \"ft_args_parser.h\"\n#include \"ft_console_io.h\"\n#include \"ft_file_utils.h\"\n#include \"ft_dumper.h\"\n\nbool\tft_process_input(t_options *options, int count)\n{\n\tunsigned int\tlength;\n\tchar\t\t\t*content;\n\n\tlength = 0;\n\tif (count == -1)\n\t\tcontent = ft_read_full(IN, &length);\n\telse\n\t\tcontent = ft_read_multiple(options, count, &length, count);\n\tft_dump(options, content, length, length / 16 + 1);\n\treturn (errno == 0);\n}\n\nbool\tft_stdin_hexdump(t_options *options)\n{\n\treturn (ft_process_input(options, -1));\n}\n\nbool\tft_hexdump(t_options *options, int count)\n{\n\treturn (ft_process_input(options, count));\n}\n"
},
{
"alpha_fraction": 0.6746987700462341,
"alphanum_fraction": 0.6746987700462341,
"avg_line_length": 12.833333015441895,
"blob_id": "1f9d2065b4c68160763a42f4a11bf155ac2987f7",
"content_id": "cd11fd154b835cab3e654402fffcdd32124ccaa1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 166,
"license_type": "no_license",
"max_line_length": 40,
"num_lines": 12,
"path": "/C10/ex02/includes/ft_is.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_IS_H\n# define FT_IS_H\n\n# include <stdbool.h>\n\nbool\tft_is_in_string(char c, char *str);\n\nbool\tft_is_whitespace(char c);\n\nbool\tft_is_number(char c);\n\n#endif\n"
},
{
"alpha_fraction": 0.37025171518325806,
"alphanum_fraction": 0.38855835795402527,
"avg_line_length": 26.3125,
"blob_id": "78e9f0d689d90586983c0991fa993a0d7b4f5b80",
"content_id": "2c1653955e47f62790fbe008ae3f0b3326f7ce5e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2185,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 80,
"path": "/C08/ex04/ft_strs_to_tab_dev2.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_strs_to_tab.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/11 18:01:29 by ecaceres #+# #+# */\n/* Updated: 2019/08/11 19:47:15 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdlib.h>\n#include <stdio.h>\n\n#include \"ft_stock_str.h\"\n\nint\t\t\t\t\tft_str_length(char *str)\n{\n\tint\tindex;\n\n\tindex = 0;\n\twhile (str[index])\n\t\tindex++;\n\treturn (index);\n}\n\nchar\t\t\t\t*ft_strdup(char *src)\n{\n\tint\t\tindex;\n\tchar\t*dest;\n\n\tindex = 0;\n\tif ((dest = (char *)malloc(ft_str_length(src) * sizeof(char) + 1)) == NULL)\n\t\treturn (0);\n\twhile (src[index])\n\t{\n\t\tdest[index] = src[index];\n\t\tindex++;\n\t}\n\tdest[index] = '\\0';\n\treturn (dest);\n}\n\nstruct s_stock_str\t*ft_strs_to_tab(int ac, char **av)\n{\n\tint\t\t\t\t\tindex;\n\tstruct s_stock_str\t*array;\n\n\tac = ac + 0;\n\tif (!(array = malloc((ac + 1) * sizeof(struct s_stock_str))))\n\t\treturn (NULL);\n\tindex = 0;\n\twhile (index < ac)\n\t{\n\t\tarray[index].size = ft_str_length(av[index]);\n\t\tarray[index].str = av[index];\n\t\tarray[index].copy = ft_strdup(av[index]);\n\t\tindex++;\n\t}\n\tarray[index] = (struct s_stock_str){0, 0, 0};\n\treturn (array);\n}\n\nint\t\t\t\t\tmain(int argc, char **argv)\n{\n\tint\t\t\t\t\tindex;\n\tstruct s_stock_str\t*structs;\n\n\tstructs = ft_strs_to_tab(argc, argv);\n\tindex = 0;\n\twhile (index < argc)\n\t{\n\t\tprintf(\"%d\\n\", index);\n\t\tprintf(\"\\t| original : $%s$ @ %p\\n\", structs[index].str, structs[index].str);\n\t\tprintf(\"\\t| copied : $%s$ @ %p\\n\", structs[index].copy, structs[index].copy);\n\t\tprintf(\"\\t| size : %d\\n\", structs[index].size);\n\t\tindex++;\n\t}\n}\n"
},
{
"alpha_fraction": 0.5080128312110901,
"alphanum_fraction": 0.5336538553237915,
"avg_line_length": 12.866666793823242,
"blob_id": "356a52de4cfba5e7ceb932f7bd13d92e2eaa6b8f",
"content_id": "0562f8d590b754cc8732c3fc2136ed29ea5edd93",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 624,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 45,
"path": "/C05/ex07/ft_find_next_prime.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdbool.h>\n\nint\tft_sqrt(int nb)\n{\n\tunsigned int\t\tsqrt;\n\tunsigned int\t\tindex;\n\n\tif (nb < 0)\n\t\treturn (0);\n\tif (nb <= 1)\n\t\treturn (nb);\n\tindex = 0;\n\twhile ((sqrt = index * index) <= (unsigned int)nb)\n\t\tindex++;\n\tindex -= 1;\n\treturn (index);\n}\n\nint\tft_is_prime(int nb)\n{\n\tint\tindex;\n\tint\tsqrt;\n\n\tif (nb <= 1)\n\t\treturn (0);\n\tif (nb <= 3)\n\t\treturn (1);\n\tif (nb % 2 == 0 || nb % 3 == 0)\n\t\treturn (0);\n\tindex = 2;\n\tsqrt = ft_sqrt(nb);\n\twhile ((index <= sqrt) && (nb % index != 0))\n\t\tindex++;\n\treturn (index > sqrt);\n}\n\nint\tft_find_next_prime(int nb)\n{\n\twhile (true)\n\t{\n\t\tif (ft_is_prime(nb))\n\t\t\treturn (nb);\n\t\tnb++;\n\t}\n}\n"
},
{
"alpha_fraction": 0.28512680530548096,
"alphanum_fraction": 0.3077450394630432,
"avg_line_length": 32.930233001708984,
"blob_id": "63f9f1d2124ad17c6b54fe02f26f8b499203fc69",
"content_id": "209e3c9c5b3a6819a6401d18f1f5ca3cd3e23f46",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1459,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 43,
"path": "/C12/ex03/ft_list_last_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_list_last.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/22 14:39:19 by ecaceres #+# #+# */\n/* Updated: 2019/08/22 14:39:19 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include \"ft_list.h\"\n\n#include <stdio.h>\n#include <stdlib.h>\n\nt_list\t*ft_list_last(t_list *begin_list)\n{\n\tif (begin_list->next == 0)\n\t\treturn (begin_list);\n\treturn (ft_list_last(begin_list->next));\n}\n\nint\t\tmain(void)\n{\n\tint\t\tindex;\n\tint\t\t*malloced_index;\n\tt_list\t*list;\n\tt_list\t*last;\n\n\tlist = ft_create_elem(0);\n\tindex = 0;\n\twhile (index < 10)\n\t{\n\t\tmalloced_index = malloc(sizeof(int));\n\t\t*malloced_index = index;\n\t\tft_list_push_front(&list, (void *)malloced_index);\n\t\tindex++;\n\t}\n\tlast = ft_list_last(list);\n\tprintf(\"last malloced index: %d\\n\", *((int *)list->data));\n}\n"
},
{
"alpha_fraction": 0.30676329135894775,
"alphanum_fraction": 0.3254830837249756,
"avg_line_length": 29.10909080505371,
"blob_id": "5c1911b093a395a1fa87e36e67926187c502cbd8",
"content_id": "0243c6b8ce6192825919995727929e00a3471243",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1656,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 55,
"path": "/C03/ex04/ft_strstr_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_strstr.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/04 14:45:27 by ecaceres #+# #+# */\n/* Updated: 2019/08/04 14:45:29 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdbool.h>\n#include <stdio.h>\n#include <string.h>\n\nchar\t*ft_strstr(char *str, char *to_find)\n{\n\tchar *haystack;\n\tchar *needle;\n\n\tif (*to_find == '\\0')\n\t\treturn (str);\n\thaystack = str;\n\tneedle = to_find;\n\twhile (true)\n\t{\n\t\tif (*needle == '\\0')\n\t\t\treturn ((char *)(haystack - (needle - to_find)));\n\t\tif (*haystack == *needle)\n\t\t\tneedle++;\n\t\telse\n\t\t\tneedle = to_find;\n\t\tif (*haystack == '\\0')\n\t\t\tbreak ;\n\t\thaystack++;\n\t}\n\treturn (NULL);\n}\n\nint\t\tmain(void)\n{\n\tchar *haystack;\n\tchar *needle;\n\tchar *result_c;\n\tchar *result_ft;\n\n\thaystack = \"Foo Bar Baz\";\n\tneedle = \"Bar\";\n\tresult_c = strstr(haystack, needle);\n\tresult_ft = ft_strstr(haystack, needle);\n\tprintf(\"%p / %p\\n\", result_c, result_ft);\n\tprintf(\"c : %s$\\n\", result_c);\n\tprintf(\"ft : %s$\\n\", result_ft);\n}\n"
},
{
"alpha_fraction": 0.6178861856460571,
"alphanum_fraction": 0.6219512224197388,
"avg_line_length": 15.399999618530273,
"blob_id": "2e68820a31782116ee02703afae2fa70a7f6d59e",
"content_id": "ee26e753dc145fccce4d379d010c767ce639956f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 246,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 15,
"path": "/C10/ex03/srcs/ft_equal_buffer.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdbool.h>\n\nbool\tft_is_equal(char *a, char *b, unsigned int size)\n{\n\tunsigned int\tindex;\n\n\tindex = 0;\n\twhile (index < size)\n\t{\n\t\tif ((unsigned char)a[index] != (unsigned char)b[index])\n\t\t\treturn (false);\n\t\tindex++;\n\t}\n\treturn (true);\n}\n"
},
{
"alpha_fraction": 0.3973607122898102,
"alphanum_fraction": 0.4178885519504547,
"avg_line_length": 30.96875,
"blob_id": "10a600aeba03487ad3c0a12153f45b0f545216bf",
"content_id": "065b49b828081d029a0225928273d3da8fb6ba70",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2046,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 64,
"path": "/Final Project/includes/bsq.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* bsq.h :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: fyusuf-a <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/19 11:52:57 by fyusuf-a #+# #+# */\n/* Updated: 2019/08/20 15:48:59 by fyusuf-a ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#ifndef BSQ_H\n# define BSQ_H\n\n# define SIZE_1B 1\n# define SIZE_3M 3000000\n\n# define SIZE_MAP 100\n\n# include <stdlib.h>\n# include <fcntl.h>\n# include <unistd.h>\n# include <stdio.h>\n# include \"utilities.h\"\n# include \"ft_bool.h\"\n\ntypedef struct\ts_grid {\n\tUINT\t\t\tw;\n\tUINT\t\t\th;\n\tchar\t\t\t*source;\n\tunsigned char\t**map;\n\tunsigned char\ttranslate[3];\n}\t\t\t\tt_grid;\n\ntypedef struct\ts_solution {\n\tUINT\tsize;\n\tUINT\tx;\n\tUINT\ty;\n}\t\t\t\tt_solution;\n\ntypedef\tenum {\n\tempty,\n\tobstacle,\n\tfill,\n}\tt_block;\n\nt_bool\t\t\tft_load_grid(char *path, t_grid *grid);\nvoid\t\t\tft_free_grid(t_grid *grid);\nvoid\t\t\tft_process_grid(t_grid *grid, t_solution *sol);\n\nt_bool\t\t\tft_parse_header_line(t_grid *grid, char *line, UINT length);\nt_bool\t\t\tft_parse_normal_line(t_grid *grid, char *line, UINT length);\nt_bool\t\t\tft_read_full(int fd, char **file_content, UINT *total);\nt_bool\t\t\tft_has_width_changed(t_grid *grid, UINT *curr_w);\nt_bool\t\t\tft_process_lines(t_grid *grid, UINT index,\n\t\t\t\t\t\t\tchar *file_content, UINT total);\nt_bool\t\t\tft_parse_grid(int fd, t_grid *grid);\n\nt_bool\t\t\tfind_solution(t_solution *sol, const t_grid *grid);\n\nvoid\t\t\tft_print_solution(t_grid *grid, t_solution *solution);\n\n#endif\n"
},
{
"alpha_fraction": 0.6536964774131775,
"alphanum_fraction": 0.6634241342544556,
"avg_line_length": 19.559999465942383,
"blob_id": "ad38c5d9e7d8abff7afb0b97d321c35835beb4a0",
"content_id": "2cf8df0d12512fdacaad8814b8a54d706b298206",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 514,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 25,
"path": "/C10/ex00/srcs/display_file.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdbool.h>\n#include <unistd.h>\n#include <stdio.h>\n#include <fcntl.h>\n\n#include \"display_file.h\"\n\nbool\tdisplay_file(char *path)\n{\n\tint\t\t\t\tfile_descriptor;\n\tunsigned int\tbyte_read;\n\tchar\t\t\tbuffer[BUFFER_SIZE];\n\n\tfile_descriptor = open(path, O_RDONLY);\n\tif (file_descriptor < 0)\n\t\treturn (false);\n\twhile ((byte_read = read(file_descriptor, buffer, BUFFER_SIZE)) > 0)\n\t{\n\t\tif (byte_read == (unsigned int)-1)\n\t\t\treturn (false);\n\t\twrite(1, &buffer[0], byte_read);\n\t}\n\tclose(file_descriptor);\n\treturn (true);\n}\n"
},
{
"alpha_fraction": 0.3020022213459015,
"alphanum_fraction": 0.32258063554763794,
"avg_line_length": 24.323944091796875,
"blob_id": "db597441eb43ce218fd6a77763a76aabde488ead",
"content_id": "e93223025e870a7133f55bed4b28fa814a332bd6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1798,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 71,
"path": "/C07/ex04/dev/ft_convert_base2.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_convert_base2.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/08 13:30:32 by ecaceres #+# #+# */\n/* Updated: 2019/08/08 13:30:35 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdbool.h>\n\nint\t\tft_str_length(char *str)\n{\n\tint\tindex;\n\n\tindex = 0;\n\twhile (str[index])\n\t\tindex++;\n\treturn (index);\n}\n\nbool\tis_space(char c)\n{\n\treturn (c == '\\t' || c == '\\n' || c == '\\v' || c == '\\f'\n\t\t|| c == '\\r' || c == ' ');\n}\n\nbool\tis_base_valid(char *str)\n{\n\tchar\t*curr;\n\tint\t\tindex;\n\tint\t\tjndex;\n\n\tcurr = str;\n\tif (str == 0 || ft_str_length(str) <= 1)\n\t\treturn (false);\n\twhile (*curr)\n\t{\n\t\tif (is_space(*curr) || *curr == '+' || *curr == '-')\n\t\t\treturn (false);\n\t\tcurr++;\n\t}\n\tindex = 0;\n\twhile (index < curr - str)\n\t{\n\t\tjndex = index + 1;\n\t\twhile (jndex < curr - str)\n\t\t\tif (str[index] == str[jndex++])\n\t\t\t\treturn (false);\n\t\tindex++;\n\t}\n\treturn (true);\n}\n\nint\t\tcompute_number_length(unsigned int number, int radix, bool negative)\n{\n\tunsigned int\tlength;\n\n\tlength = negative ? 1 : 0;\n\twhile (true)\n\t{\n\t\tlength++;\n\t\tif (number / radix == 0)\n\t\t\tbreak ;\n\t\tnumber /= radix;\n\t}\n\treturn (length);\n}\n"
},
{
"alpha_fraction": 0.7157360315322876,
"alphanum_fraction": 0.7157360315322876,
"avg_line_length": 15.416666984558105,
"blob_id": "7cac6948f405f52a844e990ab8a715216b689e6a",
"content_id": "4e3d4b91a66ed5e6fcc4e7ec88d222b72ab3977c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 197,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 12,
"path": "/C10/ex03/includes/ft_hexdump.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_HEXDUMP_H\n# define FT_HEXDUMP_H\n\n# include <stdbool.h>\n\n# include \"ft_args_parser.h\"\n\nbool\tft_stdin_hexdump(t_options *options);\n\nbool\tft_hexdump(t_options *options, int count);\n\n#endif\n"
},
{
"alpha_fraction": 0.3162633776664734,
"alphanum_fraction": 0.35171636939048767,
"avg_line_length": 23.68055534362793,
"blob_id": "c762e5309978aaba564043cb8ae21297ca8c77ed",
"content_id": "9567118efe75e8fb54e361e346db799b4478ef4a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1777,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 72,
"path": "/C02/ex11/ft_putstr_non_printable_dev_bin.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_putstr_non_printable.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/02 13:43:53 by ecaceres #+# #+# */\n/* Updated: 2019/08/03 21:04:31 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdbool.h>\n#include <unistd.h>\n#include <stdlib.h>\n#include <stdio.h>\n\nvoid\tft_putchar(char c)\n{\n\twrite(1, &c, 1);\n}\n\nbool\tis_char_printable(char c)\n{\n\treturn (c >= ' ' && c != 127);\n}\n\nvoid\tft_putstr_non_printable(char *str)\n{\n\tint\t\t\t\tindex;\n\tunsigned char\tcurrent;\n\n\tindex = 0;\n\twhile (true)\n\t{\n\t\tcurrent = str[index];\n\t\tif (current == '\\0')\n\t\t\tbreak ;\n\t\tif (is_char_printable(current))\n\t\t\tft_putchar(current);\n\t\telse\n\t\t{\n\t\t\tft_putchar('\\\\');\n\t\t\tft_putchar(\"0123456789abcdef\"[current / 16]);\n\t\t\tft_putchar(\"0123456789abcdef\"[current % 16]);\n\t\t}\n\t\tindex++;\n\t}\n}\n\nint\t\tmain(void)\n{\n\tchar\t*buffer;\n\tlong\tlength;\n\tFILE\t*f;\n\n\tbuffer = 0;\n\tf = fopen(\"blah.bin\", \"rb\");\n\tif (f)\n\t{\n\t\tfseek(f, 0, SEEK_END);\n\t\tlength = ftell(f);\n\t\tfseek(f, 0, SEEK_SET);\n\t\tbuffer = malloc(length);\n\t\tif (buffer)\n\t\t{\n\t\t\tfread(buffer, 1, length, f);\n\t\t}\n\t\tfclose(f);\n\t}\n\tft_putstr_non_printable(buffer);\n}\n"
},
{
"alpha_fraction": 0.4107142984867096,
"alphanum_fraction": 0.4293650686740875,
"avg_line_length": 24.714284896850586,
"blob_id": "ca9502930babdf320709d38d30aabae5792692b3",
"content_id": "b45c5829317de86db8b7f5d8bd4252aedb391942",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2520,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 98,
"path": "/Rush02/ex00/srcs/ft_number_dictionary_sort.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_number_dictionary_sort.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/18 14:10:54 by ecaceres #+# #+# */\n/* Updated: 2019/08/18 14:10:54 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include \"ft_number_dictionary.h\"\n#include \"ft_boolean.h\"\n#include \"ft_short_types.h\"\n\nvoid\tft_swap_dictionary_entry(t_dict_entry *a, t_dict_entry *b)\n{\n\tt_dict_entry\tc;\n\n\tc = *a;\n\t*a = *b;\n\t*b = c;\n}\n\nvoid\tft_update_normal_flag(t_dict *dict)\n{\n\tint\t\t\t\tindex;\n\tt_dict_entry\t*entry;\n\tt_bool\t\t\tnormal;\n\n\tindex = 0;\n\twhile (index < dict->size)\n\t{\n\t\tentry = &dict->entries[index];\n\t\tnormal = false;\n\t\tif (entry->value <= 20)\n\t\t\tnormal = true;\n\t\tif (entry->value > 20 || entry->value < 100)\n\t\t\tif (entry->value % 10 == 0)\n\t\t\t\tnormal = true;\n\t\tentry->normal = normal;\n\t\tindex++;\n\t}\n}\n\nvoid\tft_do_sort_dict(t_dict *dict, int start, int end,\n\t\t\t\t\t\tint (*func)(t_dict_entry*, t_dict_entry*))\n{\n\tint\t\t\t\tindex;\n\tint\t\t\t\tjndex;\n\tt_bool\t\t\tswapped;\n\tt_dict_entry\t*entries;\n\n\tentries = dict->entries;\n\tindex = start;\n\twhile (index < end)\n\t{\n\t\tjndex = start;\n\t\tswapped = false;\n\t\twhile (jndex < end - index)\n\t\t{\n\t\t\tif ((*func)(&entries[jndex], &entries[jndex + 1]))\n\t\t\t{\n\t\t\t\tft_swap_dictionary_entry(&entries[jndex], &entries[jndex + 1]);\n\t\t\t\tswapped = true;\n\t\t\t}\n\t\t\tjndex++;\n\t\t}\n\t\tindex++;\n\t\tif (!swapped)\n\t\t\tbreak ;\n\t}\n}\n\nvoid\tft_sort_dictionary(t_dict *dict)\n{\n\tint\t\tindex;\n\tint\t\tindex_of_zero;\n\n\tft_update_normal_flag(dict);\n\tft_do_sort_dict(dict, 0, dict->size - 1, &ft_sort_dict_f_normal);\n\tindex = 0;\n\tindex_of_zero = INVALID;\n\twhile (index < dict->size)\n\t{\n\t\tif (dict->entries[index].value == 0)\n\t\t{\n\t\t\tindex_of_zero = index;\n\t\t\tbreak ;\n\t\t}\n\t\tindex++;\n\t}\n\tif (index_of_zero == INVALID)\n\t\treturn ;\n\tft_do_sort_dict(dict, index_of_zero, dict->size - 1, &ft_sort_dict_f_value);\n\tft_do_sort_dict(dict, 0, index_of_zero, &ft_sort_dict_f_value);\n}\n"
},
{
"alpha_fraction": 0.2884250581264496,
"alphanum_fraction": 0.311195433139801,
"avg_line_length": 26.736841201782227,
"blob_id": "c1524b96a547c06f75daac31e2e1713efd7d7030",
"content_id": "51d565ce9917db8afb4e1fc8b9a65f759f3354dc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1581,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 57,
"path": "/Rush02/ex00/srcs/ft_str_duplicate.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_str_duplicate.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/17 12:11:44 by ecaceres #+# #+# */\n/* Updated: 2019/08/17 12:11:44 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdlib.h>\n\n#include \"ft_str.h\"\n\nchar\t*ft_str_duplicate(char *src)\n{\n\tint\t\tlength;\n\tint\t\tindex;\n\tchar\t*dest;\n\n\tlength = ft_str_length(src);\n\tindex = 0;\n\tif ((dest = (char *)malloc((length + 1) * sizeof(char))) == NULL)\n\t\treturn (0);\n\twhile (src[index])\n\t{\n\t\tdest[index] = src[index];\n\t\tindex++;\n\t}\n\tdest[index] = '\\0';\n\treturn (dest);\n}\n\nchar\t*ft_str_n_duplicate(char *str, int n)\n{\n\tint\t\tindex;\n\tint\t\tlength;\n\tchar\t*dup;\n\n\tlength = 0;\n\twhile (str[length])\n\t\tlength++;\n\tif (length > n)\n\t\tlength = n;\n\tif (!(dup = malloc((length + 1) * sizeof(char))))\n\t\treturn (NULL);\n\tindex = 0;\n\twhile (index < length)\n\t{\n\t\tdup[index] = str[index];\n\t\tindex++;\n\t}\n\tdup[index] = '\\0';\n\treturn (dup);\n}\n"
},
{
"alpha_fraction": 0.25070226192474365,
"alphanum_fraction": 0.2872191071510315,
"avg_line_length": 29.95652198791504,
"blob_id": "2ac73ac505e00fe2d1b9a979efbe2221157df67d",
"content_id": "2fd892491f62737d20d04070df8fb569cc292da0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1424,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 46,
"path": "/C11/ex00/ft_foreach_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_foreach.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/16 11:11:48 by ecaceres #+# #+# */\n/* Updated: 2019/08/16 11:11:48 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdlib.h>\n#include <unistd.h>\n\nvoid\tx_ft_put_number(int number)\n{\n\tif (number > 9)\n\t\tx_ft_put_number(number / 10);\n\twrite(1, &\"0123456789\"[number % 10], 1);\n}\n\nvoid\tft_foreach(int *tab, int length, void (*f)(int))\n{\n\tif (length <= 0)\n\t\treturn ;\n\t(*f)(tab[0]);\n\tft_foreach(tab + 1, length - 1, f);\n}\n\nint\t\tmain(void)\n{\n\tint\tindex;\n\tint\t*tab;\n\tint\tlength;\n\n\tlength = 10;\n\ttab = malloc(length * sizeof(int));\n\tindex = 0;\n\twhile (index < length)\n\t{\n\t\ttab[index] = index;\n\t\tindex++;\n\t}\n\tft_foreach(tab, length, &x_ft_put_number);\n}\n"
},
{
"alpha_fraction": 0.5894206762313843,
"alphanum_fraction": 0.5919395685195923,
"avg_line_length": 10.027777671813965,
"blob_id": "9f0e807f38bb51f4bd25f77b21f0625c37df858e",
"content_id": "ef3553ab96161e337c4b7a42bdb4deb3fd7b90ce",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 397,
"license_type": "no_license",
"max_line_length": 34,
"num_lines": 36,
"path": "/C10/ex02/srcs/ft_console_io.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <unistd.h>\n\n#include \"ft_console_io.h\"\n\nvoid\twrite_char_to(char c, int fd)\n{\n\twrite(fd, &c, 1);\n}\n\nvoid\twrite_char_out(char c)\n{\n\twrite_char_to(c, OUT);\n}\n\nvoid\twrite_str_out(char *str)\n{\n\twhile (*str)\n\t{\n\t\twrite_char_out(*str);\n\t\tstr++;\n\t}\n}\n\nvoid\twrite_char_err(char c)\n{\n\twrite_char_to(c, ERR);\n}\n\nvoid\twrite_str_err(char *str)\n{\n\twhile (*str)\n\t{\n\t\twrite_char_err(*str);\n\t\tstr++;\n\t}\n}\n"
},
{
"alpha_fraction": 0.3497537076473236,
"alphanum_fraction": 0.3689107894897461,
"avg_line_length": 31.052631378173828,
"blob_id": "4e2a6e446c03d86fa57e30859a8cb274974a5de8",
"content_id": "97b1d7079741f5588bc4e73375f16dfb5a4865c6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1827,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 57,
"path": "/C12/ex10/ft_list_foreach_if_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_list_foreach_if.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/22 17:38:24 by ecaceres #+# #+# */\n/* Updated: 2019/08/22 17:38:24 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include \"ft_list.h\"\n\n#include <stdlib.h>\n#include <stdio.h>\n\nvoid\tft_list_foreach_if(t_list *begin_list, void (*f)(void *),\n\t\t\t\tvoid *data_ref, int (*cmp)())\n{\n\tif (begin_list == 0)\n\t\treturn ;\n\tif ((*cmp)(begin_list->data, data_ref) == 0)\n\t\t(*f)(begin_list->data);\n\tft_list_foreach_if(begin_list->next, f, data_ref, cmp);\n}\n\nvoid\tprint_element(void *element)\n{\n\tprintf(\"element:: %d\\n\", *((int *)element));\n}\n\nint\t\tcompare_modulo(void *data, void *data_ref)\n{\n\treturn (*((int *)data) % *((int *)data_ref));\n}\n\nint\t\tmain(void)\n{\n\tint\t\t*index;\n\tint\t\t*modulo;\n\tint\t\t*malloced_index;\n\tt_list\t*list;\n\n\tindex = malloc(sizeof(int));\n\t*index = 0;\n\tlist = ft_create_elem(index);\n\twhile ((*index += 1) < 10)\n\t{\n\t\tmalloced_index = malloc(sizeof(int));\n\t\t*malloced_index = *index;\n\t\tft_list_push_back(&list, (void *)malloced_index);\n\t}\n\tmodulo = malloc(sizeof(int));\n\t*modulo = 2;\n\tft_list_foreach_if(list, &print_element, modulo, &compare_modulo);\n}\n"
},
{
"alpha_fraction": 0.3397085666656494,
"alphanum_fraction": 0.3583788573741913,
"avg_line_length": 23.954545974731445,
"blob_id": "f63a919b8c5f9bcd8c5516c5cb78639533f72c43",
"content_id": "1bc708ba289a164769969290a1c9b0e4ad9d979d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2196,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 88,
"path": "/ExamFinal/work-done/ft_split/ft_split.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_split.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: exam <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/23 11:38:11 by exam #+# #+# */\n/* Updated: 2019/08/23 11:38:12 by exam ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdlib.h>\n#include <stdbool.h>\n\nbool\tft_is_whitespace(char c)\n{\n\treturn (c == ' ' || c == '\\t' || c == '\\v' || c == '\\f'\n\t\t\t|| c == '\\n' || c == '\\r');\n}\n\nint\t\tft_split_count_word(char *str)\n{\n\tint\t\tindex;\n\tint\t\tcount;\n\n\tindex = 0;\n\tcount = 0;\n\twhile (str[index])\n\t{\n\t\twhile (ft_is_whitespace(str[index]))\n\t\t\tindex++;\n\t\tif (str[index] == '\\0')\n\t\t\tbreak ;\n\t\twhile (!ft_is_whitespace(str[index]) && str[index] != '\\0')\n\t\t\tindex++;\n\t\tcount++;\n\t}\n\treturn (count);\n}\n\nchar\t*ft_str_n_duplicate(char *str, int n)\n{\n\tint\t\tindex;\n\tchar\t*duplicate;\n\n\tif (!(duplicate = (char *)malloc((n + 1) * sizeof(char))))\n\t\treturn (NULL);\n\tindex = 0;\n\twhile (str[index] && index < n)\n\t{\n\t\tduplicate[index] = str[index];\n\t\tindex++;\n\t}\n\twhile (index < n + 1)\n\t{\n\t\tduplicate[index] = '\\0';\n\t\tindex++;\n\t}\n\treturn (duplicate);\n}\n\nchar\t**ft_split(char *str)\n{\n\tint\t\tindex;\n\tint\t\twords;\n\tchar\t*start;\n\tchar\t**array;\n\n\tindex = 0;\n\twords = ft_split_count_word(str);\n\tif (!(array = (char **)malloc((words + 1) * sizeof(char *))))\n\t\treturn (NULL);\n\twhile (index < words)\n\t{\n\t\twhile (ft_is_whitespace(*str))\n\t\t\tstr++;\n\t\tif (*str == '\\0')\n\t\t\tbreak ;\n\t\tstart = str;\n\t\twhile (!ft_is_whitespace(*str) && *str != '\\0')\n\t\t\tstr++;\n\t\tarray[index] = ft_str_n_duplicate(start, str - start);\n\t\tindex++;\n\t}\n\tarray[index] = 0;\n\treturn (array);\n}\n"
},
{
"alpha_fraction": 0.6009852290153503,
"alphanum_fraction": 0.6256157755851746,
"avg_line_length": 14.615385055541992,
"blob_id": "84e1857fd330a66dc924a111a80e72fc6f6bfc14",
"content_id": "97080ce395bbfb1d9bc57517b19c023809e5d71d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 203,
"license_type": "no_license",
"max_line_length": 34,
"num_lines": 13,
"path": "/C05/ex00/ft_iterative_factorial.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "int\tft_iterative_factorial(int nb)\n{\n\tint\tfactorial;\n\tint\tindex;\n\n\tif (nb < 0)\n\t\treturn (0);\n\tindex = 1;\n\tfactorial = 1;\n\twhile (index < nb + 1)\n\t\tfactorial = factorial * index++;\n\treturn (factorial);\n}\n"
},
{
"alpha_fraction": 0.5345622301101685,
"alphanum_fraction": 0.5622119903564453,
"avg_line_length": 18.727272033691406,
"blob_id": "2afd550fb237acd1532dcce7e7c6d8dcf20e5b20",
"content_id": "28dd8ea1afdc5d74ad015da83a9b76c08a5d00f6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 217,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 11,
"path": "/Shell01/ex08/test.sh",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "# Example 1\nexport FT_NBR1=\\\\\\'?\\\"\\\\\\\"\\'\\\\\nexport FT_NBR2=rcrdmddd\n\n./add_chelou.sh\n\n# Example 2\nexport FT_NBR1='\\\\\"\\\\\"!\\\\\"\\\\\"!\\\\\"\\\\\"!\\\\\"\\\\\"!\\\\\"\\\\\"!\\\\\"\\\\\"'\nexport FT_NBR2=dcrcmcmooododmrrrmorcmcrmomo\n\n./add_chelou.sh\n"
},
{
"alpha_fraction": 0.29551899433135986,
"alphanum_fraction": 0.3210436701774597,
"avg_line_length": 24.550724029541016,
"blob_id": "cfc956c4b6d338561519efb324d852d29660a0ba",
"content_id": "d17e235fd2b5cdbe6fbd5d023c410294495ae992",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1763,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 69,
"path": "/Exam00/work-done/ft_atoi/ft_atoi.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_atoi.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: exam <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/02 18:54:26 by exam #+# #+# */\n/* Updated: 2019/08/02 18:54:30 by exam ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdbool.h>\n\nint\t\tft_str_length(const char *str)\n{\n\tint index;\n\n\tindex = 0;\n\twhile (str[index++] != '\\0');\n\treturn (index - 1);\n}\n\nint\t\tft_pow(int a, int b)\n{\n\tint\tresult;\n\n\tresult = 1;\n\twhile (a-- != 0 && a > 0)\n\t{\n\t\tresult *= b;\n\t}\n\treturn (result);\n}\n\nvoid\tft_remove_excess(int *number, int index, int str_length)\n{\n\twhile ((str_length - index) != 0)\n\t{\n\t\t*number /= 10;\n\t\tindex++;\n\t}\n}\n\nint\t\tft_atoi(const char* str)\n{\n\tint\t\tresult;\n\tint\t\tstr_length;\n\tint\t\tindex;\n\tbool negative;\n\n\tresult = 0;\n\tstr_length = ft_str_length(str);\n\tnegative = str[0] == '-';\n\tindex = negative;\n\twhile(index <= str_length - 1)\n\t{\n\t\tif (!(str[index] >= '0' && str[index] <= '9'))\n\t\t{\n\t\t\tft_remove_excess(&result, index, str_length);\n\t\t\tbreak ;\n\t\t}\n\t\tresult += (str[index] - '0') * ft_pow(str_length - index, 10);\n\t\tindex++;\n\t}\n\tif (negative)\n\t\tresult = -result;\n\treturn (result);\n}\n"
},
{
"alpha_fraction": 0.5937052965164185,
"alphanum_fraction": 0.6309012770652771,
"avg_line_length": 20.84375,
"blob_id": "2094e91d27354fc61cc0e51e69166cfee45cf07a",
"content_id": "3f3d24faa34afd481c48bebd2f441cdb24b03ace",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 699,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 32,
"path": "/C10/ex03/srcs/ft_write_number.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <unistd.h>\n\n#include \"ft_write_number.h\"\n\nvoid\tft_buffer_number(int number, int radix, int buffer[], int index)\n{\n\tif (number > radix - 1)\n\t\tft_buffer_number(number / radix, radix, buffer, index + 1);\n\tbuffer[index] = number % radix;\n}\n\nvoid\tft_write_number(unsigned int number, int radix, int char_count)\n{\n\tint\tbuffer[MAX_BUFFER_SIZE + 1];\n\tint\tindex;\n\n\tindex = -1;\n\twhile (index++ < MAX_BUFFER_SIZE)\n\t\tbuffer[index] = 0;\n\tft_buffer_number(number, radix, buffer, 0);\n\tindex = -1;\n\twhile (index++ < char_count)\n\t\twrite(1, &\"0123456789abcdef\"[buffer[char_count - index]], 1);\n}\n\nvoid\tft_write_safe_char(char *c)\n{\n\tif (*c >= ' ' && *c != 127)\n\t\twrite(1, c, 1);\n\telse\n\t\twrite(1, &\".\", 1);\n}\n"
},
{
"alpha_fraction": 0.5637755393981934,
"alphanum_fraction": 0.5765306353569031,
"avg_line_length": 13,
"blob_id": "c6927c7845f3257f051797b61b1975169cec2fbc",
"content_id": "2c73b30569e4ca1ace8af2336b2255e2c5b5450c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 392,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 28,
"path": "/C07/ex00/ft_strdup.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdlib.h>\n\nint\t\tft_str_length(char *str)\n{\n\tint\tindex;\n\n\tindex = 0;\n\twhile (str[index])\n\t\tindex++;\n\treturn (index);\n}\n\nchar\t*ft_strdup(char *src)\n{\n\tint\t\tindex;\n\tchar\t*dest;\n\n\tindex = 0;\n\tif ((dest = (char *)malloc(ft_str_length(src) * sizeof(char) + 1)) == NULL)\n\t\treturn (0);\n\twhile (src[index])\n\t{\n\t\tdest[index] = src[index];\n\t\tindex++;\n\t}\n\tdest[index] = '\\0';\n\treturn (dest);\n}\n"
},
{
"alpha_fraction": 0.5517241358757019,
"alphanum_fraction": 0.5646551847457886,
"avg_line_length": 11.210526466369629,
"blob_id": "8755c5b4f2449053f9291d5a129d06d33ab228ba",
"content_id": "778f98406fa32f689836304b8b2d36331595b399",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 232,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 19,
"path": "/C06/ex02/ft_rev_params.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <unistd.h>\n\nvoid\tft_putstr(char *str)\n{\n\twhile (*str)\n\t\twrite(1, str++, 1);\n}\n\nint\t\tmain(int argc, char **argv)\n{\n\tint\tindex;\n\n\tindex = 0;\n\twhile (++index < argc)\n\t{\n\t\tft_putstr(argv[argc - index]);\n\t\tft_putstr(\"\\n\");\n\t}\n}\n"
},
{
"alpha_fraction": 0.5857740640640259,
"alphanum_fraction": 0.5920501947402954,
"avg_line_length": 22.899999618530273,
"blob_id": "ee089c15cea99c8fdd907c67fa4ed85891132119",
"content_id": "77c91bc720034aabb40a66e94b2487df0d4ed565",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 478,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 20,
"path": "/C12/ex12/ft_list_remove_if.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdlib.h>\n\n#include \"ft_list.h\"\n\nvoid\tft_list_remove_if(t_list **begin_list, void *data_ref, int (*cmp)(),\n\t\t\t\tvoid (*free_fct)(void *))\n{\n\tt_list\t*removed;\n\n\tif (begin_list == 0 || (*begin_list)->next == 0)\n\t\treturn ;\n\tif ((*cmp)((*begin_list)->next->data, data_ref) == 0)\n\t{\n\t\tremoved = (*begin_list)->next;\n\t\t(*begin_list)->next = removed->next;\n\t\t(*free_fct)(removed->data);\n\t\tfree(removed);\n\t}\n\tft_list_remove_if(&(*begin_list)->next, data_ref, cmp, free_fct);\n}\n"
},
{
"alpha_fraction": 0.29514098167419434,
"alphanum_fraction": 0.32033592462539673,
"avg_line_length": 24.25757598876953,
"blob_id": "872bd8583a29e39d41dde0fd7eb186d9662f8580",
"content_id": "01610eb6557d032c8cc118dcb3d7fe8620c4fbb8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1667,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 66,
"path": "/C01/ex07/ft_rev_int_tab_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_rev_int_tab.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/01 14:41:18 by ecaceres #+# #+# */\n/* Updated: 2019/08/01 14:41:24 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdio.h>\n\nvoid\tft_rev_int_tab(int *tab, int size)\n{\n\tint index;\n\tint tmp;\n\n\tindex = 0;\n\twhile (index < size / 2)\n\t{\n\t\ttmp = tab[index];\n\t\ttab[index] = tab[size - 1 - index];\n\t\ttab[size - 1 - index] = tmp;\n\t\tindex++;\n\t}\n}\n\nvoid\tdebug_dump_array(int numbers[], int size)\n{\n\tint index;\n\n\tprintf(\"[ \");\n\tindex = 0;\n\twhile (index < size)\n\t{\n\t\tprintf(\"%d\", numbers[index]);\n\t\tif (index != size - 1)\n\t\t{\n\t\t\tprintf(\", \");\n\t\t}\n\t\tindex++;\n\t}\n\tprintf(\" ]\");\n}\n\nint\t\tmain(void)\n{\n\tint numbers[9];\n\tint *first_pointer;\n\tint index;\n\n\tindex = 0;\n\twhile (index < 9)\n\t{\n\t\tnumbers[index] = index + 1;\n\t\tindex++;\n\t}\n\tfirst_pointer = &numbers[0];\n\tprintf(\"before: \");\n\tdebug_dump_array(numbers, 9);\n\tft_rev_int_tab(first_pointer, 9);\n\tprintf(\"\\nafter : \");\n\tdebug_dump_array(numbers, 9);\n}\n"
},
{
"alpha_fraction": 0.5461121201515198,
"alphanum_fraction": 0.5840867757797241,
"avg_line_length": 14.36111068725586,
"blob_id": "8c1d6fbb06e13b615bd52b96093bf62d7e6ba96d",
"content_id": "74930f2c76adad81b55fbaa11a89549c249aa4b1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 553,
"license_type": "no_license",
"max_line_length": 44,
"num_lines": 36,
"path": "/C08/ex05/ft_show_tab.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <unistd.h>\n\n#include \"ft_stock_str.h\"\n\nvoid\t\t\t\tft_put_string(char *str)\n{\n\twhile (*str)\n\t{\n\t\twrite(1, str, 1);\n\t\tstr++;\n\t}\n}\n\nvoid\t\t\t\tft_put_number(int number)\n{\n\tif (number > 9)\n\t\tft_put_number(number / 10);\n\twrite(1, &\"0123456789\"[number % 10], 1);\n}\n\nvoid\t\t\t\tft_show_tab(struct s_stock_str *par)\n{\n\tint\tindex;\n\n\tindex = 0;\n\twhile (par[index].str != 0)\n\t{\n\t\tft_put_string(par[index].str);\n\t\tft_put_string(\"\\n\");\n\t\tft_put_number(par[index].size);\n\t\tft_put_string(\"\\n\");\n\t\tft_put_string(par[index].copy);\n\t\tft_put_string(\"\\n\");\n\t\tindex++;\n\t}\n}\n"
},
{
"alpha_fraction": 0.641791045665741,
"alphanum_fraction": 0.641791045665741,
"avg_line_length": 10.166666984558105,
"blob_id": "22ca925457ba0f98340f9312962a2f09f5f26458",
"content_id": "ae814abdcf8cbfe05fd475fe69f78e6c2b9608ef",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 67,
"license_type": "no_license",
"max_line_length": 24,
"num_lines": 6,
"path": "/C11/ex05/includes/ft_to.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_TO_H\n# define FT_TO_H\n\nint\t\tft_atoi(char *str);\n\n#endif\n"
},
{
"alpha_fraction": 0.5412843823432922,
"alphanum_fraction": 0.5504587292671204,
"avg_line_length": 9.899999618530273,
"blob_id": "b84537f0c0fd33d6935d7f60cb2874b143423b70",
"content_id": "7d6bb60ad9078224f25a215a4768343efc740e68",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 109,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 10,
"path": "/C10/ex01/srcs/string_utils.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <unistd.h>\n\nvoid\tstr_write(int fd, char *str)\n{\n\twhile (*str)\n\t{\n\t\twrite(fd, str, 1);\n\t\tstr++;\n\t}\n}\n"
},
{
"alpha_fraction": 0.5991189479827881,
"alphanum_fraction": 0.5991189479827881,
"avg_line_length": 11.61111068725586,
"blob_id": "8a984b6f2bddb68b940d14da2468b74850050adc",
"content_id": "7f93ec5c0c2d2e1a1db9d41a62a69f08e3ef8a4c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 227,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 18,
"path": "/C11/ex05/srcs/ft_str_write.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <unistd.h>\n\n#include \"ft_io.h\"\n#include \"ft_char.h\"\n\nvoid\tft_str_write_to(int fd, char *str)\n{\n\twhile (*str)\n\t{\n\t\tft_char_write_to(fd, *str);\n\t\tstr++;\n\t}\n}\n\nvoid\tft_str_write(char *str)\n{\n\tft_str_write_to(OUT, str);\n}\n"
},
{
"alpha_fraction": 0.6026490330696106,
"alphanum_fraction": 0.6225165724754333,
"avg_line_length": 17.875,
"blob_id": "48a50f0626373129c01e694c591b79240e92145a",
"content_id": "b4eb1e0cb25a686dd3f313f6f8f39a41fd75e8f7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 151,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 8,
"path": "/C12/ex02/ft_list_size.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include \"ft_list.h\"\n\nint\tft_list_size(t_list *begin_list)\n{\n\tif (begin_list->next == 0)\n\t\treturn (0);\n\treturn (1 + ft_list_size(begin_list->next));\n}\n"
},
{
"alpha_fraction": 0.7407407164573669,
"alphanum_fraction": 0.7407407164573669,
"avg_line_length": 10.571428298950195,
"blob_id": "4c58c39f411e586f1dde31ab7df5f8bbf52c569f",
"content_id": "9e31307e308e77b0b91bfdfe169e9008f0686bb9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 81,
"license_type": "no_license",
"max_line_length": 21,
"num_lines": 7,
"path": "/Shell01/ex01/test.sh",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "export FT_USER=root\n\n./print_groups.sh\n\nexport FT_USER=daemon\n\n./print_groups.sh\n"
},
{
"alpha_fraction": 0.36689215898513794,
"alphanum_fraction": 0.3851969838142395,
"avg_line_length": 20.852174758911133,
"blob_id": "a903bf6760a43bc0ed4f2c51859ac81d807f8270",
"content_id": "f0378ed800d46dc4c3e1770f88238fdad66c673d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2513,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 115,
"path": "/Exam01/work-done/ft_split/ft_split.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_split.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: exam <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/09 19:12:30 by exam #+# #+# */\n/* Updated: 2019/08/09 19:12:32 by exam ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdbool.h>\n#include <stdlib.h>\n\nbool\tis_space(char c)\n{\n\treturn (c == '\\f' || c == '\\n' || c == '\\r' ||\n\t\tc == '\\v' || c == '\\t' || c == ' ');\n}\n\nint\t\tstr_length(char *str)\n{\n\tint\tlength;\n\n\tlength = 0;\n\twhile (str[length])\n\t\tlength++;\n\treturn (length);\n}\n\nchar\t*str_copy(char *src, char *dest, int size)\n{\n\twhile (*src && size)\n\t{\n\t\t*dest = *src;\n\t\tsrc++;\n\t\tdest++;\n\t\tsize--;\n\t}\n\t*dest = '\\0';\n\treturn (dest);\n}\n\nchar\t*find_next_word(char *str, int *offset, int *length)\n{\n\tint\t\tindex;\n\tchar\t*curr_word;;\n\n\tindex = *offset;\n\twhile (str[index])\n\t{\n\t\tif (!is_space(str[index]))\n\t\t{\n\t\t\tcurr_word = &str[index];\n\t\t\t*length = 1;\n\t\t\twhile (!is_space(str[index]) && str[index])\n\t\t\t{\n\t\t\t\t*offset += 1;\n\t\t\t\t*length += 1;\n\t\t\t\tindex++;\n\t\t\t}\n\t\t\treturn curr_word;\n\t\t}\n\t\t*offset += 1;\n\t\tindex++;\n\t}\n\treturn (0);\n}\n\nint\t\tcount_words(char *str)\n{\n\tint\t\tcount;\n\tint\t\toffset;\n\tint\t\tsize;\n\tchar\t*new_word;\n\tchar\t*last_word;\n\n\tcount = 0;\n\toffset = 0;\n\tsize = 0;\n\tlast_word = str;\n\twhile (true)\n\t{\n\t\tnew_word = find_next_word(str, &offset, &size);\n\t\tif (new_word == 0)\n\t\t\tbreak ;\n\t\tcount++;\n\t\tlast_word = new_word;\n\t}\n\treturn (count);\n}\n\nchar\t**ft_split(char *str)\n{\n\tint\t\tindex;\n\tint\t\toffset;\n\tint\t\tsize;\n\tchar\t*next_word;\n\tchar\t**array;\n\n\tarray = (char **)malloc((count_words(str) + 1) * sizeof(char *));\n\tindex = 0;\n\toffset = 0;\n\tsize = 0;\n\twhile (index < count_words(str))\n\t{\n\t\tnext_word = find_next_word(str, &offset, &size);\n\t\tarray[index] = (char *)malloc((str_length(next_word) + 1) * sizeof(char));\n\t\tstr_copy(&str[offset - (size - 1)], array[index], size - 1);\n\t\tindex++;\n\t}\n\tarray[count_words(str)] = NULL;\n\treturn (array);\n}\n"
},
{
"alpha_fraction": 0.6224783658981323,
"alphanum_fraction": 0.630403459072113,
"avg_line_length": 18.54929542541504,
"blob_id": "0e1f63d8eb91f82d1f96a05a58cda63558fa40f0",
"content_id": "d4d5af850dc7128c0473fbd23f50640991cbe345",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1388,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 71,
"path": "/C10/ex02/srcs/ft_tail_utils.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <fcntl.h>\n#include <string.h>\n#include <unistd.h>\n#include <errno.h>\n#include <libgen.h>\n#include <stdlib.h>\n#include <stdio.h>\n\n#include \"ft_tail.h\"\n#include \"ft_tail_utils.h\"\n#include \"ft_console_io.h\"\n#include \"ft_strncpy.h\"\n\n#define IS_STD(fd) (fd >= 0 && fd <= 2)\n\nint\t\topen_file(char *executable, char *path)\n{\n\tint\t\tfd;\n\n\tfd = open(path, O_RDONLY);\n\tif (fd < 0)\n\t\tshow_error(executable, path);\n\treturn (fd);\n}\n\nint\t\tclose_file(char *executable, char *path, int fd)\n{\n\tint\t\tresult;\n\n\tresult = close(fd);\n\tif (result < 0)\n\t\tshow_error(executable, path);\n\treturn (result);\n}\n\nvoid\tshow_error(char *executable, char *path)\n{\n\tif (errno == 0)\n\t\treturn ;\n\twrite_str_err(basename(executable));\n\twrite_str_err(\": \");\n\twrite_str_err(path);\n\twrite_str_err(\": \");\n\twrite_str_err(strerror(errno));\n\twrite_str_err(\"\\n\");\n\terrno = 0;\n}\n\nchar\t*read_full(int fd, unsigned long *total_read)\n{\n\tunsigned int\tbyte_read;\n\tchar\t\t\tbuffer[DEFAULT_BUFFER_SIZE];\n\tchar\t\t\t*old;\n\tchar\t\t\t*dest;\n\n\t*total_read = 0;\n\tdest = malloc(0);\n\twhile ((byte_read = read(fd, buffer, DEFAULT_BUFFER_SIZE)) != 0)\n\t{\n\t\tif (errno != 0)\n\t\t\treturn (dest);\n\t\told = dest;\n\t\tif (!(dest = malloc((*total_read + byte_read) * sizeof(char))))\n\t\t\treturn (0);\n\t\tft_str_sized_copy(dest, old, *total_read);\n\t\tft_str_sized_copy(dest + *total_read, buffer, byte_read);\n\t\t*total_read += byte_read;\n\t\tfree(old);\n\t}\n\treturn (dest);\n}\n"
},
{
"alpha_fraction": 0.5678353905677795,
"alphanum_fraction": 0.5807926654815674,
"avg_line_length": 16.7297306060791,
"blob_id": "b785e1a453d89ac5eee54f9d57315f4c369a07af",
"content_id": "17579893c5ddcb052a64fd8c52828a5699ca3878",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1312,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 74,
"path": "/C11/ex05/srcs/main.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include \"ft_operation.h\"\n#include \"ft_boolean.h\"\n#include \"ft_str.h\"\n#include \"ft_number.h\"\n#include \"ft_to.h\"\n#include \"ft_io.h\"\n\n#define INVALID -1\n\nchar\tft_find_operator(char *str)\n{\n\tif (str[0] == '\\0' || str[1] != '\\0')\n\t\treturn (INVALID);\n\treturn (str[0]);\n}\n\nt_bool\tft_validate_operation(int b, char operator)\n{\n\tif (operator == '/' && b == 0)\n\t{\n\t\tft_str_write(\"Stop : division by zero\\n\");\n\t\treturn (false);\n\t}\n\tif (operator == '%' && b == 0)\n\t{\n\t\tft_str_write(\"Stop : modulo by zero\\n\");\n\t\treturn (false);\n\t}\n\treturn (true);\n}\n\nvoid\tft_do_operation(int a, int b, char operator)\n{\n\tstatic char\toperators[5] = { '+', '-', '/', '*', '%' };\n\tstatic int\t(*operations[5])(int, int) = {\n\t\t\t&ft_operation_add,\n\t\t\t&ft_operation_minus,\n\t\t\t&ft_operation_devide,\n\t\t\t&ft_operation_multiply,\n\t\t\t&ft_operation_modulo\n\t};\n\tint\t\t\tresult;\n\tint\t\t\tindex;\n\n\tresult = 0;\n\tindex = 0;\n\twhile (index < 5)\n\t{\n\t\tif (operator == operators[index])\n\t\t{\n\t\t\tresult = (operations[index])(a, b);\n\t\t\tbreak ;\n\t\t}\n\t\tindex++;\n\t}\n\tft_number_write(result);\n\tft_str_write(\"\\n\");\n}\n\nint\t\tmain(int argc, char **argv)\n{\n\tint\t\ta;\n\tint\t\tb;\n\tchar\toperator;\n\n\tif (argc == 4)\n\t{\n\t\ta = ft_atoi(argv[1]);\n\t\tb = ft_atoi(argv[3]);\n\t\toperator = ft_find_operator(argv[2]);\n\t\tif (ft_validate_operation(b, operator))\n\t\t\tft_do_operation(a, b, operator);\n\t}\n}\n"
},
{
"alpha_fraction": 0.3387589156627655,
"alphanum_fraction": 0.357578843832016,
"avg_line_length": 27.91176414489746,
"blob_id": "ffe9a66710c6ca9199a9c441c9b033c9a9d77f1a",
"content_id": "f461aa24c0db3ef7e7a77cae2d1268fa71bf210e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1966,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 68,
"path": "/Rush01/ex00/debug.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* debug.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/10 14:11:16 by ecaceres #+# #+# */\n/* Updated: 2019/08/10 14:11:18 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include \"string_utils.h\"\n#include \"constraint.h\"\n\nvoid\tprint_line(char *prefix, int length, char *suffix)\n{\n\tint\tindex;\n\n\tstr_write(prefix);\n\tindex = 0;\n\twhile (index++ < length)\n\t\tstr_write(\"--\");\n\tstr_write(suffix);\n}\n\nvoid\tprint_line_of_value(char *prefix, int *array, int size, char *suffix)\n{\n\tint\tindex;\n\n\tstr_write(prefix);\n\tindex = 0;\n\twhile (index < size)\n\t{\n\t\tstr_write_char((char)(array[index] + '0'));\n\t\tstr_write(\" \");\n\t\tindex++;\n\t}\n\tstr_write(suffix);\n}\n\nvoid\tdump_grid(int **grid, int size, t_constr cnstr[4])\n{\n\tint\tindex;\n\tint\tjndex;\n\n\tprint_line_of_value(\" \", cnstr[UP].v, size, \"\");\n\tprint_line(\"\\n -\", size, \"\\n\");\n\tindex = 0;\n\twhile (index < size)\n\t{\n\t\tstr_write_char((char)(cnstr[LEFT].v[index] + '0'));\n\t\tstr_write(\" | \");\n\t\tjndex = 0;\n\t\twhile (jndex < size)\n\t\t{\n\t\t\tstr_write_char((char)(grid[index][jndex] + '0'));\n\t\t\tstr_write(\" \");\n\t\t\tjndex++;\n\t\t}\n\t\tstr_write(\"| \");\n\t\tstr_write_char((char)(cnstr[RIGHT].v[index] + '0'));\n\t\tstr_write(\"\\n\");\n\t\tindex++;\n\t}\n\tprint_line(\" -\", size, \"\");\n\tprint_line_of_value(\"\\n \", cnstr[DOWN].v, size, \"\\n\");\n}\n"
},
{
"alpha_fraction": 0.25923439860343933,
"alphanum_fraction": 0.2847548723220825,
"avg_line_length": 26.574073791503906,
"blob_id": "c2cea56958fc7c4c2e5faf02536f83dd3954fff2",
"content_id": "a7c177f4d9bf51f32ac00ddac1f7a67485a812bc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1489,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 54,
"path": "/C02/ex08/ft_strlowcase_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_strlowcase.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/02 10:21:52 by ecaceres #+# #+# */\n/* Updated: 2019/08/02 10:21:55 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdbool.h>\n#include <stdio.h>\n\nchar\t*ft_strlowcase(char *str)\n{\n\tint\t\tindex;\n\tchar\tcurr;\n\n\tindex = 0;\n\twhile (true)\n\t{\n\t\tcurr = str[index];\n\t\tif (curr == '\\0')\n\t\t{\n\t\t\tbreak ;\n\t\t}\n\t\tif (curr >= 'A' && curr <= 'Z')\n\t\t{\n\t\t\tstr[index] = curr + 32;\n\t\t}\n\t\tindex++;\n\t}\n\treturn (str);\n}\n\nint\t\tmain(void)\n{\n\tchar\t*str_base;\n\tchar\tstr_lower[6];\n\tint\t\tindex;\n\n\tstr_base = \"HELl0\";\n\tindex = 0;\n\twhile (index < 6)\n\t{\n\t\tstr_lower[index] = str_base[index] + 0;\n\t\tindex++;\n\t}\n\tft_strlowcase(&str_lower[0]);\n\tprintf(\"should be normal : %s\\n\", str_base);\n\tprintf(\"should be lower case: %s\\n\", str_lower);\n}\n"
},
{
"alpha_fraction": 0.44137316942214966,
"alphanum_fraction": 0.45430228114128113,
"avg_line_length": 30.15277862548828,
"blob_id": "03185885d4a3c553b5cb6ed6bcc6ae15c26190b2",
"content_id": "dd9f702cd1817033066d372cc030b86b18ab6b41",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2243,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 72,
"path": "/Rush02/ex00/includes/ft_number_dictionary.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_number_dictionary.h :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/17 10:48:05 by ecaceres #+# #+# */\n/* Updated: 2019/08/17 10:48:05 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#ifndef FT_NUMBER_DICTIONARY_H\n# define FT_NUMBER_DICTIONARY_H\n\n# include \"ft_boolean.h\"\n# include \"ft_short_types.h\"\n\n# define INVALID -1\n# define ENGLISH_DICT \"numbers.dict\"\n\ntypedef struct\ts_dict_entry\n{\n\tULNG\tvalue;\n\tchar\t*str;\n\tt_bool\tnormal;\n}\t\t\t\tt_dict_entry;\n\ntypedef struct\ts_dict\n{\n\tchar\t\t\t*path;\n\tt_bool\t\t\tvalid;\n\tint\t\t\t\tsize;\n\tt_dict_entry\t*entries;\n}\t\t\t\tt_dict;\n\ntypedef enum {\n\tnone,\n\tgeneric,\n\tinvalid_number,\n\tfail_convert,\n\tdict_parsing\n}\tt_error;\n\ntypedef enum\n{\n\tparsing_ok,\n\tempty_line,\n\tfailed,\n\treached_eof\n}\tt_parse_error;\n\nt_dict\t\t\tft_load_default_dictionary(void);\nt_dict\t\t\tft_load_dictionary(char *path);\nvoid\t\t\tft_free_dictionary(t_dict *dict);\n\nvoid\t\t\tft_read_line(t_dict_entry *entr, int fd, t_parse_error *err);\nchar\t\t\t*ft_clean_line(char *str);\nt_parse_error\tft_process_line(t_dict_entry *entry, char *line, UINT length);\nint\t\t\t\tft_count_valid_line(char *path);\nt_bool\t\t\tft_load_valid_line(char *path, int size, t_dict *dict);\n\nvoid\t\t\tft_swap_dictionary_entry(t_dict_entry *a, t_dict_entry *b);\nvoid\t\t\tft_update_normal_flag(t_dict *dict);\nvoid\t\t\tft_do_sort_dict(t_dict *dict, int start, int end,\n\t\t\t\t\t\t\t\tint (*func)(t_dict_entry*, t_dict_entry*));\nvoid\t\t\tft_sort_dictionary(t_dict *dict);\n\nint\t\t\t\tft_sort_dict_f_normal(t_dict_entry *a, t_dict_entry *b);\nint\t\t\t\tft_sort_dict_f_value(t_dict_entry *a, t_dict_entry *b);\n\n#endif\n"
},
{
"alpha_fraction": 0.7132866978645325,
"alphanum_fraction": 0.7132866978645325,
"avg_line_length": 16.875,
"blob_id": "61434b37af958a182c85f4e4c53c739a13d338cd",
"content_id": "0816b1db6099ac5a37e1f4eb72b27ebb2d6e55f9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 143,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 8,
"path": "/C10/ex02/includes/ft_tail_out.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_TAIL_OUT_H\n# define FT_TAIL_OUT_H\n\nvoid\ttail_by_lines(int fd);\n\nvoid\ttail_by_byte_count(int fd, unsigned int byte_to_read);\n\n#endif\n"
},
{
"alpha_fraction": 0.7238805890083313,
"alphanum_fraction": 0.7313432693481445,
"avg_line_length": 21.33333396911621,
"blob_id": "93bd88b23a5a2a402af9e65cbec341b6184af6cc",
"content_id": "8c8e07317eaef1108f7606014ac3c4ea789bdd00",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 268,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 12,
"path": "/C10/ex03/includes/ft_write_number.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_WRITE_NUMBER_H\n# define FT_WRITE_NUMBER_H\n\n# define MAX_BUFFER_SIZE 14\n\nvoid\tft_buffer_number(int number, int radix, int buffer[], int index);\n\nvoid\tft_write_number(unsigned int number, int radix, int char_count);\n\nvoid\tft_write_safe_char(char *c);\n\n#endif\n"
},
{
"alpha_fraction": 0.5732558369636536,
"alphanum_fraction": 0.5860465168952942,
"avg_line_length": 16.200000762939453,
"blob_id": "b4cd4a4d026cfd1a1c4793b53243254f9d68c793",
"content_id": "4e8abadfd24be9c9dedb19d3c696d8fed6000af0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 860,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 50,
"path": "/C08/ex04/ft_strs_to_tab.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdlib.h>\n\n#include \"ft_stock_str.h\"\n\nint\t\t\t\t\tft_str_length(char *str)\n{\n\tint\tindex;\n\n\tindex = 0;\n\twhile (str[index])\n\t\tindex++;\n\treturn (index);\n}\n\nchar\t\t\t\t*ft_strdup(char *src)\n{\n\tint\t\tindex;\n\tchar\t*dest;\n\n\tindex = 0;\n\tif ((dest = (char *)malloc(ft_str_length(src) * sizeof(char) + 1)) == NULL)\n\t\treturn (0);\n\twhile (src[index])\n\t{\n\t\tdest[index] = src[index];\n\t\tindex++;\n\t}\n\tdest[index] = '\\0';\n\treturn (dest);\n}\n\nstruct s_stock_str\t*ft_strs_to_tab(int ac, char **av)\n{\n\tint\t\t\t\t\tindex;\n\tstruct s_stock_str\t*array;\n\n\tac = ac + 0;\n\tif (!(array = malloc((ac + 1) * sizeof(struct s_stock_str))))\n\t\treturn (NULL);\n\tindex = 0;\n\twhile (index < ac)\n\t{\n\t\tarray[index].size = ft_str_length(av[index]);\n\t\tarray[index].str = av[index];\n\t\tarray[index].copy = ft_strdup(av[index]);\n\t\tindex++;\n\t}\n\tarray[index] = (struct s_stock_str){0, 0, 0};\n\treturn (array);\n}\n"
},
{
"alpha_fraction": 0.3785780370235443,
"alphanum_fraction": 0.39612188935279846,
"avg_line_length": 28.2702693939209,
"blob_id": "d408f4e382755fce34f56d4368e4d11cae774609",
"content_id": "a0466a879c1bdebd58f2d443c176efa1284f4f26",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2166,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 74,
"path": "/C12/ex12/ft_list_remove_if_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_list_remove_if.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/22 18:07:15 by ecaceres #+# #+# */\n/* Updated: 2019/08/22 18:07:15 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdlib.h>\n\n#include \"ft_list.h\"\n\n#include <stdio.h>\n\nvoid\tft_list_remove_if(t_list **begin_list, void *data_ref, int (*cmp)(),\n\t\t\t\tvoid (*free_fct)(void *))\n{\n\tt_list\t*removed;\n\n\tif (begin_list == 0 || (*begin_list)->next == 0)\n\t\treturn ;\n\tif ((*cmp)((*begin_list)->next->data, data_ref) == 0)\n\t{\n\t\tremoved = (*begin_list)->next;\n\t\t(*begin_list)->next = removed->next;\n\t\t(*free_fct)(removed->data);\n\t\tfree(removed);\n\t}\n\tft_list_remove_if(&(*begin_list)->next, data_ref, cmp, free_fct);\n}\n\nvoid\tdelete_element(void *element)\n{\n\tprintf(\"element:: %d\\n\", *((int *)element));\n\tfree(element);\n}\n\nint\t\tcompare_modulo(void *data, void *data_ref)\n{\n\treturn (*((int *)data) % *((int *)data_ref));\n}\n\nint\t\tmain(void)\n{\n\tint\t\t*index;\n\tint\t\t*modulo;\n\tint\t\t*malloced_index;\n\tt_list\t*list;\n\n\tindex = malloc(sizeof(int));\n\t*index = 0;\n\tlist = ft_create_elem(index);\n\twhile ((*index += 1) < 10)\n\t{\n\t\tmalloced_index = malloc(sizeof(int));\n\t\t*malloced_index = *index;\n\t\tft_list_push_back(&list, (void *)malloced_index);\n\t}\n\tmodulo = malloc(sizeof(int));\n\t*modulo = 2;\n\tft_list_remove_if(&list, modulo, &compare_modulo, &delete_element);\n\tlist = list->next;\n\t*index = 0;\n\twhile (list)\n\t{\n\t\tprintf(\"list[%d] = %d\\n\", *index, *((int *)list->data));\n\t\tlist = list->next;\n\t\t*index += 1;\n\t}\n}\n"
},
{
"alpha_fraction": 0.4758364260196686,
"alphanum_fraction": 0.49070632457733154,
"avg_line_length": 10.69565200805664,
"blob_id": "e67d8305f84eacf03548908921f470b84d00ae9d",
"content_id": "4c1bcbe6bb1c4ce5e90bdb97265875bf680b2fd8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 269,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 23,
"path": "/C02/ex07/ft_strupcase.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdbool.h>\n\nchar\t*ft_strupcase(char *str)\n{\n\tint\t\tindex;\n\tchar\tcurr;\n\n\tindex = 0;\n\twhile (true)\n\t{\n\t\tcurr = str[index];\n\t\tif (curr == '\\0')\n\t\t{\n\t\t\tbreak ;\n\t\t}\n\t\tif (curr >= 'a' && curr <= 'z')\n\t\t{\n\t\t\tstr[index] = curr - 32;\n\t\t}\n\t\tindex++;\n\t}\n\treturn (str);\n}\n"
},
{
"alpha_fraction": 0.5854700803756714,
"alphanum_fraction": 0.6025640964508057,
"avg_line_length": 18.5,
"blob_id": "0e5de30b0eb2e80d485a6a971201ca9247f5ff7f",
"content_id": "e9998b7f4e490569a9c644e586b0b6a681e17028",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 234,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 12,
"path": "/C12/ex05/ft_list_push_strs.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include \"ft_list.h\"\n\nt_list\t*ft_list_push_strs(int size, char **strs)\n{\n\tt_list\t*list;\n\n\tif (size == 0)\n\t\treturn (0);\n\tlist = ft_create_elem((void *)strs[size - 1]);\n\tlist->next = ft_list_push_strs(size - 1, strs);\n\treturn (list);\n}\n"
},
{
"alpha_fraction": 0.2519628703594208,
"alphanum_fraction": 0.2790863811969757,
"avg_line_length": 31.581396102905273,
"blob_id": "a45dcb620adca2dba576a110a0b1454b629160f9",
"content_id": "1c866273e61d69a4b49b206fffa73e14e7147487",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1401,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 43,
"path": "/Rush01/ex00/maths.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* maths.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/10 12:20:14 by ecaceres #+# #+# */\n/* Updated: 2019/08/10 12:20:27 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include \"boolean.h\"\n\nint\tft_sqrt(int nb, t_bool return_zero_if_float)\n{\n\tunsigned int\t\tsqrt;\n\tunsigned int\t\tindex;\n\n\tif (nb < 0)\n\t\treturn (0);\n\tif (nb <= 1)\n\t\treturn (nb);\n\tindex = 0;\n\twhile ((sqrt = index * index) <= (unsigned int)nb)\n\t\tindex++;\n\tindex -= 1;\n\tif (return_zero_if_float)\n\t\treturn (index * index == (unsigned int)nb ? index : 0);\n\treturn (index);\n}\n\nint\tft_power(int nb, int power)\n{\n\tint result;\n\n\tif (power < 0)\n\t\treturn (0);\n\tresult = 1;\n\twhile (power-- > 0)\n\t\tresult *= nb;\n\treturn (result);\n}\n"
},
{
"alpha_fraction": 0.726190447807312,
"alphanum_fraction": 0.726190447807312,
"avg_line_length": 13,
"blob_id": "a0494ea3d772ea47ef8d4462f6902b60ecb585be",
"content_id": "c2dbfd2706e7c6003b15920a3f8be75bb8bfb5da",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 84,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 6,
"path": "/C11/ex05/includes/ft_number.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_NUMBER_H\n# define FT_NUMBER_H\n\nvoid\tft_number_write(int number);\n\n#endif\n"
},
{
"alpha_fraction": 0.43391987681388855,
"alphanum_fraction": 0.44926995038986206,
"avg_line_length": 26.536083221435547,
"blob_id": "1925c5cfaf9fd9db927b773308cd4e56624e2e2c",
"content_id": "328240e5396aef66d360e0cbf59a6d639200c08e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2671,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 97,
"path": "/Rush02/ex00/srcs/ft_itow.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_itow.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/17 15:44:01 by ecaceres #+# #+# */\n/* Updated: 2019/08/17 15:44:01 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdio.h>\n\n#include \"ft_boolean.h\"\n#include \"ft_number_dictionary.h\"\n#include \"ft_str.h\"\n\n#define NOT_FOUND -1\n\nint\t\tft_resolve_dict_entry_index(t_dict *dict, ULNG number)\n{\n\tint\t\tindex;\n\n\tindex = 0;\n\twhile (index < dict->size)\n\t{\n\t\tif (dict->entries[index].value == number)\n\t\t\treturn (index);\n\t\tindex++;\n\t}\n\treturn (NOT_FOUND);\n}\n\nt_bool\tft_itow_is_value_power_of_ten(ULNG number)\n{\n\tULNG\t\tpower;\n\n\tpower = 1;\n\twhile (power <= number)\n\t{\n\t\tif (power == number)\n\t\t\treturn (true);\n\t\tpower *= 10;\n\t}\n\treturn (false);\n}\n\nvoid\tft_itow_print_if(t_bool print, t_bool *put_space, char *str)\n{\n\tif (!print)\n\t\treturn ;\n\tif (*put_space)\n\t\tft_str_write(\" \");\n\t*put_space = true;\n\tft_str_write(str);\n}\n\nt_bool\tft_itow_short(t_dict *dict, ULNG number, t_bool *put_sp, t_bool prnt)\n{\n\tint\t\tdict_index;\n\n\tdict_index = ft_resolve_dict_entry_index(dict, number);\n\tif (dict_index == NOT_FOUND)\n\t\treturn (false);\n\tft_itow_print_if(prnt, put_sp, dict->entries[dict_index].str);\n\treturn (true);\n}\n\nt_bool\tft_itow(t_dict *dict, ULNG number, t_bool *put_space, t_bool print)\n{\n\tint\t\tindex;\n\tULNG\tmult;\n\tULNG\tvalue;\n\n\tif ((number <= 20 && ft_itow_short(dict, number, 0, false))\n\t\t\t|| (ft_resolve_dict_entry_index(dict, number) != NOT_FOUND\n\t\t\t\t\t&& !ft_itow_is_value_power_of_ten(number)))\n\t\treturn (ft_itow_short(dict, number, put_space, print));\n\tindex = dict->size - 1;\n\twhile ((value = dict->entries[index].value) != 0)\n\t{\n\t\tif (number % value != number)\n\t\t{\n\t\t\tmult = number / value;\n\t\t\tif (value > 99)\n\t\t\t\tif (!ft_itow(dict, mult, put_space, print))\n\t\t\t\t\treturn (false);\n\t\t\tft_itow_print_if(print, put_space, dict->entries[index].str);\n\t\t\tif (number - mult * value == 0)\n\t\t\t\treturn (true);\n\t\t\treturn (ft_itow(dict, number - mult * value, put_space, print));\n\t\t}\n\t\tindex--;\n\t}\n\treturn (true);\n}\n"
},
{
"alpha_fraction": 0.6709265112876892,
"alphanum_fraction": 0.6837060451507568,
"avg_line_length": 23.076923370361328,
"blob_id": "268086362a35dd145eba899d8c3e610ac64551ea",
"content_id": "1a3f297d577fe2f2cf981d119497fa1fcf306b2d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 313,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 13,
"path": "/C05/ex03/ft_recursive_power.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "int\tft_recursive_power_recursive(int number, int power, int result)\n{\n\tif (power-- > 0)\n\t\treturn (ft_recursive_power_recursive(number, power, result *= number));\n\treturn (result);\n}\n\nint\tft_recursive_power(int nb, int power)\n{\n\tif (power < 0)\n\t\treturn (0);\n\treturn (ft_recursive_power_recursive(nb, power, 1));\n}\n"
},
{
"alpha_fraction": 0.48356807231903076,
"alphanum_fraction": 0.5023474097251892,
"avg_line_length": 10.833333015441895,
"blob_id": "ab45cdd574d2a33471c83c5aaa66ca2f90f5a733",
"content_id": "076c52a3d7504e6d3f6de94fe18bbb148307b5a9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 213,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 18,
"path": "/C11/ex02/ft_any.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "int\tft_any(char **tab, int (*f)(char*))\n{\n\tint\tindex;\n\tint\tresult;\n\n\tresult = 0;\n\tindex = 0;\n\twhile (tab[index])\n\t{\n\t\tif ((*f)(tab[index]) != 0)\n\t\t{\n\t\t\tresult = 1;\n\t\t\tbreak ;\n\t\t}\n\t\tindex++;\n\t}\n\treturn (result);\n}\n"
},
{
"alpha_fraction": 0.665083110332489,
"alphanum_fraction": 0.6817102432250977,
"avg_line_length": 15.192307472229004,
"blob_id": "cf5e7c9525bac6bf23fa8f8e96855267850b1367",
"content_id": "6ba889e0c8d7bda4056d91be561bfe8294671fe4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 421,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 26,
"path": "/C10/ex00/includes/display_file.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef DISPLAY_FILE_H\n# define DISPLAY_FILE_H\n\n# include <stdbool.h>\n\n# define BUFFER_SIZE 1024\n\n# define MSG_ERR_NO_FILE \"File name missing.\\n\"\n# define MSG_ERR_TOO_MANY_ARGS \"Too many arguments.\\n\"\n# define MSG_ERR_CANNOT_READ \"Cannot read file.\\n\"\n\n# define IN 0\n# define OUT 1\n# define ERR 2\n\n/*\n** DISPLAY_FILE.c\n*/\nbool\tdisplay_file(char *path);\n\n/*\n** STRING_UTILS.c\n*/\nvoid\tstr_write(int fd, char *str);\n\n#endif\n"
},
{
"alpha_fraction": 0.7064220309257507,
"alphanum_fraction": 0.7064220309257507,
"avg_line_length": 17.16666603088379,
"blob_id": "59cc2c418d26378bea1dc9fd191a9b9b0af4d315",
"content_id": "ab75592e69aabd7527e05eef9ba5e4b36f9964c2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 109,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 6,
"path": "/C10/ex03/includes/ft_strncpy.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_STRNCPY_H\n# define FT_STRNCPY_H\n\nchar\t*ft_strncpy(char *dest, char *src, unsigned int n);\n\n#endif\n"
},
{
"alpha_fraction": 0.5819672346115112,
"alphanum_fraction": 0.6803278923034668,
"avg_line_length": 19.33333396911621,
"blob_id": "551717f970e4586da57c172bc65205886f95b168",
"content_id": "7db462366baa0d8ec9097d7a5d83b6923b47eaa8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 122,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 6,
"path": "/Rush02/ex00/lot.py",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "import os\nimport time\n\nfor i in range(0, 50000):\n\tos.system(\"./rush-02 numbers_french.dict \" + str(i))\n\ttime.sleep(0.001)\n"
},
{
"alpha_fraction": 0.35211995244026184,
"alphanum_fraction": 0.37693899869918823,
"avg_line_length": 28.303030014038086,
"blob_id": "1689c49521b8578c1b628ae776f78a54eefd87dc",
"content_id": "36a4cf410bb79bfcb5532900c4805523642653c5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1934,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 66,
"path": "/C03/ex05/ft_strlcat_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_strlcat.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/04 15:48:24 by ecaceres #+# #+# */\n/* Updated: 2019/08/04 15:49:10 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdio.h>\n#include <string.h>\n\nunsigned int\tft_str_length_fast(char *dest)\n{\n\tunsigned int\tcount;\n\n\tcount = 0;\n\twhile (dest[count] != '\\0')\n\t\tcount++;\n\treturn (count);\n}\n\nunsigned int\tft_strlcat(char *dest, char *src, unsigned int size)\n{\n\tunsigned int\tlength_dest;\n\tunsigned int\tlength_src;\n\tunsigned int\tindex;\n\n\tlength_dest = ft_str_length_fast(dest);\n\tlength_src = ft_str_length_fast(src);\n\tif (length_dest >= size)\n\t\treturn (length_src + size);\n\tif (size == 0)\n\t\treturn (length_src);\n\tindex = 0;\n\twhile (src[index] != '\\0' && index < (size - 1 - length_dest))\n\t{\n\t\tdest[length_dest + index] = src[index];\n\t\tindex++;\n\t}\n\treturn (length_dest + length_src);\n}\n\nint\t\t\t\tmain(void)\n{\n\tchar\t*str_base;\n\tchar\tdest[100];\n\tchar\tdest2[100];\n\tchar\t*src;\n\tint\t\tindex;\n\n\tstr_base = \"Hello\";\n\tsrc = \" World\";\n\tindex = 0;\n\twhile (index < 6)\n\t{\n\t\tdest[index] = str_base[index];\n\t\tdest2[index] = str_base[index];\n\t\tindex++;\n\t}\n\tprintf(\"c : (%lu) $%s$\\n\", strlcat(dest, src, 0), dest);\n\tprintf(\"ft : (%d) $%s$\\n\", ft_strlcat(dest2, src, 0), dest2);\n}\n"
},
{
"alpha_fraction": 0.26039665937423706,
"alphanum_fraction": 0.30582213401794434,
"avg_line_length": 32.9782600402832,
"blob_id": "e3ef18b7e1a39a6d80b059b32ccd1d8b0b83596e",
"content_id": "ebb0bdc3473408fb7efc7338edca3d4cf5bf3871",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1563,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 46,
"path": "/C05/ex05/ft_sqrt_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_sqrt.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/06 13:04:32 by ecaceres #+# #+# */\n/* Updated: 2019/08/06 13:04:33 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdio.h>\n#include <limits.h>\n\nint\tft_sqrt(int nb)\n{\n\tunsigned int\t\tsqrt;\n\tunsigned int\t\tindex;\n\n\tif (nb < 0)\n\t\treturn (0);\n\tif (nb <= 1)\n\t\treturn (nb);\n\tindex = 0;\n\twhile ((sqrt = index * index) <= (unsigned int)nb)\n\t\tindex++;\n\tindex -= 1;\n\treturn (index * index == (unsigned int)nb ? index : 0);\n}\n\nint\tmain(void)\n{\n\tint number;\n\n\tnumber = -2;\n\twhile (number < 26)\n\t{\n\t\tprintf(\"sqrt(%d) = %d\\n\", number, ft_sqrt(number));\n\t\tnumber++;\n\t}\n\tprintf(\"sqrt(%d) = %d\\n\", 1000000, ft_sqrt(1000000));\n\tprintf(\"sqrt(%d) = %d\\n\", INT_MAX, ft_sqrt(INT_MAX));\n\tprintf(\"sqrt(%d) = %d\\n\", INT_MIN, ft_sqrt(INT_MIN));\n\tprintf(\"sqrt(%d) = %d\\n\", 2147395600, ft_sqrt(2147395600));\n}\n"
},
{
"alpha_fraction": 0.6262626051902771,
"alphanum_fraction": 0.6464646458625793,
"avg_line_length": 10,
"blob_id": "3cf69ad1f897741e02f08f0871bf6479cf846156",
"content_id": "733447848d801a025d875b8be0c9ac7cac58460e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 99,
"license_type": "no_license",
"max_line_length": 21,
"num_lines": 9,
"path": "/C11/ex05/includes/ft_boolean.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_BOOLEAN_H\n# define FT_BOOLEAN_H\n\ntypedef enum {\n\tfalse = 0,\n\ttrue = 1\n}\tt_bool;\n\n#endif\n"
},
{
"alpha_fraction": 0.2651413083076477,
"alphanum_fraction": 0.2893674373626709,
"avg_line_length": 28.719999313354492,
"blob_id": "9846e1095e95b8599d16fe3233da3d5a15f0bca7",
"content_id": "9bc257f2cea51c7358f7c4ec663956442cd9c62c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1486,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 50,
"path": "/Rush01/ex00/main.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* main.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/10 12:16:27 by ecaceres #+# #+# */\n/* Updated: 2019/08/10 12:16:28 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include \"boolean.h\"\n#include \"string_utils.h\"\n#include \"is.h\"\n#include \"solver.h\"\n\nint\t\tcount_numbers(char *str)\n{\n\tint\tcount;\n\n\tcount = 0;\n\twhile (*str)\n\t{\n\t\tif (is_number(*str))\n\t\t\tcount++;\n\t\tstr++;\n\t}\n\treturn (count);\n}\n\nint\t\tmain(int argc, char **argv)\n{\n\tt_bool\terror;\n\tint\t\tnbr_count;\n\tint\t\tgrid_size;\n\n\terror = (argc != 2);\n\tif (!error)\n\t{\n\t\tnbr_count = count_numbers(argv[1]);\n\t\tgrid_size = nbr_count / 4;\n\t\terror = (grid_size < 3 || grid_size > 9 || nbr_count != grid_size * 4);\n\t\tif (!error)\n\t\t\terror = !solve(grid_size, argv[1]);\n\t}\n\tif (error)\n\t\tstr_write(\"Error\");\n\tstr_write(\"\\n\");\n}\n"
},
{
"alpha_fraction": 0.6768060922622681,
"alphanum_fraction": 0.6882129311561584,
"avg_line_length": 13.61111068725586,
"blob_id": "d87d61e5240bf0bd7ab2e1ef43422e52af10d680",
"content_id": "8e37564d4a4f6abe6ea38e5ff0da92ee130726b2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 263,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 18,
"path": "/C10/ex02/includes/ft_console_io.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_CONSOLE_IO_H\n# define FT_CONSOLE_IO_H\n\n# define IN 0\n# define OUT 1\n# define ERR 2\n\nvoid\twrite_char_to(char c, int fd);\n\nvoid\twrite_char_out(char c);\n\nvoid\twrite_str_out(char *str);\n\nvoid\twrite_char_err(char c);\n\nvoid\twrite_str_err(char *str);\n\n#endif\n"
},
{
"alpha_fraction": 0.6468085050582886,
"alphanum_fraction": 0.6765957474708557,
"avg_line_length": 12.823529243469238,
"blob_id": "a7fa3485d799e36ecc10f87ab52664e85e62c195",
"content_id": "f108290aa9ffabbab0f8a45182894c23fa462507",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 235,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 17,
"path": "/C10/ex01/includes/cat.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef CAT_H\n# define CAT_H\n\n# include <stdbool.h>\n\n# define IN 0\n# define OUT 1\n# define ERR 2\n# define BUFFER_SIZE 1024\n\nvoid\tstdin_loop();\n\nbool\tcat(char **paths, int count, int offset);\n\nvoid\tstr_write(int fd, char *str);\n\n#endif\n"
},
{
"alpha_fraction": 0.6027204394340515,
"alphanum_fraction": 0.6083489656448364,
"avg_line_length": 18.740739822387695,
"blob_id": "76c3308ca1a8171e4387544a63a08cd0d7041c25",
"content_id": "62e353bc1231fc03fc64c6274eb21f8bee7bbb74",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2132,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 108,
"path": "/C10/ex02/srcs/ft_args_parser.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <libgen.h>\n\n#include \"ft_atoi.h\"\n#include \"ft_console_io.h\"\n#include \"ft_args_parser.h\"\n#include \"ft_tail.h\"\n\n#define EXEC args[0]\n#define DERROR display_error\n#define DERR_A_USG(e, er, c, l) DERROR(e, er, c, l); display_usage(EXEC)\n\n#define ERR_ILLEGAL_OPT \"illegal option\"\n#define ERR_ILLEGAL_OFF \"illegal offset\"\n#define ERR_OPT_REQ_ARG \"option requires an argument\"\n\nbool\tparse_args(char **args, int count)\n{\n\tt_options\toptions;\n\tint\t\t\tindex;\n\n\toptions.byte_to_read = NOTHING;\n\tindex = 1;\n\twhile (index < count)\n\t{\n\t\tif (args[index][0] == '-' && args[index][1] != '\\0')\n\t\t{\n\t\t\tif (args[index][1] == 'c')\n\t\t\t{\n\t\t\t\tif (!proc_option(args, count, &index, &options))\n\t\t\t\t\treturn (false);\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\tDERR_A_USG(EXEC, ERR_ILLEGAL_OPT, (args[index] + 1), true);\n\t\t\t\treturn (false);\n\t\t\t}\n\t\t}\n\t\telse\n\t\t\tbreak ;\n\t\tindex++;\n\t}\n\treturn (process_tail(args, count, index, &options));\n}\n\nbool\tprocess_tail(char **args, int count, int index, t_options *opts)\n{\n\tbool\tresult;\n\tint\t\ttotal;\n\n\tresult = true;\n\ttotal = count - index;\n\tif (total == 0)\n\t\tstdin_tail(opts);\n\telse\n\t\twhile (index < count)\n\t\t{\n\t\t\tif (!tail(EXEC, args[index], opts, total))\n\t\t\t\tresult = false;\n\t\t\tindex++;\n\t\t}\n\treturn (result);\n}\n\nbool\tproc_option(char **args, int count, int *index, t_options *opts)\n{\n\tint\t\tatoi;\n\n\tif (*index + 1 < count)\n\t{\n\t\tif (opts->byte_to_read == NOTHING)\n\t\t{\n\t\t\t*index += 1;\n\t\t\tatoi = ft_abs_strict_atoi(args[*index]);\n\t\t\tif (atoi != -1)\n\t\t\t{\n\t\t\t\topts->byte_to_read = atoi;\n\t\t\t\treturn (true);\n\t\t\t}\n\t\t\tDERROR(EXEC, ERR_ILLEGAL_OFF, args[*index], false);\n\t\t\treturn (false);\n\t\t}\n\t\tdisplay_usage(EXEC);\n\t\treturn (false);\n\t}\n\tDERROR(EXEC, ERR_OPT_REQ_ARG, (char *)(args[*index] + 1), true);\n\tdisplay_usage(EXEC);\n\treturn (false);\n}\n\nvoid\tdisplay_usage(char *exec)\n{\n\twrite_str_err(\"usage: \");\n\twrite_str_err(basename(exec));\n\twrite_str_err(\" [-c #] [file ...]\\n\");\n}\n\nvoid\tdisplay_error(char *exec, char *error, char *cause, bool limit)\n{\n\twrite_str_err(basename(exec));\n\twrite_str_err(\": \");\n\twrite_str_err(error);\n\twrite_str_err(\" -- \");\n\tif (limit)\n\t\twrite_char_err(*cause);\n\telse\n\t\twrite_str_err(cause);\n\twrite_str_err(\"\\n\");\n}\n"
},
{
"alpha_fraction": 0.2503311336040497,
"alphanum_fraction": 0.27417218685150146,
"avg_line_length": 25.964284896850586,
"blob_id": "6e552513bbdc4f3c075c6d95bf83cd31bc706962",
"content_id": "3fbfb392270e13cc9fb27b89525f42499b8f8c25",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1510,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 56,
"path": "/Exam01/work-done/last_word/last_word.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* last_word.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: exam <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/09 18:35:59 by exam #+# #+# */\n/* Updated: 2019/08/09 18:36:01 by exam ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdbool.h>\n#include <unistd.h>\n\nbool\tis_space(char c)\n{\n\treturn (c == ' ' || c == '\\t');\n}\n\nvoid\twrite_last_word(char *str)\n{\n\tint\tindex;\n\tbool\tin_word;\n\tchar\t*last_word;\n\n\tindex = 0;\n\tlast_word = str;\n\twhile (*str)\n\t{\n\t\tif (is_space(*str) && in_word)\n\t\t\tin_word = false;\n\t\telse if (!is_space(*str) && !in_word)\n\t\t{\n\t\t\tin_word = true;\n\t\t\tlast_word = str;\n\t\t}\n\t\tstr++;\n\t}\n\tif (last_word == 0)\n\t\treturn ;\n\twhile (*last_word)\n\t{\n\t\tif (is_space(*last_word))\n\t\t\tbreak ;\n\t\twrite(1, last_word, 1);\n\t\tlast_word++;\n\t}\n}\n\nint\t\tmain(int argc, char **argv)\n{\n\tif (argc == 2)\n\t\twrite_last_word(argv[1]);\n\twrite(1, \"\\n\", 1);\n}\n"
},
{
"alpha_fraction": 0.6829268336296082,
"alphanum_fraction": 0.6829268336296082,
"avg_line_length": 12.666666984558105,
"blob_id": "957e8d6d6fb4a88a8c3a023436951ca125b8f89a",
"content_id": "6e74c2cc92b48136a68dbf7e015cde84ba2e885d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 82,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 6,
"path": "/C10/ex02/includes/ft_atoi.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_ATOI_H\n# define FT_ATOI_H\n\nint\t\tft_abs_strict_atoi(char *str);\n\n#endif\n"
},
{
"alpha_fraction": 0.6937984228134155,
"alphanum_fraction": 0.6937984228134155,
"avg_line_length": 17.428571701049805,
"blob_id": "f574e1127b6dcf83099ed54dc0c93c71ba2d514e",
"content_id": "466438276d988234d07fa30417c12d1f5aae245b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 258,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 14,
"path": "/C11/ex05/includes/ft_operation.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_OPERATION_H\n# define FT_OPERATION_H\n\nint\t\tft_operation_add(int a, int b);\n\nint\t\tft_operation_minus(int a, int b);\n\nint\t\tft_operation_devide(int a, int b);\n\nint\t\tft_operation_multiply(int a, int b);\n\nint\t\tft_operation_modulo(int a, int b);\n\n#endif\n"
},
{
"alpha_fraction": 0.3300471603870392,
"alphanum_fraction": 0.3694813549518585,
"avg_line_length": 27.108434677124023,
"blob_id": "731d4733dba43fb18f514796128be6225fcb1b5d",
"content_id": "1cac267be2f3bd2376bc923c57c950322e0e4866",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2333,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 83,
"path": "/C11/ex04/ft_is_sort_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_is_sort.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/16 15:36:30 by ecaceres #+# #+# */\n/* Updated: 2019/08/20 17:04:13 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdlib.h>\n#include <stdio.h>\n\nint\t\tft_is_sort(int *tab, int length, int (*f)(int, int))\n{\n\tint\t\tindex;\n\tint\t\tsorted;\n\n\tsorted = 1;\n\tindex = 0;\n\twhile (index < length - 1 && sorted)\n\t{\n\t\tif ((*f)(tab[index], tab[index + 1]) < 0)\n\t\t\tsorted = 0;\n\t\tindex++;\n\t}\n\tif (sorted != 1)\n\t{\n\t\tsorted = 1;\n\t\tindex = 0;\n\t\twhile (index < length - 1)\n\t\t{\n\t\t\tif ((*f)(tab[index], tab[index + 1]) > 0)\n\t\t\t\treturn (0);\n\t\t\tindex++;\n\t\t}\n\t}\n\treturn (1);\n}\n\nint\t\tft_less_than(int a, int b)\n{\n\treturn (b - a);\n}\n\nint\t\tft_more_than(int a, int b)\n{\n\treturn (a - b);\n}\n\nint\t\tmain(void)\n{\n\tstatic int\ttabx[] = { 1, 2, 2, 2, 3, 3, 4, 5, 6, 6, 6, 6, 7 };\n\tint\t\t\tindex;\n\tint\t\t\t*tab;\n\tint\t\t\tlength;\n\n\tlength = 10;\n\ttab = malloc(length * sizeof(int));\n\tindex = 0;\n\twhile (index < length)\n\t{\n\t\ttab[index] = index;\n\t\tindex++;\n\t}\n\tprintf(\"ft_is_sorted(1.........10) = %d\\n\", ft_is_sort(tab, length, &ft_less_than));\n\tindex = 0;\n\twhile (index < length)\n\t{\n\t\ttab[length - 1 - index] = index;\n\t\tindex++;\n\t}\n\tprintf(\"ft_is_sorted(10.........1) = %d\\n\", ft_is_sort(tab, length, &ft_more_than));\n\ttab[5] = 100;\n\tprintf(\"ft_is_sorted(10...100...1) = %d\\n\", ft_is_sort(tab, length, &ft_more_than));\n\ttab[5] = 4;\n\ttab[9] = 100;\n\tprintf(\"ft_is_sorted(10.....1.100) = %d\\n\", ft_is_sort(tab, length, &ft_more_than));\n\tprintf(\"ft_is_sorted(.tabx.......) = %d\\n\", ft_is_sort(tabx, 13, &ft_more_than));\n\tprintf(\"ft_is_sorted(.tabx.......) = %d\\n\", ft_is_sort(tabx, 13, &ft_less_than));\n}\n"
},
{
"alpha_fraction": 0.625,
"alphanum_fraction": 0.6299999952316284,
"avg_line_length": 21.22222137451172,
"blob_id": "f7e6e8a5ececbe925f5c396bec24c9e1573f076d",
"content_id": "a3eec2f9669062c6365c7a9d469c5c7b6218d366",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 200,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 9,
"path": "/C12/ex04/ft_list_push_back.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include \"ft_list.h\"\n\nvoid\tft_list_push_back(t_list **begin_list, void *data)\n{\n\tif (*begin_list == 0)\n\t\t*begin_list = ft_create_elem(data);\n\telse\n\t\tft_list_push_back(&((*begin_list)->next), data);\n}\n"
},
{
"alpha_fraction": 0.2991202473640442,
"alphanum_fraction": 0.33079180121421814,
"avg_line_length": 23.014083862304688,
"blob_id": "8f4afdfafd6b544bfb09c01270cee42aae9fa7a1",
"content_id": "d119b4326e06429fa795a78a41c598140950ec41",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1705,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 71,
"path": "/ExamFinal/work-done/union/union.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* union.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: exam <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/23 11:07:43 by exam #+# #+# */\n/* Updated: 2019/08/23 11:07:44 by exam ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <unistd.h>\n#include <stdbool.h>\n\nint\t\tft_str_length(char *str)\n{\n\tint\tlength;\n\n\tlength = 0;\n\twhile (str[length])\n\t\tlength++;\n\treturn (length);\n}\n\nvoid\tft_union_print_if(bool already[255], char c)\n{\n\tif (!already[(int)c])\n\t{\n\t\talready[(int)c] = true;\n\t\twrite(1, &c, 1);\n\t}\n}\n\nvoid\tft_union_do_str(bool already[255], char *str)\n{\n\tint\tindex;\n\tint\tlength;\n\n\tindex = 0;\n\tlength = ft_str_length(str);\n\twhile (index < length)\n\t{\n\t\tft_union_print_if(already, str[index]);\n\t\tindex++;\n\t}\n}\n\nvoid\tft_union(char *str1, char *str2)\n{\n\tint\tindex;\n\tbool\talready[255];\n\n\tindex = 0;\n\twhile (index < 255)\n\t{\n\t\talready[index] = false;\n\t\tindex++;\n\t}\n\tft_union_do_str(already, str1);\n\tft_union_do_str(already, str2);\n}\n\nint\t\tmain(int argc, char **argv)\n{\n\tif (argc == 3)\n\t{\n\t\tft_union(argv[1], argv[2]);\n\t}\n\twrite(1, \"\\n\", 1);\n}\n"
},
{
"alpha_fraction": 0.30588236451148987,
"alphanum_fraction": 0.3327731192111969,
"avg_line_length": 23.79166603088379,
"blob_id": "4ba8a6b253951a38c11fb0ebd5d6da69e0dbbb81",
"content_id": "61ffdba8452349ee296a70d05bd4d9932fc405ea",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1785,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 72,
"path": "/C05/ex07/ft_find_next_prime_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_find_next_prime.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/06 14:52:17 by ecaceres #+# #+# */\n/* Updated: 2019/08/06 14:52:19 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdbool.h>\n#include <stdio.h>\n#include <limits.h>\n\nint\tft_sqrt(int nb)\n{\n\tunsigned int\t\tsqrt;\n\tunsigned int\t\tindex;\n\n\tif (nb < 0)\n\t\treturn (0);\n\tif (nb <= 1)\n\t\treturn (nb);\n\tindex = 0;\n\twhile ((sqrt = index * index) <= (unsigned int)nb)\n\t\tindex++;\n\tindex -= 1;\n\treturn (index);\n}\n\nint\tft_is_prime(int nb)\n{\n\tint\tindex;\n\tint\tsqrt;\n\n\tif (nb <= 1)\n\t\treturn (0);\n\tif (nb <= 3)\n\t\treturn (1);\n\tif (nb % 2 == 0 || nb % 3 == 0)\n\t\treturn (0);\n\tindex = 2;\n\tsqrt = ft_sqrt(nb);\n\twhile ((index <= sqrt) && (nb % index != 0))\n\t\tindex++;\n\treturn (index > sqrt);\n}\n\nint\tft_find_next_prime(int nb)\n{\n\twhile (true)\n\t{\n\t\tif (ft_is_prime(nb))\n\t\t\treturn (nb);\n\t\tnb++;\n\t}\n}\n\nint\tmain(void)\n{\n\tint\tindex;\n\n\tindex = -2;\n\twhile (index < 100)\n\t{\n\t\tprintf(\"find_next_prime(%d) = %d\\n\", index, ft_find_next_prime(index));\n\t\tindex++;\n\t}\n\tprintf(\"find_next_prime(%d) = %d\\n\", INT_MAX, ft_find_next_prime(INT_MAX));\n}\n"
},
{
"alpha_fraction": 0.28839921951293945,
"alphanum_fraction": 0.3130265772342682,
"avg_line_length": 29.860000610351562,
"blob_id": "97cbb69f1b85e370557afad7ecc8618460faec2b",
"content_id": "27bffc230bc83ee3a0c31c17b14ee5e66611c277",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1543,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 50,
"path": "/C12/ex07/ft_list_at_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_list_at.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/22 16:30:36 by ecaceres #+# #+# */\n/* Updated: 2019/08/22 16:30:36 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdlib.h>\n\n#include \"ft_list.h\"\n\n#include <stdio.h>\n\nt_list\t*ft_list_at(t_list *begin_list, unsigned int nbr)\n{\n\tif (begin_list == 0)\n\t\treturn (NULL);\n\tif (nbr == 0)\n\t\treturn (begin_list);\n\treturn (ft_list_at(begin_list->next, nbr - 1));\n}\n\nint\t\tmain(void)\n{\n\tint\t\tindex;\n\tint\t\t*malloced_index;\n\tt_list\t*list;\n\n\tindex = 1;\n\tlist = ft_create_elem(0);\n\twhile (index < 10)\n\t{\n\t\tmalloced_index = malloc(sizeof(int));\n\t\t*malloced_index = index;\n\t\tft_list_push_back(&list, (void *)malloced_index);\n\t\tindex++;\n\t}\n\tindex = 1;\n\twhile (index < 10)\n\t{\n\t\tprintf(\"data at index %d is %d\\n\", index,\n\t\t\t*((int *)ft_list_at(list, index)->data));\n\t\tindex++;\n\t}\n}\n"
},
{
"alpha_fraction": 0.6090302467346191,
"alphanum_fraction": 0.6198050379753113,
"avg_line_length": 19.73404312133789,
"blob_id": "4465244cd0e7041f39d30d2d6013dc2d07af580d",
"content_id": "726fd98f9c0899384a4bef28cef8bc0904f25db8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1949,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 94,
"path": "/C10/ex03/srcs/ft_file_utils.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <errno.h>\n#include <libgen.h>\n#include <stdlib.h>\n#include <fcntl.h>\n#include <string.h>\n#include <stdio.h>\n#include <unistd.h>\n\n#include \"ft_file_utils.h\"\n#include \"ft_console_io.h\"\n#include \"ft_args_parser.h\"\n#include \"ft_strncpy.h\"\n\n#define IS_STD(fd) (fd >= 0 && fd <= 2)\n#define HAS_ERR (errno != 0)\n\nint\t\tft_open_file(t_options *options)\n{\n\treturn (open(options->curr_path, O_RDONLY));\n}\n\nint\t\tft_close_file(int fd)\n{\n\treturn (close(fd));\n}\n\nbool\tft_show_error(t_options *options)\n{\n\tif (errno == 0)\n\t\treturn (false);\n\twrite_str_err(basename(options->executable));\n\twrite_str_err(\": \");\n\twrite_str_err(options->curr_path);\n\twrite_str_err(\": \");\n\twrite_str_err(strerror(errno));\n\twrite_str_err(\"\\n\");\n\terrno = 0;\n\treturn (true);\n}\n\nchar\t*ft_read_multiple(t_options *opts, int count, UINT *len, int total)\n{\n\tunsigned int\tok_read;\n\tunsigned int\told_length;\n\tchar\t\t\t*dest;\n\tint\t\t\t\tindex;\n\tint\t\t\t\tfd;\n\n\tok_read = 0;\n\tdest = NULL;\n\tindex = 0;\n\twhile (index < count)\n\t{\n\t\topts->curr_path = opts->paths[index];\n\t\tfd = ft_open_file(opts);\n\t\tok_read += errno == 0 ? 1 : 0;\n\t\tif (errno != 0)\n\t\t\tft_show_error(opts);\n\t\told_length = *len;\n\t\tdest = ft_extend_array(dest, ft_read_full(fd, len), old_length, len);\n\t\tif ((index == total - 1 && HAS_ERR && ok_read == 0) || errno == EISDIR)\n\t\t\tft_show_error(opts);\n\t\tindex++;\n\t\tft_close_file(fd);\n\t\terrno = 0;\n\t}\n\treturn (dest);\n}\n\nchar\t*ft_read_full(int fd, unsigned int *length)\n{\n\tunsigned int\ttotal_read;\n\tunsigned int\tbyte_read;\n\tchar\t\t\tbuffer[BUFFER_SIZE_1K];\n\tchar\t\t\t*old;\n\tchar\t\t\t*dest;\n\n\ttotal_read = 0;\n\tdest = malloc(0);\n\twhile ((byte_read = read(fd, buffer, BUFFER_SIZE_1K)) != 0)\n\t{\n\t\tif (errno != 0)\n\t\t\treturn (dest);\n\t\told = dest;\n\t\tif (!(dest = malloc((total_read + byte_read) * sizeof(char *))))\n\t\t\treturn (0);\n\t\tft_strncpy(dest, old, total_read);\n\t\tft_strncpy(dest + total_read, buffer, byte_read);\n\t\ttotal_read += byte_read;\n\t\tfree(old);\n\t}\n\t*length += total_read;\n\treturn (dest);\n}\n"
},
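ft_read_full in the record above re-copies the whole accumulated buffer on every 1K chunk and sizes the allocation with sizeof(char *), which over-allocates (typically eightfold on 64-bit) without affecting correctness. A sketch of the usual alternative, assuming plain libc realloc is permitted (some piscine exercises forbid it):

```c
#include <stdlib.h>
#include <unistd.h>

#define CHUNK 1024

/* Read an entire fd into one heap buffer, growing capacity
** geometrically; *length receives the byte count. Returns NULL
** only on allocation failure. */
char	*read_all(int fd, unsigned int *length)
{
	char	*buf;
	char	*tmp;
	size_t	cap;
	size_t	len;
	ssize_t	n;

	cap = CHUNK;
	len = 0;
	if (!(buf = malloc(cap)))
		return (NULL);
	while ((n = read(fd, buf + len, cap - len)) > 0)
	{
		len += (size_t)n;
		if (cap - len < CHUNK)
		{
			if (!(tmp = realloc(buf, cap * 2)))
			{
				free(buf);
				return (NULL);
			}
			buf = tmp;
			cap *= 2;
		}
	}
	*length = (unsigned int)len;
	return (buf);
}
```

Doubling the capacity keeps total copying linear in the input size instead of quadratic.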
{
"alpha_fraction": 0.5869565010070801,
"alphanum_fraction": 0.5905796885490417,
"avg_line_length": 15.235294342041016,
"blob_id": "ddae8e9f8db8a91491de099920aca62ad8f48689",
"content_id": "40287ce210b05d44ebf79a8cbd98c60fc279cf97",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 276,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 17,
"path": "/C11/ex01/ft_map.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdlib.h>\n\nint\t*ft_map(int *tab, int length, int (*f)(int))\n{\n\tint\tindex;\n\tint\t*results;\n\n\tif (!(results = malloc(length * sizeof(int))))\n\t\treturn (NULL);\n\tindex = 0;\n\twhile (index < length)\n\t{\n\t\tresults[index] = (*f)(tab[index]);\n\t\tindex++;\n\t}\n\treturn (results);\n}\n"
},
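ft_map above hands back a freshly malloc'd array, so ownership passes to the caller; the dev harness later in this dump never frees it. A leak-free caller sketch (double_it is invented for the example, and the program assumes it is linked with ft_map.c):

```c
#include <stdio.h>
#include <stdlib.h>

int	*ft_map(int *tab, int length, int (*f)(int));

/* Invented callback for the example. */
int	double_it(int n)
{
	return (n * 2);
}

int	main(void)
{
	int	tab[4] = {1, 2, 3, 4};
	int	*out;
	int	i;

	if (!(out = ft_map(tab, 4, &double_it)))
		return (1);
	i = 0;
	while (i < 4)
		printf("%d ", out[i++]);	/* 2 4 6 8 */
	printf("\n");
	free(out);
	return (0);
}
```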
{
"alpha_fraction": 0.678260862827301,
"alphanum_fraction": 0.678260862827301,
"avg_line_length": 15.428571701049805,
"blob_id": "00a44dcb668d4079caf8a389962834dc484cd943",
"content_id": "a661fe1c5b724794502ae4b77b6da80c650be2d3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 115,
"license_type": "no_license",
"max_line_length": 40,
"num_lines": 7,
"path": "/C11/ex05/includes/ft_str.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_STR_H\n# define FT_STR_H\n\nvoid\tft_str_write_to(int fd, char *str);\nvoid\tft_str_write(char *str);\n\n#endif\n"
},
{
"alpha_fraction": 0.6232557892799377,
"alphanum_fraction": 0.6279069781303406,
"avg_line_length": 14.357142448425293,
"blob_id": "57c86e1a9cef11d51e1b4123e681b98e8cc33e1e",
"content_id": "a3a92950c08c865bfa8072af320e9d1ebcec503b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 215,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 14,
"path": "/C10/ex02/srcs/ft_strncpy.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n\nchar\t*ft_str_sized_copy(char *dest, char *src, unsigned int src_size)\n{\n\tunsigned int\tindex;\n\n\tindex = 0;\n\twhile (index < src_size)\n\t{\n\t\tdest[index] = src[index];\n\t\tindex++;\n\t}\n\treturn (dest);\n}\n"
},
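Despite the ft_strncpy.c path, ft_str_sized_copy above behaves like memcpy rather than libc strncpy: it copies exactly src_size bytes, neither stopping at a NUL in src nor padding or terminating dest, so callers such as ft_read_full in the earlier record must track lengths themselves.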
{
"alpha_fraction": 0.6800947785377502,
"alphanum_fraction": 0.6800947785377502,
"avg_line_length": 18.18181800842285,
"blob_id": "f57c1da68eb1e66b1e26fd7e6fa4d40a8f5f1e14",
"content_id": "1df4c365755b3a17bda42edc8492d157678097b2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 422,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 22,
"path": "/C10/ex03/includes/ft_args_parser.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_ARGS_PARSER_H\n# define FT_ARGS_PARSER_H\n\n# include <stdbool.h>\n\ntypedef struct\ts_options\n{\n\tchar\t*executable;\n\tchar\t**paths;\n\tchar\t*curr_path;\n\tint\t\tdisp_cont;\n}\t\t\t\tt_options;\n\nbool\t\t\tparse_args(char **args, int count);\n\nbool\t\t\tprocess_files(char **args, int count, int i, t_options *opts);\n\nvoid\t\t\tdisplay_usage(t_options *opt);\n\nvoid\t\t\tdisplay_error(t_options *opt, char *err, char *cse, bool limit);\n\n#endif\n"
},
{
"alpha_fraction": 0.6219780445098877,
"alphanum_fraction": 0.6329670548439026,
"avg_line_length": 16.5,
"blob_id": "e4ecffb776a9373e093f62460d8b727e85477b6e",
"content_id": "1b0467b74dbae6ffd1d6571a9a33916055842761",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 455,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 26,
"path": "/C10/ex00/srcs/main.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <fcntl.h>\n#include <stdbool.h>\n\n#include \"display_file.h\"\n\nbool\tparse_arguments(int argc, char **argv)\n{\n\tif (argc < 2)\n\t\tstr_write(ERR, MSG_ERR_NO_FILE);\n\telse if (argc > 2)\n\t\tstr_write(ERR, MSG_ERR_TOO_MANY_ARGS);\n\telse\n\t{\n\t\tif (!display_file(argv[1]))\n\t\t\tstr_write(ERR, MSG_ERR_CANNOT_READ);\n\t\telse\n\t\t\treturn (true);\n\t}\n\treturn (false);\n}\n\nint\t\tmain(int argc, char **argv)\n{\n\treturn (parse_arguments(argc, argv) ? 0 : 1);\n}\n"
},
{
"alpha_fraction": 0.5263158082962036,
"alphanum_fraction": 0.5789473652839661,
"avg_line_length": 10.399999618530273,
"blob_id": "49f6c9090cf14bb5ad6e086b23bc7f335a1833ff",
"content_id": "465b30513c665d487a7f5ceea7503e6690bcab62",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 57,
"license_type": "no_license",
"max_line_length": 19,
"num_lines": 5,
"path": "/Shell01/ex03/test/create_20_file.sh",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\nfor i in {1..20}\ndo\n touch \"dummy_$i\"\ndone\n"
},
{
"alpha_fraction": 0.5882353186607361,
"alphanum_fraction": 0.5980392098426819,
"avg_line_length": 10.333333015441895,
"blob_id": "c8f943e2062780dd2a23de5f200efd331a1adc95",
"content_id": "f4666c490841c1f653e8b00dca4bebfcd40e96a7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 102,
"license_type": "no_license",
"max_line_length": 24,
"num_lines": 9,
"path": "/C04/ex00/ft_strlen.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "int\tft_strlen(char *str)\n{\n\tint\tindex;\n\n\tindex = 0;\n\twhile (str[index])\n\t\tindex++;\n\treturn (index);\n}\n"
},
{
"alpha_fraction": 0.6000000238418579,
"alphanum_fraction": 0.6000000238418579,
"avg_line_length": 59,
"blob_id": "333831c8847947b11ab1859a4f9d6653a4bfc70f",
"content_id": "b7acaa2351fd711acc838d3b14857c30bb3dfa2c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 60,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 1,
"path": "/Shell00/ex06/git_ignore.sh",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "find . -exec git check-ignore {} + | xargs -I{} basename {}\n"
},
{
"alpha_fraction": 0.6086956262588501,
"alphanum_fraction": 0.6149068474769592,
"avg_line_length": 11.384614944458008,
"blob_id": "beb7e1b8c28b732ec946e70d817d2a4adf5b563b",
"content_id": "0facad29fa15015a814e5c636cbedc0a1ebc74ce",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 161,
"license_type": "no_license",
"max_line_length": 37,
"num_lines": 13,
"path": "/C11/ex05/srcs/ft_char_write.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <unistd.h>\n\n#include \"ft_io.h\"\n\nvoid\tft_char_write_to(int fd, char c)\n{\n\twrite(fd, &c, 1);\n}\n\nvoid\tft_char_write(char c)\n{\n\tft_char_write_to(OUT, c);\n}\n"
},
{
"alpha_fraction": 0.507569432258606,
"alphanum_fraction": 0.5202129483222961,
"avg_line_length": 18.327974319458008,
"blob_id": "ab5223a565058ee460947126cf731f6a97e5c951",
"content_id": "66a9a053b298827393cdf0f8856d2b9c010380a5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 6011,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 311,
"path": "/ExamFinal/work-done/str_maxlenoc/str_maxlenoc.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* str_maxlenoc.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: exam <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/23 13:51:29 by exam #+# #+# */\n/* Updated: 2019/08/23 14:51:43 by exam ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <unistd.h>\n#include <stdlib.h>\n#include <stdbool.h>\n#include <limits.h>\n\nint\t\tft_str_length(char *str)\n{\n\tint\tlength;\n\n\tlength = 0;\n\twhile (str[length])\n\t\tlength++;\n\treturn (length);\n}\n\nvoid\tft_str_write(char *str)\n{\n\twhile (*str)\n\t\twrite(1, str++, 1);\n}\n\nchar\t*ft_str_n_duplicate(char *str, int n)\n{\n\tint\t\tindex;\n\tchar\t*duplicate;\n\n\tif (!(duplicate = (char *)malloc((n + 1) * sizeof(char))))\n\t\treturn (NULL);\n\tindex = 0;\n\twhile (str[index] && index < n)\n\t{\n\t\tduplicate[index] = str[index];\n\t\tindex++;\n\t}\n\twhile (index < n + 1)\n\t{\n\t\tduplicate[index] = '\\0';\n\t\tindex++;\n\t}\n\treturn (duplicate);\n}\n\nint\tx_cmp_n_size(char *find, char *where, int n)\n{\n\tint\t\tindex;\n\n\tindex = 0;\n\twhile (find[index] == where[index] && find[index] && n--)\n\t\tindex++;\n\tif (find[index] != '\\0')\n\t\treturn (0);\n\treturn (1);\n}\n\nint\tft_compute_score(char *find, char **strs, int strs_size)\n{\n\tint\t\tindex;\n\tint\t\tsize;\n\tint\t\toffset;\n\tint\t\tend_offset;\n\tchar\t*str;\n\tint\tscore;\n\tint\tcurr_biggest_score;\n\tint\tbiggest_score;\n\n\tindex = 0;\n\tbiggest_score = -1;\n\twhile (index < strs_size)\n\t{\n\t\tstr = strs[index];\n\t\tsize = ft_str_length(str);\n\t\toffset = 0;\n\t\tcurr_biggest_score = -1;\n\t\twhile (offset < size)\n\t\t{\n\t\t\tend_offset = size;\n\t\t\twhile (end_offset > offset)\n\t\t\t{\n\t\t\t\tscore = x_cmp_n_size(find, str + offset, end_offset - offset + 1);\n\t\t\t\tif (score > curr_biggest_score)\n\t\t\t\t\tcurr_biggest_score += score;\n\t\t\t\tend_offset--;\n\t\t\t}\n\t\t\toffset++;\n\t\t}\n\t\tbiggest_score += curr_biggest_score;\n\t\tindex++;\n\t}\n\treturn (biggest_score);\n}\n\nvoid\tft_do_on_all_comb(char **strs, int strs_size, char **copies, int *scores)\n{\n\tint\t\tsize;\n\tint\t\toffset;\n\tint\t\tend_offset;\n\tint\t\tjndex;\n\n\tsize = ft_str_length(strs[0]);\n\toffset = 0;\n\tjndex = 0;\n\twhile (offset < size)\n\t{\n\t\tend_offset = size;\n\t\twhile (end_offset > offset)\n\t\t{\n\t\t\tcopies[jndex] = ft_str_n_duplicate(strs[0] + offset, end_offset - offset);\n\t\t\tscores[jndex] = ft_compute_score(copies[jndex], strs + 1, strs_size - 1);\n\t\t\tend_offset--;\n\t\t\tjndex++;\n\t\t}\n\t\toffset++;\n\t}\n}\n\nint\tft_count_possibility(char *str)\n{\n\tint\t\tcount;\n\tint\t\tsize;\n\tint\t\toffset;\n\tint\t\tend_offset;\n\n\tcount = 0;\n\tsize = ft_str_length(str);\n\toffset = 0;\n\twhile (offset < size)\n\t{\n\t\tend_offset = size;\n\t\twhile (end_offset-- > offset)\n\t\t\tcount++;\n\t\toffset++;\n\t}\n\treturn (count);\n}\n\nvoid\tft_swap_int(int *a, int *b)\n{\n\tint c;\n\n\tc = *a;\n\t*a = *b;\n\t*b = c;\n}\n\nvoid ft_swap_str(char **a, char **b)\n{\n\tchar *c;\n\n\tc = *a;\n\t*a = *b;\n\t*b = c;\n}\n\nvoid\tbubble_sort(int size, char **copies, int *scores)\n{\n\tint\t\tindex;\n\tint\t\tjndex;\n\tbool\tswapped;\n\n\tindex = 0;\n\twhile (index < size)\n\t{\n\t\tswapped = false;\n\t\tjndex = index + 1;\n\t\twhile (jndex < size)\n\t\t{\n\t\t\tif (scores[index] < scores[jndex])\n\t\t\t{\n\t\t\t\tft_swap_int(&(scores[index]), 
&(scores[jndex]));\n\t\t\t\tft_swap_str(&(copies[index]), &(copies[jndex]));\n\t\t\t\tswapped = true;\n\t\t\t}\n\t\t\tjndex++;\n\t\t}\n\t\tindex++;\n\t\tif (!swapped)\n\t\t\tbreak ;\n\t}\n}\n\nbool\tis_fitting_on_everyone(char *what, char **strs, int size)\n{\n\tint\t\tindex;\n\tchar\t*haystack;\n\tchar\t*needle;\n\n\tif (ft_str_length(what) == 0)\n\t\treturn (true);\n\tindex = 0;\n\twhile (index < size)\n\t{\n\t\tneedle = what;\n\t\thaystack = strs[index];\n\t\tif (ft_str_length(haystack) != 0)\n\t\t{\n\t\t\twhile (true)\n\t\t\t{\n\t\t\t\tif (*needle == '\\0')\n\t\t\t\t\tbreak ;\n\t\t\t\tif (*needle == *haystack)\n\t\t\t\t\tneedle++;\n\t\t\t\telse\n\t\t\t\t\tneedle = what;\n\t\t\t\tif (*haystack == '\\0')\n\t\t\t\t\treturn (false);\n\t\t\t\thaystack++;\n\t\t\t}\n\t\t}\n\t\tindex++;\n\t}\n\treturn (true);\n}\n\nvoid\tprint_best(int size, char **copies, char **originals, int originals_size, int *scores)\n{\n\tint\tindex;\n\tint\tjndex;\n\tint\tvalid_score;\n\tint\tvalidated;\n\tchar\t**final_copies;\n\tint\tlongest;\n\n\tindex = 0;\n\tjndex = 0;\n\tvalidated = 0;\n\tvalid_score = INT_MAX;\n\tif (!(final_copies = (char **)malloc(size * sizeof(char *))))\n\t\treturn ;\n\twhile (index < size)\n\t{\n\t\tif (is_fitting_on_everyone(copies[index], originals, originals_size))\n\t\t{\n\t\t\tif (valid_score == INT_MAX)\n\t\t\t\tvalid_score = scores[index];\n\t\t\tif (scores[index] != valid_score)\n\t\t\t\tbreak ;\n\t\t\tfinal_copies[jndex] = copies[index];\n\t\t\tvalidated++;\n\t\t\tjndex++;\n\t\t}\n\t\tindex++;\n\t}\n\tif (validated == 0)\n\t\treturn ;\n\tindex = 0;\n\tlongest = -1;\n\tjndex = -1;\n\twhile (index < validated)\n\t{\n\t\tif (longest < ft_str_length(final_copies[index]))\n\t\t{\n\t\t\tlongest = ft_str_length(final_copies[index]);\n\t\t\tjndex = index;\n\t\t}\n\t\tindex++;\n\t}\n\tif (jndex == -1)\n\t\treturn ;\n\tft_str_write(final_copies[jndex]);\n\tfree(final_copies);\n}\n\nvoid\tclear_results(int size, char **copies, int *scores)\n{\n\tint\tindex;\n\n\tindex = 0;\n\twhile (index < size)\n\t{\n\t\tfree(copies[index]);\n\t\tindex++;\n\t}\n\tfree(copies);\n\tfree(scores);\n}\n\nvoid\tft_str_maxlenoc(char **strs, int size)\n{\n\tint\t\tpossibilities;\n\tchar\t**copies;\n\tint\t\t*scores;\n\n\tpossibilities = ft_count_possibility(strs[0]);\n\tcopies = (char **)malloc(possibilities * sizeof(char *));\n\tscores = (int *)malloc(possibilities * sizeof(int));\n\tif (!scores || !copies)\n\t\treturn ;\n\tft_do_on_all_comb(strs, size, copies, scores);\n\tbubble_sort(possibilities, copies, scores);\n\tprint_best(possibilities, copies, strs + 1, size - 1, scores);\n\tclear_results(possibilities, copies, scores);\n}\n\nint\t\tmain(int argc, char **argv)\n{\n\tif (argc > 1)\n\t\tft_str_maxlenoc(argv + 1, argc - 1);\n\twrite(1, \"\\n\", 1);\n}\n"
},
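The heart of the exam solution above is checking whether a candidate substring occurs in every remaining argument; is_fitting_on_everyone hand-rolls that scan. The same check written against libc strstr, as a clarifying sketch (it treats an empty haystack as a miss, where the exam code skips it):

```c
#include <stdbool.h>
#include <string.h>

/* True when needle occurs in every one of the size strings;
** libc strstr replaces the exam's hand-rolled scan. */
bool	fits_everyone(const char *needle, char **strs, int size)
{
	int	i;

	if (*needle == '\0')
		return (true);
	i = 0;
	while (i < size)
	{
		if (strstr(strs[i], needle) == NULL)
			return (false);
		i++;
	}
	return (true);
}
```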
{
"alpha_fraction": 0.6093777418136597,
"alphanum_fraction": 0.6164286732673645,
"avg_line_length": 51.53703689575195,
"blob_id": "dd112f899da83717e778135753e4ad536a0c4c5c",
"content_id": "1fdb7be26253be33c6be23ed2be9b3a7840fc204",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 5673,
"license_type": "no_license",
"max_line_length": 223,
"num_lines": 108,
"path": "/C10/ex03/Makefile",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "C_YELLOW_B=\\033[1;33m\nC_LIGHT_RED_B=\\033[0;91m\nC_WHITE=\\033[0;97m\nC_RESET=\\033[0;39m\n\nSRCS\t= ./srcs/ft_args_parser.c ./srcs/ft_console_io.c ./srcs/ft_dumper.c ./srcs/ft_equal_buffer.c ./srcs/ft_file_utils.c ./srcs/ft_file_utils2.c ./srcs/ft_hexdump.c ./srcs/ft_strncpy.c ./srcs/ft_write_number.c ./srcs/main.c\nOBJS\t= ${SRCS:.c=.o}\nINCS\t= includes\nNAME\t= ft_hexdump\nCC\t\t= gcc\nRM\t\t= rm -f\nCFLAGS\t= -Wall -Wextra -Werror\n\n.c.o :\n\t${CC} ${CFLAGS} -c $< -o ${<:.c=.o} -I${INCS}\n\n${NAME} : ${OBJS}\n\t${CC} ${CFLAGS} ${OBJS} -o ${NAME}\n\nall : ${NAME}\n\nclean :\n\t${RM} ${OBJS}\n\nfclean : clean\n\t${RM} ${NAME}\n\nnorm :\n\tnorminette -R CheckForbiddenSourceHeader */*.[ch]\n\t\ntest : re\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)Single file: ./Makefile$(C_RESET)\" ;\n\t@hexdump ./Makefile | cat -e > result-off.txt ;\n\t@./ft_hexdump ./Makefile | cat -e > result-usr.txt ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)Single file with -C: ./Makefile$(C_RESET)\" ;\n\t@hexdump -C ./Makefile | cat -e > result-off.txt ;\n\t@./ft_hexdump -C ./Makefile | cat -e > result-usr.txt ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)Single file with -C -C: ./Makefile$(C_RESET)\" ;\n\t@hexdump -C -C ./Makefile | cat -e > result-off.txt ;\n\t@./ft_hexdump -C -C ./Makefile | cat -e > result-usr.txt ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)STDIN: x < ./Makefile$(C_RESET)\" ;\n\t@hexdump < ./Makefile | cat -e > result-off.txt ;\n\t@./ft_hexdump < ./Makefile | cat -e > result-usr.txt ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)Multiple file: ./Makefile ./Makefile$(C_RESET)\" ;\n\t@hexdump ./Makefile ./Makefile | cat -e > result-off.txt ;\n\t@./ft_hexdump ./Makefile ./Makefile | cat -e > result-usr.txt ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)Multiple file with -C: ./Makefile ./Makefile$(C_RESET)\" ;\n\t@hexdump -C ./Makefile ./Makefile | cat -e > result-off.txt ;\n\t@./ft_hexdump -C ./Makefile ./Makefile | cat -e > result-usr.txt ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)Multiple file with -C -C: ./Makefile ./Makefile$(C_RESET)\" ;\n\t@hexdump -C -C ./Makefile ./Makefile | cat -e > result-off.txt ;\n\t@./ft_hexdump -C -C ./Makefile ./Makefile | cat -e > result-usr.txt ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ MUST HAVE DIFF ]$(C_RESET) $(C_WHITE)Invalid file: ./a$(C_RESET)\" ;\n\t@hexdump -C -C ./a > out 2>result-off.txt || true ;\n\t@./ft_hexdump -C -C ./a > out 2>result-usr.txt || true ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true 
;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ MUST HAVE DIFF ]$(C_RESET) $(C_WHITE)Valid and invalid file: ./Makefile ./a ./Makefile$(C_RESET)\" ;\n\t@hexdump ./Makefile ./a ./Makefile > out 2>result-off.txt || true ;\n\t@./ft_hexdump ./Makefile ./a ./Makefile > out 2>result-usr.txt || true ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ MUST HAVE DIFF ]$(C_RESET) $(C_WHITE)Only invalid file: ./a ./b ./c$(C_RESET)\" ;\n\t@hexdump ./a ./b ./c > out 2>result-off.txt || true ;\n\t@./ft_hexdump ./a ./b ./c > out 2>result-usr.txt || true ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ MUST HAVE DIFF ]$(C_RESET) $(C_WHITE)On directory: ./srcs/$(C_RESET)\" ;\n\t@hexdump ./srcs/ > out 2>result-off.txt || true ;\n\t@./ft_hexdump ./srcs/ > out 2>result-usr.txt || true ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ MUST HAVE DIFF ]$(C_RESET) $(C_WHITE)On 2 directory: ./srcs/ ./srcs/$(C_RESET)\" ;\n\t@hexdump ./srcs/ ./srcs/ > out 2>result-off.txt || true ;\n\t@./ft_hexdump ./srcs/ ./srcs/ > out 2>result-usr.txt || true ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ MUST HAVE DIFF ]$(C_RESET) $(C_WHITE)Illegal argument: -a$(C_RESET)\" ;\n\t@hexdump -a > out 2>result-off.txt || true ;\n\t@./ft_hexdump -a > out 2>result-usr.txt || true ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ MUST HAVE DIFF ]$(C_RESET) $(C_WHITE)Read only file: read_only.txt$(C_RESET)\" ;\n\t@echo \"Hey! This file is supposed to not be redeable...\" > read_only.txt\n\t@chmod -xrw read_only.txt\n\t@hexdump read_only.txt > out 2>result-off.txt || true ;\n\t@./ft_hexdump read_only.txt > out 2>result-usr.txt || true ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@rm -f result-off.txt result-usr.txt out read_only.txt;\n\nre : fclean all\n\n.PHONY: all clean fclean re .c.o test"
},
{
"alpha_fraction": 0.6902654767036438,
"alphanum_fraction": 0.6902654767036438,
"avg_line_length": 15.142857551574707,
"blob_id": "fa53d51baa6a68e7a11d081b41d05947fa6598fe",
"content_id": "cc31f50ed5746b998c9acb745c54ace6a0d13d1c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 113,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 7,
"path": "/C11/ex05/includes/ft_char.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_CHAR_H\n# define FT_CHAR_H\n\nvoid\tft_char_write_to(int fd, char c);\nvoid\tft_char_write(char c);\n\n#endif\n"
},
{
"alpha_fraction": 0.25668835639953613,
"alphanum_fraction": 0.2812725901603699,
"avg_line_length": 33.57500076293945,
"blob_id": "2841bfb84be897d1c6250e74c2d460b6e66acfcf",
"content_id": "e404240a1814c0d209d2645dc873b213cb7bd9e4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1383,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 40,
"path": "/C12/ex05/ft_list_push_strs_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_list_push_strs.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/22 15:00:46 by ecaceres #+# #+# */\n/* Updated: 2019/08/22 15:00:46 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include \"ft_list.h\"\n\n#include <stdio.h>\n\nt_list\t*ft_list_push_strs(int size, char **strs)\n{\n\tt_list\t*list;\n\n\tif (size == 0)\n\t\treturn (0);\n\tlist = ft_create_elem((void *)strs[size - 1]);\n\tlist->next = ft_list_push_strs(size - 1, strs);\n\treturn (list);\n}\n\nint\t\tmain(int argc, char **argv)\n{\n\tint\t\tindex;\n\tt_list\t*current;\n\n\tindex = 0;\n\tcurrent = ft_list_push_strs(argc, argv);\n\twhile (current != 0)\n\t{\n\t\tprintf(\"list[%d] = $%s$\\n\", index++, (char *)current->data);\n\t\tcurrent = current->next;\n\t}\n}\n"
},
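Because ft_list_push_strs above links each element in front of the recursion's result, strs[size - 1] becomes the head: the dev main prints the arguments in reverse. An iterative front-push produces the same order; sketch below, with a local new_elem standing in for the repo's ft_create_elem and the usual next/data layout assumed:

```c
#include <stdlib.h>

/* Stand-ins; the repo's ft_list.h layout is assumed. */
typedef struct s_list
{
	struct s_list	*next;
	void			*data;
}					t_list;

static t_list	*new_elem(void *data)
{
	t_list	*elem;

	if (!(elem = malloc(sizeof(t_list))))
		return (NULL);
	elem->next = NULL;
	elem->data = data;
	return (elem);
}

/* Iterative front-push: visiting strs[0..size-1] and inserting at
** the head yields the same reversed order as the recursion above. */
t_list	*push_strs_iter(int size, char **strs)
{
	t_list	*head;
	t_list	*elem;
	int		i;

	head = NULL;
	i = 0;
	while (i < size)
	{
		if (!(elem = new_elem((void *)strs[i])))
			return (head);
		elem->next = head;
		head = elem;
		i++;
	}
	return (head);
}
```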
{
"alpha_fraction": 0.4647058844566345,
"alphanum_fraction": 0.5117647051811218,
"avg_line_length": 27.33333396911621,
"blob_id": "66f652afb08843f6775ff239e558eed62e36ca7c",
"content_id": "a6b052727e5ced2ab74a694f4d2ba4c407bb7415",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 170,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 6,
"path": "/C11/ex03/ft_count_if.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "int\tft_count_if(char **tab, int length, int (*f)(char*))\n{\n\tif (length == 0)\n\t\treturn (0);\n\treturn (((*f)(tab[0]) != 0 ? 1 : 0) + ft_count_if(tab + 1, length - 1, f));\n}\n"
},
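A caller sketch for the recursive ft_count_if above; the predicate starts_with_f is invented for the example and the program assumes it is linked with ft_count_if.c:

```c
#include <stdio.h>

int	ft_count_if(char **tab, int length, int (*f)(char *));

/* Invented predicate: does the string start with 'f'? */
int	starts_with_f(char *s)
{
	return (s[0] == 'f');
}

int	main(void)
{
	char	*words[4] = {"foo", "bar", "fizz", "buzz"};

	printf("%d\n", ft_count_if(words, 4, &starts_with_f));	/* 2 */
	return (0);
}
```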
{
"alpha_fraction": 0.29768604040145874,
"alphanum_fraction": 0.31832394003868103,
"avg_line_length": 25.649999618530273,
"blob_id": "fef9b21a2d004032def60a53b0ab105e734cc70e",
"content_id": "714b37ccb38e5c347b83938c5fbb04d4b21c167e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1599,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 60,
"path": "/C11/ex01/ft_map_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_map.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/16 11:22:59 by ecaceres #+# #+# */\n/* Updated: 2019/08/16 11:22:59 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdio.h>\n\n#include <stdlib.h>\n\nint\t*ft_map(int *tab, int length, int (*f)(int))\n{\n\tint\tindex;\n\tint\t*results;\n\n\tif (!(results = malloc(length * sizeof(int))))\n\t\treturn (NULL);\n\tindex = 0;\n\twhile (index < length)\n\t{\n\t\tresults[index] = (*f)(tab[index]);\n\t\tindex++;\n\t}\n\treturn (results);\n}\n\nint\tft_square(int number)\n{\n\treturn (number * number);\n}\n\nint\tmain(void)\n{\n\tint\tindex;\n\tint\t*tab;\n\tint\tlength;\n\tint\t*results;\n\n\tlength = 10;\n\ttab = malloc(length * sizeof(int));\n\tindex = 0;\n\twhile (index < length)\n\t{\n\t\ttab[index] = index;\n\t\tindex++;\n\t}\n\tresults = ft_map(tab, length, &ft_square);\n\tindex = 0;\n\twhile (index < length)\n\t{\n\t\tprintf(\"[mapped] square(%d) = %d\\n\", index, results[index]);\n\t\tindex++;\n\t}\n}\n"
},
{
"alpha_fraction": 0.2995080053806305,
"alphanum_fraction": 0.3198032081127167,
"avg_line_length": 28.563636779785156,
"blob_id": "938af2b17042a7a1641a8e50998dc4e186aeb093",
"content_id": "09975430d3c58c55fb10df9772c2410e6a63dd5c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1626,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 55,
"path": "/C07/ex00/ft_strdup_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_strdup.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/07 17:06:22 by ecaceres #+# #+# */\n/* Updated: 2019/08/07 17:06:23 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdlib.h>\n#include <stdio.h>\n#include <string.h>\n\nint\t\tft_str_length(char *str)\n{\n\tint\tindex;\n\n\tindex = 0;\n\twhile (str[index])\n\t\tindex++;\n\treturn (index);\n}\n\nchar\t*ft_strdup(char *src)\n{\n\tint\t\tindex;\n\tchar\t*dest;\n\n\tindex = 0;\n\tif ((dest = (char *)malloc(ft_str_length(src) * sizeof(char) + 1)) == NULL)\n\t\treturn (0);\n\twhile (src[index])\n\t{\n\t\tdest[index] = src[index];\n\t\tindex++;\n\t}\n\tdest[index] = '\\0';\n\treturn (dest);\n}\n\nint\t\tmain(void)\n{\n\tchar\t*str;\n\tchar\t*allocated;\n\n\tstr = \"Hello World with malloc()\";\n\tprintf(\"x : base : $%s$ @ %p\\n\", str, str);\n\tallocated = strdup(str);\n\tprintf(\"c : alloc : $%s$ @ %p\\n\", allocated, allocated);\n\tallocated = ft_strdup(str);\n\tprintf(\"ft : alloc : $%s$ @ %p\\n\", allocated, allocated);\n}\n"
},
{
"alpha_fraction": 0.40918728709220886,
"alphanum_fraction": 0.45265018939971924,
"avg_line_length": 28.17525863647461,
"blob_id": "cc49d459599e3e3be3628cbf79aa3e04da8b9743",
"content_id": "70508aad841931e5051037c0621659abc4b59f3a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2830,
"license_type": "no_license",
"max_line_length": 92,
"num_lines": 97,
"path": "/C07/ex04/dev/ft_convert_base.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_convert_base.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/08 13:25:01 by ecaceres #+# #+# */\n/* Updated: 2019/08/08 17:54:01 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdbool.h>\n#include <stdlib.h>\n#include <stdio.h>\n#include <unistd.h>\n\n#define NO_MATCH -1\n\nint\t\tft_str_length(char *str);\nint\t\tcompute_number_length(unsigned int number, int radix, bool negative);\nbool\tis_space(char c);\nbool\tis_base_valid(char *str);\n\nint\t\tresolve_base(char *base, char match)\n{\n\tint\t\tindex;\n\n\tindex = 0;\n\twhile (base[index])\n\t{\n\t\tif (base[index] == match)\n\t\t\treturn (index);\n\t\tindex++;\n\t}\n\treturn (NO_MATCH);\n}\n\nchar\t*ft_buffer_base(char *base, unsigned int number, bool negative)\n{\n\tint\t\tindex;\n\tint\t\tradix;\n\tint\t\tlength;\n\tchar\t*string;\n\n\tradix = ft_str_length(base);\n\tlength = compute_number_length(number, radix, negative);\n\tif (!(string = (char *)malloc((length + 1) * sizeof(char))))\n\t\treturn (0);\n\tif (negative)\n\t\tstring[0] = '-';\n\tindex = negative ? 1 : 0;\n\twhile (index < length)\n\t{\n\t\tstring[length - (!negative) - index++] = base[number % radix];\n\t\tnumber /= radix;\n\t}\n\tstring[length] = '\\0';\n\treturn (string);\n}\n\nchar\t*ft_convert_base(char *nbr, char *base_from, char *base_to)\n{\n\tint\t\t\t\tradix;\n\tint\t\t\t\tminus;\n\tint\t\t\t\tresolved;\n\tunsigned int\tresult;\n\n\tif (!is_base_valid(base_from) || !is_base_valid(base_to))\n\t\treturn (NULL);\n\tradix = ft_str_length(base_from);\n\tresult = 0;\n\tminus = 1;\n\twhile (is_space(*nbr))\n\t\tnbr++;\n\twhile (*nbr == '+' || *nbr == '-')\n\t{\n\t\tif (*(nbr++) == '-')\n\t\t\tminus *= -1;\n\t}\n\twhile ((resolved = resolve_base(base_from, *nbr)) != NO_MATCH)\n\t{\n\t\tresult *= radix;\n\t\tresult += resolved;\n\t\tnbr++;\n\t}\n\tminus = result == 0 ? 1 : minus;\n\treturn (ft_buffer_base(base_to, result, (minus > 0 ? false : true)));\n}\n\nint\t\tmain(void)\n{\n\tprintf(\"result : $%s$\\n\", ft_convert_base(\"2147483647\", \"0123456789\", \"0123456789abcdef\"));\n\tprintf(\"result : $%s$\\n\", ft_convert_base(\"---------7fffffff\", \"0123456789abcdef\", \"01\"));\n\tprintf(\"result : $%s$\\n\", ft_convert_base(\"---+--0001023a\", \"0123456789\", \"0123456789\"));\n\tprintf(\"result : $%s$\\n\", ft_convert_base(\"-0\", \"0123456789\", \"abcdefghij\"));\n}\n"
},
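A worked trace of the dev main's third call above: ft_convert_base("---+--0001023a", "0123456789", "0123456789") sees five '-' signs in the prefix (an odd count, so the result is negative), accumulates the digits 0001023 until 'a' fails to resolve in the source base, and returns "-1023".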
{
"alpha_fraction": 0.682539701461792,
"alphanum_fraction": 0.682539701461792,
"avg_line_length": 14.75,
"blob_id": "b150adb257d77c747b80dbd3432b02f7d46fd6a6",
"content_id": "377a28433d5658fcd80fe7dce9c9f17743b132fb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 126,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 8,
"path": "/C10/ex03/includes/ft_equal.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_EQUAL_H\n# define FT_EQUAL_H\n\n# include <stdbool.h>\n\nbool\tft_is_equal(char *a, char *b, unsigned int size);\n\n#endif\n"
},
{
"alpha_fraction": 0.31439393758773804,
"alphanum_fraction": 0.33522728085517883,
"avg_line_length": 31.32653045654297,
"blob_id": "d874226a937c54f62e1256f608daf4fb07ed168e",
"content_id": "559dbbf109256593819372195aee15fd002c4c09",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1584,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 49,
"path": "/C12/ex06/ft_list_clear_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_list_clear.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/22 15:15:11 by ecaceres #+# #+# */\n/* Updated: 2019/08/22 15:15:11 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdlib.h>\n\n#include \"ft_list.h\"\n\n#include <stdio.h>\n\nvoid\tft_list_clear(t_list *begin_list, void (*free_fct)(void *))\n{\n\tif (begin_list->next != 0)\n\t\tft_list_clear(begin_list->next, free_fct);\n\t(*free_fct)(begin_list->data);\n\tfree(begin_list);\n}\n\nvoid\tprint_element(void *element)\n{\n\tprintf(\"element:: %d\\n\", *((int *)element));\n\tfree(element);\n}\n\nint\t\tmain(void)\n{\n\tint\t\t*index;\n\tint\t\t*malloced_index;\n\tt_list\t*list;\n\n\tindex = malloc(sizeof(int));\n\t*index = 0;\n\tlist = ft_create_elem(index);\n\twhile ((*index += 1) < 10)\n\t{\n\t\tmalloced_index = malloc(sizeof(int));\n\t\t*malloced_index = *index;\n\t\tft_list_push_back(&list, (void *)malloced_index);\n\t}\n\tft_list_clear(list, &print_element);\n}\n"
},
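ft_list_clear above recurses to the tail before freeing, so the deepest call frees the last node first. An iterative sketch with the same contract (layout assumed from this repo's ft_list.h) frees front to back in constant stack space:

```c
#include <stdlib.h>

/* Stand-in for this repo's ft_list.h; field names assumed. */
typedef struct s_list
{
	struct s_list	*next;
	void			*data;
}					t_list;

/* Iterative clear: save the next pointer before freeing the node,
** then advance; no recursion depth to worry about on long lists. */
void	list_clear_iter(t_list *begin_list, void (*free_fct)(void *))
{
	t_list	*next;

	while (begin_list != NULL)
	{
		next = begin_list->next;
		(*free_fct)(begin_list->data);
		free(begin_list);
		begin_list = next;
	}
}
```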
{
"alpha_fraction": 0.6097561120986938,
"alphanum_fraction": 0.6219512224197388,
"avg_line_length": 13.909090995788574,
"blob_id": "f0a8af90365112ce3e7c4c9a720ef930a355a1ca",
"content_id": "b2dc101420e7d1b10800955ffb0127f5dca54e12",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 164,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 11,
"path": "/C10/ex02/srcs/main.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n\n#include \"ft_tail.h\"\n#include \"ft_args_parser.h\"\n\nint\t\tmain(int argc, char **argv)\n{\n\tif (!parse_args(argv, argc))\n\t\treturn (1);\n\treturn (0);\n}\n"
},
{
"alpha_fraction": 0.5251798629760742,
"alphanum_fraction": 0.5539568066596985,
"avg_line_length": 18.85714340209961,
"blob_id": "c061b5fefd3c532967abc13090c6c9d5cd1cecd6",
"content_id": "261a1078a7440bb0355de0e5df540b5a0bac5c64",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 139,
"license_type": "no_license",
"max_line_length": 53,
"num_lines": 7,
"path": "/C11/ex00/ft_foreach.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "void\tft_foreach(int *tab, int length, void (*f)(int))\n{\n\tif (length <= 0)\n\t\treturn ;\n\t(*f)(tab[0]);\n\tft_foreach(tab + 1, length - 1, f);\n}\n"
},
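A caller sketch for ft_foreach above; print_int is invented for the example and the program assumes it is linked with ft_foreach.c:

```c
#include <stdio.h>

void	ft_foreach(int *tab, int length, void (*f)(int));

/* Invented callback for the example. */
void	print_int(int n)
{
	printf("%d\n", n);
}

int	main(void)
{
	int	tab[5] = {3, 1, 4, 1, 5};

	ft_foreach(tab, 5, &print_int);	/* one value per line */
	return (0);
}
```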
{
"alpha_fraction": 0.5607476830482483,
"alphanum_fraction": 0.573208749294281,
"avg_line_length": 14.285714149475098,
"blob_id": "bf0ec589703fae6398b9dcc6268c0a95642221ec",
"content_id": "c430ff0cdbcd1110ae843a238ad509367a4c556d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 321,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 21,
"path": "/C07/ex01/ft_range.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdlib.h>\n\nint\t\t*ft_range(int min, int max)\n{\n\tint\trange;\n\tint\tindex;\n\tint\t*buffer;\n\n\tif (min >= max)\n\t\treturn (0);\n\trange = max - min - 1;\n\tif ((buffer = malloc(range * sizeof(int))) == NULL)\n\t\treturn (0);\n\tindex = 0;\n\twhile (index <= range)\n\t{\n\t\tbuffer[index] = min + index;\n\t\tindex++;\n\t}\n\treturn (buffer);\n}\n"
},
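With the allocation in the record above corrected to max - min elements (the committed version reserved one int too few, then wrote past the buffer), a caller sketch; as the loop shows, max itself is never stored:

```c
#include <stdio.h>
#include <stdlib.h>

int	*ft_range(int min, int max);

int	main(void)
{
	int	*r;
	int	i;

	if (!(r = ft_range(3, 7)))
		return (1);
	i = 0;
	while (i < 7 - 3)
		printf("%d ", r[i++]);	/* 3 4 5 6: max is excluded */
	printf("\n");
	free(r);
	return (0);
}
```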
{
"alpha_fraction": 0.5308057069778442,
"alphanum_fraction": 0.549763023853302,
"avg_line_length": 14.071428298950195,
"blob_id": "f40c34eaa95045430640d378826b43abeb3fb388",
"content_id": "3acb87d0a3263ba169e925e0e834a88c92ce703c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 211,
"license_type": "no_license",
"max_line_length": 39,
"num_lines": 14,
"path": "/C01/ex07/ft_rev_int_tab.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "void\tft_rev_int_tab(int *tab, int size)\n{\n\tint index;\n\tint tmp;\n\n\tindex = 0;\n\twhile (index < size / 2)\n\t{\n\t\ttmp = tab[index];\n\t\ttab[index] = tab[size - 1 - index];\n\t\ttab[size - 1 - index] = tmp;\n\t\tindex++;\n\t}\n}\n"
},
{
"alpha_fraction": 0.3031880259513855,
"alphanum_fraction": 0.32465842366218567,
"avg_line_length": 31.70212745666504,
"blob_id": "fdcf053bc77de975d8702f5668f56f960c9323b2",
"content_id": "261fb5565ee40e19e8102a9889e4e3824cf0d48e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1537,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 47,
"path": "/C12/ex09/ft_list_foreach_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_list_foreach.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/22 17:38:24 by ecaceres #+# #+# */\n/* Updated: 2019/08/22 17:38:24 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include \"ft_list.h\"\n\n#include <stdlib.h>\n#include <stdio.h>\n\nvoid\tft_list_foreach(t_list *begin_list, void (*f)(void *))\n{\n\tif (begin_list == 0)\n\t\treturn ;\n\t(*f)(begin_list->data);\n\tft_list_foreach(begin_list->next, f);\n}\n\nvoid\tprint_element(void *element)\n{\n\tprintf(\"element:: %d\\n\", *((int *)element));\n}\n\nint\t\tmain(void)\n{\n\tint\t\t*index;\n\tint\t\t*malloced_index;\n\tt_list\t*list;\n\n\tindex = malloc(sizeof(int));\n\t*index = 0;\n\tlist = ft_create_elem(index);\n\twhile ((*index += 1) < 10)\n\t{\n\t\tmalloced_index = malloc(sizeof(int));\n\t\t*malloced_index = *index;\n\t\tft_list_push_back(&list, (void *)malloced_index);\n\t}\n\tft_list_foreach(list, &print_element);\n}\n"
},
{
"alpha_fraction": 0.49358972907066345,
"alphanum_fraction": 0.5256410241127014,
"avg_line_length": 11,
"blob_id": "4474e35fbcf01294f20dc62256a34a3768178b48",
"content_id": "7d73b00f1e7103c7785348d4d6612b4bba38e84a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 156,
"license_type": "no_license",
"max_line_length": 31,
"num_lines": 13,
"path": "/C10/ex01/srcs/main.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include \"cat.h\"\n\nint\tmain(int argc, char **argv)\n{\n\tif (argc < 2)\n\t\tstdin_loop();\n\telse\n\t{\n\t\tif (!cat(argv, argc - 1, 1))\n\t\t\treturn (1);\n\t}\n\treturn (0);\n}\n"
},
{
"alpha_fraction": 0.6768559217453003,
"alphanum_fraction": 0.6790392994880676,
"avg_line_length": 18.913043975830078,
"blob_id": "5c9b549e284aeb2e7efbba9bc82663071789a21c",
"content_id": "c95f462ff7d9b2b7a34e4154319a4df9d233e58a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 458,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 23,
"path": "/C10/ex02/includes/ft_args_parser.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_ARGS_PARSER_H\n# define FT_ARGS_PARSER_H\n\n# include <stdbool.h>\n\n# define NOTHING -1\n\ntypedef struct\ts_options\n{\n\tint\tbyte_to_read;\n}\t\t\t\tt_options;\n\nbool\t\t\tparse_args(char **args, int count);\n\nbool\t\t\tprocess_tail(char **args, int count, int i, t_options *opts);\n\nbool\t\t\tproc_option(char **args, int count, int *i, t_options *opts);\n\nvoid\t\t\tdisplay_usage(char *exec);\n\nvoid\t\t\tdisplay_error(char *exec, char *error, char *cause, bool limit);\n\n#endif\n"
},
{
"alpha_fraction": 0.5162355303764343,
"alphanum_fraction": 0.5514584183692932,
"avg_line_length": 26.953845977783203,
"blob_id": "44f81ec20b2043ac9a0d661033a1fc49bc871ea5",
"content_id": "1424f2bdbd834d5030a049aaa4b51a839c03e7dc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 1817,
"license_type": "no_license",
"max_line_length": 144,
"num_lines": 65,
"path": "/C11/ex05/Makefile",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "C_YELLOW_B=\\033[1;33m\nC_LIGHT_RED_B=\\033[0;91m\nC_WHITE=\\033[0;97m\nC_RESET=\\033[0;39m\n\nSRCS\t= ./srcs/ft_atoi.c ./srcs/ft_char_write.c ./srcs/ft_is.c ./srcs/ft_number_write.c ./srcs/ft_operation.c ./srcs/ft_str_write.c ./srcs/main.c\nOBJS\t= ${SRCS:.c=.o}\nINCS\t= includes\nNAME\t= do-op\nCC\t\t= gcc\nRM\t\t= rm -f\nCFLAGS\t= -Wall -Wextra -Werror\n\n.c.o :\n\t${CC} ${CFLAGS} -c $< -o ${<:.c=.o} -I${INCS}\n\n${NAME} : ${OBJS}\n\t${CC} ${CFLAGS} ${OBJS} -o ${NAME}\n\nall : ${NAME}\n\nclean :\n\t${RM} ${OBJS}\n\nfclean : clean\n\t${RM} ${NAME}\n\nnorm :\n\tnorminette */*.[ch]\n\nfind_sources :\n\t@find srcs -type f -name \"*.c\" | xargs -I{} echo ./{} | tr '\\n' ' '\n\t\ntest : re\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing with empty$(C_RESET)\" ;\n\t@./${NAME} | cat -e ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing with: 1 + 1$(C_RESET)\" ;\n\t@./${NAME} 1 + 1 | cat -e ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing with: 42amis - --+-20toto12$(C_RESET)\" ;\n\t@./${NAME} 42amis - --+-20toto12 | cat -e ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing with: 1 p 1$(C_RESET)\" ;\n\t@./${NAME} 1 p 1 | cat -e ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing with: 1 + toto3$(C_RESET)\" ;\n\t@./${NAME} 1 + toto3 | cat -e ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing with: toto3 + 4$(C_RESET)\" ;\n\t@./${NAME} toto3 + 4 | cat -e ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing with: foo plus bar$(C_RESET)\" ;\n\t@./${NAME} foo plus bar | cat -e ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing with: 25 / 0$(C_RESET)\" ;\n\t@./${NAME} 25 / 0 | cat -e ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing with: 25 % 0$(C_RESET)\" ;\n\t@./${NAME} 25 % 0 | cat -e ;\n\t@##\n\nre : fclean all\n\n.PHONY: all clean fclean re .c.o test\n"
},
{
"alpha_fraction": 0.4188481569290161,
"alphanum_fraction": 0.4365686774253845,
"avg_line_length": 25.414894104003906,
"blob_id": "ea1fbcbbdc9497daf62a8bdac8a65a4cab54e3dd",
"content_id": "ff3edf7d3f2f452aa380ce40e342c04284f8babb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2483,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 94,
"path": "/Rush01/ex00/solver.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* solver.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/10 12:53:11 by ecaceres #+# #+# */\n/* Updated: 2019/08/10 12:53:15 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdlib.h>\n\n#include \"solver.h\"\n#include \"boolean.h\"\n#include \"constraint.h\"\n#include \"checker.h\"\n#include \"bruteforce_search.h\"\n#include \"pretty_print.h\"\n\nint\t\t**create_grid(int size)\n{\n\tint\tindex;\n\tint\tjndex;\n\tint\t**grid;\n\n\tgrid = (int **)malloc((size + 1) * sizeof(int *));\n\tindex = 0;\n\twhile (index < size)\n\t{\n\t\tgrid[index] = (int *)malloc((size + 1) * sizeof(int));\n\t\tjndex = 0;\n\t\twhile (jndex < size + 1)\n\t\t{\n\t\t\tgrid[index][jndex] = 0;\n\t\t\tjndex++;\n\t\t}\n\t\tindex++;\n\t}\n\tgrid[size] = 0;\n\treturn (grid);\n}\n\nvoid\tfree_grid(int **grid, int size)\n{\n\tint\tindex;\n\n\tindex = 0;\n\twhile (index < size)\n\t{\n\t\tfree(grid[index]);\n\t\tindex++;\n\t}\n\tfree(grid);\n}\n\nt_bool\tvalidate_constraint(int size, t_constr cstr[4])\n{\n\tint\tmax;\n\tint\tindex;\n\n\tmax = size + 1;\n\tindex = 0;\n\twhile (index < size)\n\t{\n\t\tif ((cstr[UP].v[index] + cstr[DOWN].v[index]) > max)\n\t\t\treturn (false);\n\t\tif ((cstr[LEFT].v[index] + cstr[RIGHT].v[index]) > max)\n\t\t\treturn (false);\n\t\tindex++;\n\t}\n\treturn (true);\n}\n\nt_bool\tsolve(int grid_size, char *raw_args)\n{\n\tt_constr\tcnstr[4];\n\tint\t\t\t**grid;\n\tt_bool\t\tsuccess;\n\n\tcnstr[UP] = create_constraint(grid_size, raw_args, grid_size * 0);\n\tcnstr[DOWN] = create_constraint(grid_size, raw_args, grid_size * 1);\n\tcnstr[LEFT] = create_constraint(grid_size, raw_args, grid_size * 2);\n\tcnstr[RIGHT] = create_constraint(grid_size, raw_args, grid_size * 3);\n\tgrid = create_grid(grid_size);\n\tif (!validate_constraint(grid_size, cnstr))\n\t\treturn (false);\n\tsuccess = search_solution(grid, grid_size, cnstr);\n\tif (success)\n\t\tprint_grid(grid, grid_size);\n\tfree_grid(grid, grid_size);\n\treturn (success);\n}\n"
},
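validate_constraint in the solver above encodes a standard skyscraper necessary condition: the tallest tower is the only one visible from both ends of a line, so the u buildings seen from one side and the d seen from the other overlap in at most one, giving u + d - 1 <= size, i.e. u + d <= size + 1, exactly the bound checked. For a 4x4 grid that bound is 5: opposing clues 2 and 3 can coexist, while 3 and 4 (sum 7) are rejected before any search begins.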
{
"alpha_fraction": 0.31263983249664307,
"alphanum_fraction": 0.33445191383361816,
"avg_line_length": 26.9375,
"blob_id": "4b09af415b28072fd705d946b739442329451b54",
"content_id": "4541735c6abe91a064e89a1dec940862cee2880e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1788,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 64,
"path": "/Final Project/srcs/utilities.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* utilities.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/15 13:15:42 by ecaceres #+# #+# */\n/* Updated: 2019/08/21 20:11:06 by fyusuf-a ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <utilities.h>\n#include <stdlib.h>\n#include <stdio.h>\n\nchar\t*ft_str_n_copy(char *dest, char *src, int n)\n{\n\tint\t\tindex;\n\n\tindex = 0;\n\twhile (index < n && src[index] != '\\0')\n\t{\n\t\tdest[index] = src[index];\n\t\tindex++;\n\t}\n\twhile (index < n)\n\t{\n\t\tdest[index] = '\\0';\n\t\tindex++;\n\t}\n\treturn (dest);\n}\n\nchar\t*ft_extend_array(char *orig, char *n_cont, UINT old_len, UINT len)\n{\n\tchar *dest;\n\n\tif (!(dest = malloc((len + 1) * sizeof(char))))\n\t\treturn (NULL);\n\tif (orig != NULL)\n\t\tft_str_n_copy(dest, orig, old_len);\n\tft_str_n_copy(dest + old_len, n_cont, (UINT)(len - old_len));\n\tif (orig != NULL)\n\t\tfree(orig);\n\treturn (dest);\n}\n\nt_bool\tft_atoi_n_strict(char *str, UINT n, UINT *result)\n{\n\tUINT\tindex;\n\n\tindex = 0;\n\t*result = 0;\n\twhile (index < n)\n\t{\n\t\tif ('0' <= str[index] && str[index] <= '9')\n\t\t\t*result = *result * 10 + str[index] - '0';\n\t\telse\n\t\t\treturn (false);\n\t\tindex++;\n\t}\n\treturn (true);\n}\n"
},
{
"alpha_fraction": 0.6086956262588501,
"alphanum_fraction": 0.6217391490936279,
"avg_line_length": 18.16666603088379,
"blob_id": "03678018ea34d0799ba1bd0d2ec726c5f17ecabe",
"content_id": "2debde81062edaf3f3686a2756705e005bff19b9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 230,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 12,
"path": "/C12/ex07/ft_list_at.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdlib.h>\n\n#include \"ft_list.h\"\n\nt_list\t*ft_list_at(t_list *begin_list, unsigned int nbr)\n{\n\tif (begin_list == 0)\n\t\treturn (NULL);\n\tif (nbr == 0)\n\t\treturn (begin_list);\n\treturn (ft_list_at(begin_list->next, nbr - 1));\n}\n"
},
{
"alpha_fraction": 0.5894736647605896,
"alphanum_fraction": 0.5964912176132202,
"avg_line_length": 24.909090042114258,
"blob_id": "1f03b5cc4a51469bda63e5001eee26a4e792354f",
"content_id": "68860fc1ba488019dd9145093ea8a63845b33ad1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 285,
"license_type": "no_license",
"max_line_length": 62,
"num_lines": 11,
"path": "/C12/ex10/ft_list_foreach_if.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include \"ft_list.h\"\n\nvoid\tft_list_foreach_if(t_list *begin_list, void (*f)(void *),\n\t\t\t\tvoid *data_ref, int (*cmp)())\n{\n\tif (begin_list == 0)\n\t\treturn ;\n\tif ((*cmp)(begin_list->data, data_ref) == 0)\n\t\t(*f)(begin_list->data);\n\tft_list_foreach_if(begin_list->next, f, data_ref, cmp);\n}\n"
},
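A caller sketch for ft_list_foreach_if above; the comparator and printer are invented for the example, the next/data layout is assumed from ft_list.h, and the program assumes it is linked with ft_list_foreach_if.c:

```c
#include <stdio.h>

/* Stand-ins; the repo's ft_list.h layout is assumed. */
typedef struct s_list
{
	struct s_list	*next;
	void			*data;
}					t_list;

void	ft_list_foreach_if(t_list *begin_list, void (*f)(void *),
			void *data_ref, int (*cmp)());

/* Invented comparator and printer for the example. */
int	int_cmp(void *a, void *b)
{
	return (*(int *)a - *(int *)b);
}

void	print_int_ptr(void *p)
{
	printf("%d\n", *(int *)p);
}

int	main(void)
{
	int		vals[3] = {1, 2, 1};
	int		ref;
	t_list	c = {NULL, &vals[2]};
	t_list	b = {&c, &vals[1]};
	t_list	a = {&b, &vals[0]};

	ref = 1;
	ft_list_foreach_if(&a, &print_int_ptr, &ref, &int_cmp);	/* prints 1, 1 */
	return (0);
}
```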
{
"alpha_fraction": 0.6162790656089783,
"alphanum_fraction": 0.6511628031730652,
"avg_line_length": 9.75,
"blob_id": "594370a99fb2d0bbc412a6e33cb9957142b18d23",
"content_id": "f96cf838547e28ff54b0d682d5e7828a8a560f0c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 86,
"license_type": "no_license",
"max_line_length": 16,
"num_lines": 8,
"path": "/C11/ex05/includes/ft_io.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_IO_H\n# define FT_IO_H\n\n# define IN 0\n# define OUT 1\n# define ERR 2\n\n#endif\n"
},
{
"alpha_fraction": 0.3194510042667389,
"alphanum_fraction": 0.3459536135196686,
"avg_line_length": 21.720430374145508,
"blob_id": "bd89f0c5a10b568c803974115ec90660292badc3",
"content_id": "6fdc1eac1b6f21f4f6f3ea853cd472cab3a26ef8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2113,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 93,
"path": "/C11/ex06/ft_sort_string_tab_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_sort_string_tab.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/20 10:39:42 by ecaceres #+# #+# */\n/* Updated: 2019/08/20 10:39:42 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdbool.h>\n\n#include <stdio.h>\n#include <stdlib.h>\n#include <string.h>\n\nvoid\tft_swap(char **a, char **b)\n{\n\tchar *c;\n\n\tc = *a;\n\t*a = *b;\n\t*b = c;\n}\n\nint\t\tft_strcmp(char *s1, char *s2)\n{\n\twhile (*s1 != '\\0' && (*s1 == *s2))\n\t{\n\t\ts1++;\n\t\ts2++;\n\t}\n\treturn (*(unsigned char*)s1 - *(unsigned char*)s2);\n}\n\nvoid\tft_sort_string_tab(char **tab)\n{\n\tint\t\tindex;\n\tint\t\tsize;\n\tbool\tswapped;\n\n\tsize = 0;\n\twhile (tab[size])\n\t\tsize++;\n\twhile (true)\n\t{\n\t\tindex = 0;\n\t\tswapped = false;\n\t\twhile (index < size - 1)\n\t\t{\n\t\t\tif (ft_strcmp(tab[index], tab[index + 1]) > 0)\n\t\t\t{\n\t\t\t\tft_swap(&tab[index], &tab[index + 1]);\n\t\t\t\tswapped = true;\n\t\t\t}\n\t\t\tindex++;\n\t\t}\n\t\tif (!swapped)\n\t\t\tbreak ;\n\t}\n}\n\nint\t\tmain(void)\n{\n\tint\t\tindex;\n\tchar\t**tab;\n\n\ttab = malloc(8 * sizeof(char *));\n\ttab[0] = strdup(\"hello\");\n\ttab[1] = strdup(\"world\");\n\ttab[2] = strdup(\"a\");\n\ttab[3] = strdup(\"aa\");\n\ttab[4] = strdup(\"ba\");\n\ttab[5] = strdup(\"ab\");\n\ttab[6] = strdup(\"z\");\n\ttab[7] = 0;\n\tindex = 0;\n\twhile (tab[index])\n\t{\n\t\tprintf(\"tab[%d] <@ %p> = $%s$\\n\", index, tab + index, tab[index]);\n\t\tindex++;\n\t}\n\tft_sort_string_tab(tab);\n\tprintf(\"\\n\");\n\tindex = 0;\n\twhile (tab[index])\n\t{\n\t\tprintf(\"tab[%d] <@ %p> = $%s$\\n\", index, tab + index, tab[index]);\n\t\tindex++;\n\t}\n}\n"
},
{
"alpha_fraction": 0.4167194366455078,
"alphanum_fraction": 0.43920204043388367,
"avg_line_length": 22.744359970092773,
"blob_id": "16d4ecc19de8d59c27238eab641a4d8752744b81",
"content_id": "98c226c8a7493d134d38544334eb2d9beb68c72e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 3158,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 133,
"path": "/C08/ex05/ft_show_tab_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_show_tab.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/11 18:01:29 by ecaceres #+# #+# */\n/* Updated: 2019/08/11 20:33:17 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <unistd.h>\n#include <stdlib.h>\n#include <stdio.h>\n\n#include \"ft_stock_str.h\"\n\nint\t\t\t\t\tft_str_length(char *str)\n{\n\tint\tindex;\n\n\tindex = 0;\n\twhile (str[index])\n\t\tindex++;\n\treturn (index);\n}\n\nchar\t\t\t\t*ft_strdup(char *src)\n{\n\tint\t\tindex;\n\tchar\t*dest;\n\n\tindex = 0;\n\tif ((dest = (char *)malloc(ft_str_length(src) * sizeof(char) + 1)) == NULL)\n\t\treturn (0);\n\twhile (src[index])\n\t{\n\t\tdest[index] = src[index];\n\t\tindex++;\n\t}\n\tdest[index] = '\\0';\n\treturn (dest);\n}\n\nstruct s_stock_str\t*ft_strs_to_tab(int ac, char **av)\n{\n\tint\t\t\t\t\tindex;\n\tstruct s_stock_str\t*array;\n\tstruct s_stock_str\t*created;\n\n\tac = ac + 0;\n\tif (!(array = malloc((ac + 1) * sizeof(struct s_stock_str))))\n\t\treturn (NULL);\n\tindex = 0;\n\twhile (index < ac)\n\t{\n\t\tif (!(created = (t_stock_str *)malloc(sizeof(struct s_stock_str))))\n\t\t\treturn (NULL);\n\t\tarray[index] = *created;\n\t\tarray[index].size = ft_str_length(av[index]);\n\t\tarray[index].str = av[index];\n\t\tarray[index].copy = ft_strdup(av[index]);\n\t\tindex++;\n\t}\n\tarray[index] = (struct s_stock_str){0, 0, 0};\n\treturn (array);\n}\n\nvoid\t\t\t\tft_put_string(char *str)\n{\n\twhile (*str)\n\t{\n\t\twrite(1, str, 1);\n\t\tstr++;\n\t}\n}\n\nvoid\t\t\t\tft_put_number(int number)\n{\n\tif (number > 9)\n\t\tft_put_number(number / 10);\n\twrite(1, &\"0123456789\"[number % 10], 1);\n}\n\nvoid\t\t\t\tft_show_tab(struct s_stock_str *par)\n{\n\tint\tindex;\n\n\tindex = 0;\n\twhile (par[index].str != 0)\n\t{\n\t\tft_put_string(par[index].str);\n\t\tft_put_string(\"\\n\");\n\t\tft_put_number(par[index].size);\n\t\tft_put_string(\"\\n\");\n\t\tft_put_string(par[index].copy);\n\t\tft_put_string(\"\\n\");\n\t\tindex++;\n\t}\n}\n\nint\t\t\t\t\tmain(void)\n{\n\tint\t\t\t\t\tindex;\n\tint\t\t\t\t\tac;\n\tchar\t\t\t\t**av;\n\tstruct s_stock_str\t*structs;\n\n\tac = 10;\n\tav = (char **)malloc((ac + 1) * sizeof(char *));\n\tindex = 0;\n\twhile (index < ac)\n\t{\n\t\tav[index] = (char *)malloc((2 + 1) * sizeof(char));\n\t\tav[index][0] = 'x';\n\t\tav[index][1] = (char)(index + 'a');\n\t\tav[index][2] = '\\0';\n\t\tindex++;\n\t}\n\tstructs = ft_strs_to_tab(ac, av);\n\tindex = 0;\n\twhile (index < ac)\n\t{\n\t\tprintf(\"%d\\n\", index);\n\t\tprintf(\"\\t| original : $%s$ @ %p\\n\", structs[index].str, structs[index].str);\n\t\tprintf(\"\\t| copied : $%s$ @ %p\\n\", structs[index].copy, structs[index].copy);\n\t\tprintf(\"\\t| size : %d\\n\", structs[index].size);\n\t\tindex++;\n\t}\n\tprintf(\"---- SHOW TAB\\n\");\n\tft_show_tab(structs);\n}\n"
},
{
"alpha_fraction": 0.25756555795669556,
"alphanum_fraction": 0.28312036395072937,
"avg_line_length": 26.537036895751953,
"blob_id": "ce491abdb2902acb50cfcb608721e0395911451e",
"content_id": "38aa3a471fd555e4efe28f3a7b9bf44c83f0281c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1487,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 54,
"path": "/C02/ex07/ft_strupcase_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_strupcase.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/01 20:44:05 by ecaceres #+# #+# */\n/* Updated: 2019/08/01 20:44:09 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdbool.h>\n#include <stdio.h>\n\nchar\t*ft_strupcase(char *str)\n{\n\tint\t\tindex;\n\tchar\tcurr;\n\n\tindex = 0;\n\twhile (true)\n\t{\n\t\tcurr = str[index];\n\t\tif (curr == '\\0')\n\t\t{\n\t\t\tbreak ;\n\t\t}\n\t\tif (curr >= 'a' && curr <= 'z')\n\t\t{\n\t\t\tstr[index] = curr - 32;\n\t\t}\n\t\tindex++;\n\t}\n\treturn (str);\n}\n\nint\t\tmain(void)\n{\n\tchar\t*str_base;\n\tchar\tstr_upper[6];\n\tint\t\tindex;\n\n\tstr_base = \"heLl0\";\n\tindex = 0;\n\twhile (index < 6)\n\t{\n\t\tstr_upper[index] = str_base[index] + 0;\n\t\tindex++;\n\t}\n\tft_strupcase(&str_upper[0]);\n\tprintf(\"should be normal : %s\\n\", str_base);\n\tprintf(\"should be upper case: %s\\n\", str_upper);\n}\n"
},
{
"alpha_fraction": 0.41196516156196594,
"alphanum_fraction": 0.43203333020210266,
"avg_line_length": 23.68224334716797,
"blob_id": "66782af77f71989b750d0489fda87dbd90e52822",
"content_id": "6ca1bd920c52efe7510697a280e95fa1d9dcc331",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2641,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 107,
"path": "/ExamFinal/work-done/count_alpha/count_alpha.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* count_alpha.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: exam <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/23 13:12:41 by exam #+# #+# */\n/* Updated: 2019/08/23 13:12:42 by exam ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdbool.h>\n#include <unistd.h>\n#include <stdio.h>\n\n#define LETTER_COUNT 26\n#define NOT_COUNTED_YET -1\n#define CASE_OFFSET 0x20\n\nbool\tft_is_uppercase(char c)\n{\n\treturn (c >= 'A' && c <= 'Z');\n}\n\nint\t\tft_count_alpha_recursive_count(char *str, char target)\n{\n\tint\t\tcount;\n\tchar\tcurrent;\n\n\tcount = 0;\n\twhile ((current = *str))\n\t{\n\t\tif (ft_is_uppercase(current))\n\t\t\tcurrent += CASE_OFFSET;\n\t\tif (target == current)\n\t\t\tcount++;\n\t\tstr++;\n\t}\n\treturn (count);\n}\n\nvoid\tft_count_alpha_print(int occurences[LETTER_COUNT + 1],\n\t\t\t\tchar order[LETTER_COUNT + 1])\n{\n\tint\tindex;\n\tint\tsize;\n\n\tsize = 0;\n\twhile (order[size] != '\\0')\n\t\tsize++;\n\tindex = 0;\n\twhile (index < size)\n\t{\n\t\tprintf(\"%d%c\", occurences[order[index] - 'a'], order[index]);\n\t\tif (index != size - 1)\n\t\t\tprintf(\", \");\n\t\tindex++;\n\t}\n}\n\nchar\tft_count_alpha_validate(char c, int occurences[LETTER_COUNT + 1])\n{\n\tif (!ft_is_uppercase(c) && !((c >= 'a' && c <= 'z')))\n\t\treturn ('\\0');\n\tif (ft_is_uppercase(c))\n\t\tc += CASE_OFFSET;\n\tif (occurences[(int)c - 'a'] != NOT_COUNTED_YET)\n\t\treturn ('\\0');\n\treturn (c);\n}\n\nvoid\tft_count_alpha(char *str)\n{\n\tint\t\tindex;\n\tint\t\toccurences[LETTER_COUNT + 1];\n\tchar\t\toccurences_order[LETTER_COUNT + 1];\n\tchar\tcurrent;\n\n\tindex = 0;\n\twhile (index < LETTER_COUNT + 1)\n\t{\n\t\toccurences[index] = NOT_COUNTED_YET;\n\t\toccurences_order[index] = '\\0';\n\t\tindex++;\n\t}\n\tindex = 0;\n\twhile (*str)\n\t{\n\t\tcurrent = ft_count_alpha_validate(*str, occurences);\n\t\tif (current != '\\0')\n\t\t{\n\t\t\toccurences[(int)current - 'a'] = ft_count_alpha_recursive_count(str, current);\n\t\t\toccurences_order[index] = current;\n\t\t\tindex++;\n\t\t}\n\t\tstr++;\n\t}\n\tft_count_alpha_print(occurences, occurences_order);\n}\n\nint\t\tmain(int argc, char **argv)\n{\n\tif (argc == 2)\n\t\tft_count_alpha(argv[1]);\n\tprintf(\"\\n\");\n}\n"
},
{
"alpha_fraction": 0.27524206042289734,
"alphanum_fraction": 0.31396958231925964,
"avg_line_length": 40.31428527832031,
"blob_id": "df5c56e43b3c0121a1cc13e314ad35a60920095d",
"content_id": "46ca25e3ae575b49601f56e8ba09c711aff9878a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1446,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 35,
"path": "/C05/ex03/ft_recursive_power_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_recursive_power.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/06 12:48:16 by ecaceres #+# #+# */\n/* Updated: 2019/08/06 12:48:19 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdio.h>\n\nint\tft_recursive_power_recursive(int number, int power, int result)\n{\n\tif (power-- > 0)\n\t\treturn (ft_recursive_power_recursive(number, power, result *= number));\n\treturn (result);\n}\n\nint\tft_recursive_power(int nb, int power)\n{\n\tif (power < 0)\n\t\treturn (0);\n\treturn (ft_recursive_power_recursive(nb, power, 1));\n}\n\nint\tmain(void)\n{\n\tprintf(\"10^2 = %d\\n\", ft_recursive_power(10, 2));\n\tprintf(\"10^0 = %d\\n\", ft_recursive_power(10, 0));\n\tprintf(\"10^-1 = %d\\n\", ft_recursive_power(10, -1));\n\tprintf(\"10^5 = %d\\n\", ft_recursive_power(10, 5));\n}\n"
},
{
"alpha_fraction": 0.6169331073760986,
"alphanum_fraction": 0.6240379214286804,
"avg_line_length": 17.766666412353516,
"blob_id": "72fe1c23bd7fd75e2853fda8c00e972574245b54",
"content_id": "30eff5423732a649fbe2ebc972f44e3682f9e1c7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1689,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 90,
"path": "/C10/ex03/srcs/ft_args_parser.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdio.h>\n#include <libgen.h>\n\n#include \"ft_args_parser.h\"\n#include \"ft_console_io.h\"\n#include \"ft_hexdump.h\"\n\n#define ERR_ILLG_OPT \"illegal option\"\n\n#define HAS_MORE (index + 1 < count)\n\nint\t\tcount_same_letter(t_options *options, char letter, char *str)\n{\n\tint\t\tcount;\n\tint\t\tindex;\n\n\tcount = 0;\n\tindex = 1;\n\twhile (str[index])\n\t{\n\t\tif (str[index] == letter)\n\t\t\tcount++;\n\t\telse\n\t\t{\n\t\t\tdisplay_error(options, ERR_ILLG_OPT, str + index, true);\n\t\t\tdisplay_usage(options);\n\t\t\treturn (-1);\n\t\t}\n\t\tindex++;\n\t}\n\treturn (count);\n}\n\nbool\tparse_args(char **args, int count)\n{\n\tt_options\toptions;\n\tint\t\t\tindex;\n\tint\t\t\toffset;\n\tchar\t\t*arg;\n\n\toptions.executable = args[0];\n\toptions.disp_cont = 0;\n\tindex = 1;\n\twhile (index < count)\n\t{\n\t\targ = args[index];\n\t\tif (arg[0] == '-' && arg[1] != '\\0')\n\t\t{\n\t\t\toffset = count_same_letter(&options, 'C', arg);\n\t\t\tif (offset == -1)\n\t\t\t\treturn (false);\n\t\t\toptions.disp_cont++;\n\t\t}\n\t\telse\n\t\t\tbreak ;\n\t\tindex++;\n\t}\n\treturn (process_files(args, count, index, &options));\n}\n\nbool\tprocess_files(char **args, int count, int index, t_options *options)\n{\n\tint\t\ttotal;\n\n\ttotal = count - index;\n\tif (total == 0)\n\t\treturn (ft_stdin_hexdump(options));\n\toptions->paths = args + index;\n\treturn (ft_hexdump(options, total));\n}\n\nvoid\tdisplay_usage(t_options *options)\n{\n\twrite_str_err(\"usage: \");\n\twrite_str_err(basename(options->executable));\n\twrite_str_err(\" [-C] [file ...]\\n\");\n}\n\nvoid\tdisplay_error(t_options *options, char *error, char *cause, bool limit)\n{\n\twrite_str_err(basename(options->executable));\n\twrite_str_err(\": \");\n\twrite_str_err(error);\n\twrite_str_err(\" -- \");\n\tif (limit)\n\t\twrite_char_err(*cause);\n\telse\n\t\twrite_str_err(cause);\n\twrite_str_err(\"\\n\");\n}\n"
},
{
"alpha_fraction": 0.7126436829566956,
"alphanum_fraction": 0.7126436829566956,
"avg_line_length": 20.75,
"blob_id": "4426b21b6746374383d89dd7b294b1a5efdce315",
"content_id": "eabb608896c1b2176d1bb31410acde84aaf3438e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 261,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 12,
"path": "/C10/ex02/includes/ft_tail_utils.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_TAIL_UTILS_H\n# define FT_TAIL_UTILS_H\n\nint\t\topen_file(char *executable, char *path);\n\nint\t\tclose_file(char *executable, char *path, int fd);\n\nvoid\tshow_error(char *executable, char *path);\n\nchar\t*read_full(int fd, unsigned long *total_read);\n\n#endif\n"
},
{
"alpha_fraction": 0.6513761281967163,
"alphanum_fraction": 0.6605504751205444,
"avg_line_length": 17.16666603088379,
"blob_id": "50ad8a7a6d67dfa7a6ee9078e503cb6b8d37f89e",
"content_id": "f5036c8e0bff08945ca79f20006bcc0b80a66d47",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 109,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 6,
"path": "/C10/ex02/srcs/ft_abs.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "unsigned int\tft_abs_int(int number)\n{\n\tif (number < 0)\n\t\treturn (-number);\n\treturn ((unsigned int)number);\n}\n"
},
{
"alpha_fraction": 0.3319838047027588,
"alphanum_fraction": 0.3562752902507782,
"avg_line_length": 28.305084228515625,
"blob_id": "9fa0f67696f5e275eac81a82bec82aaabc6d97da",
"content_id": "9d404bf794982e323624b51793fd8b02bd142c47",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1729,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 59,
"path": "/C12/ex13/ft_list_merge_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_list_merge.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/22 18:35:50 by ecaceres #+# #+# */\n/* Updated: 2019/08/22 18:35:51 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include \"ft_list.h\"\n\n#include <stdlib.h>\n#include <stdio.h>\n\nvoid\tft_list_merge(t_list **begin_list1, t_list *begin_list2)\n{\n\tt_list\t*current;\n\tt_list\t*previous;\n\n\tcurrent = *begin_list1;\n\twhile (current)\n\t{\n\t\tprevious = current;\n\t\tcurrent = current->next;\n\t}\n\tprevious->next = begin_list2;\n}\n\nint\t\tmain(void)\n{\n\tint\t\tindex;\n\tint\t\t*malloced_index;\n\tt_list\t*list;\n\tt_list\t*list2;\n\n\tindex = 0;\n\tlist = ft_create_elem(0);\n\tlist2 = ft_create_elem(0);\n\twhile (index < 10)\n\t{\n\t\tmalloced_index = malloc(sizeof(int));\n\t\t*malloced_index = index;\n\t\tft_list_push_back(&list, (void *)malloced_index);\n\t\tft_list_push_back(&list2, (void *)malloced_index);\n\t\tindex++;\n\t}\n\tft_list_merge(&list->next, list2->next);\n\tindex = 0;\n\tlist = list->next;\n\twhile (list)\n\t{\n\t\tprintf(\"list[%d]: %d\\n\", index, *((int *)list->data));\n\t\tindex++;\n\t\tlist = list->next;\n\t}\n}\n"
},
{
"alpha_fraction": 0.3055403232574463,
"alphanum_fraction": 0.3422929346561432,
"avg_line_length": 22.986841201782227,
"blob_id": "8550db33f6217516aa9cd73fd0591fde99276e34",
"content_id": "0abc46e2bfa1207f3caf2d060100dc69838d0265",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1823,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 76,
"path": "/Exam02/work-done/inter/inter.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* inter.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: exam <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/16 18:27:24 by exam #+# #+# */\n/* Updated: 2019/08/16 18:27:24 by exam ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <unistd.h>\n#include <stdbool.h>\n\nint\tft_strlen(char *str)\n{\n\tint\tlength;\n\n\tlength = 0;\n\twhile (str[length])\n\t\tlength++;\n\treturn (length);\n}\n\nbool\thas_already(char c, bool already[255])\n{\n\treturn (already[(unsigned char)c]);\n}\n\nvoid\tset_already(char c, bool already[255])\n{\n\talready[(unsigned char)c] = true;\n}\n\nvoid\tinter(char *str1, char *str2)\n{\n\tint\t\tindex;\n\tint\t\tjndex;\n\tint\t\tlen1;\n\tint\t\tlen2;\n\tbool\talready[255];\n\n\tlen1 = ft_strlen(str1);\n\tlen2 = ft_strlen(str2);\n\tindex = 0;\n\twhile (index < 255)\n\t{\n\t\talready[index] = false;\n\t\tindex++;\n\t}\n\tindex = 0;\n\tjndex = 0;\n\twhile (index < len1)\n\t{\n\t\tjndex = 0;\n\t\twhile (jndex < len2)\n\t\t{\n\t\t\tif (str1[index] == str2[jndex])\n\t\t\t{\n\t\t\t\tif (!has_already(str1[index], already))\n\t\t\t\t\twrite(1, &str1[index], 1);\n\t\t\t\tset_already(str1[index], already);\n\t\t\t}\n\t\t\tjndex++;\n\t\t}\n\t\tindex++;\n\t}\n}\n\nint\t\tmain(int argc, char **argv)\n{\n\tif (argc == 3)\n\t\tinter(argv[1], argv[2]);\n\twrite(1, \"\\n\", 1);\n}\n"
},
{
"alpha_fraction": 0.34364640712738037,
"alphanum_fraction": 0.36353591084480286,
"avg_line_length": 26.846153259277344,
"blob_id": "0366abd9fb041052124351783f790151dbf7ce34",
"content_id": "23f9466360abf9ac9ffcbb6b4cb54f81b10aae75",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1810,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 65,
"path": "/C12/ex08/ft_list_reverse_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_list_reverse.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/22 16:45:58 by ecaceres #+# #+# */\n/* Updated: 2019/08/22 16:45:58 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdlib.h>\n\n#include \"ft_list.h\"\n\n#include <stdio.h>\n\nvoid\tft_list_reverse(t_list **begin_list)\n{\n\tt_list\t*next;\n\tt_list\t*previous;\n\tt_list\t*current;\n\n\tif (*begin_list == 0)\n\t\treturn ;\n\tcurrent = *begin_list;\n\twhile (current != NULL)\n\t{\n\t\tnext = current->next;\n\t\tcurrent->next = previous;\n\t\tprevious = current;\n\t\tcurrent = next;\n\t}\n\t*begin_list = previous;\n}\n\nint\t\tmain(void)\n{\n\tint\t\tindex;\n\tint\t\t*malloced_index;\n\tt_list\t*list;\n\tt_list\t*current;\n\n\tindex = 1;\n\tlist = ft_create_elem(0);\n\twhile (index < 10)\n\t{\n\t\tmalloced_index = malloc(sizeof(int));\n\t\t*malloced_index = index;\n\t\tft_list_push_back(&list, (void *)malloced_index);\n\t\tprintf(\"list[%d] = %d\\n\", index, *malloced_index);\n\t\tindex++;\n\t}\n\tindex = 1;\n\tft_list_reverse(&(list->next));\n\tprintf(\"Reversed\\n\");\n\tcurrent = list->next;\n\twhile (index < 10)\n\t{\n\t\tprintf(\"list[%d] = %d\\n\", index, *((int *)current->data));\n\t\tindex++;\n\t\tcurrent = current->next;\n\t}\n}\n"
},
{
"alpha_fraction": 0.6017160415649414,
"alphanum_fraction": 0.6111264824867249,
"avg_line_length": 42.54216766357422,
"blob_id": "3d5ab4fbfc03c264597688d06555fadf26ddae16",
"content_id": "07d82790d93d7a85677c870375a8b80451d73abd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 3613,
"license_type": "no_license",
"max_line_length": 168,
"num_lines": 83,
"path": "/C10/ex01/Makefile",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "C_YELLOW_B=\\033[1;33m\nC_LIGHT_RED_B=\\033[0;91m\nC_WHITE=\\033[0;97m\nC_RESET=\\033[0;39m\n\nSRCS\t= ./srcs/main.c ./srcs/cat.c ./srcs/string_utils.c\nOBJS\t= ${SRCS:.c=.o}\nINCS\t= includes\nNAME\t= ft_cat\nCC\t\t= gcc\nRM\t\t= rm -f\nCFLAGS\t= -Wall -Wextra -Werror\n\n.c.o :\n\t${CC} ${CFLAGS} -c $< -o ${<:.c=.o} -I${INCS}\n\n${NAME} : ${OBJS}\n\t${CC} ${CFLAGS} ${OBJS} -o ${NAME}\n\nall : ${NAME}\n\nclean :\n\t${RM} ${OBJS}\n\nfclean : clean\n\t${RM} ${NAME}\n\nnorm :\n\tnorminette -R CheckForbiddenSourceHeader */*.[ch]\n\nre : fclean all\n\ntest : re\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)Single file: ./Makefile$(C_RESET)\" ;\n\t@cat ./Makefile > result-off.txt ;\n\t@./ft_cat ./Makefile > result-usr.txt ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)Multiple file: ./Makefile ./Makefile$(C_RESET)\" ;\n\t@cat ./Makefile ./Makefile > result-off.txt ;\n\t@./ft_cat ./Makefile ./Makefile > result-usr.txt ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)Single file: x < ./Makefile$(C_RESET)\" ;\n\t@cat < ./Makefile > result-off.txt ;\n\t@./ft_cat < ./Makefile > result-usr.txt ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ SHOULD NOT HAVE DIFF ]$(C_RESET) $(C_WHITE)Pipied: echo Hello | x$(C_RESET)\" ;\n\t@echo Hello | cat > result-off.txt ;\n\t@echo Hello | ./ft_cat > result-usr.txt ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ MUST HAVE DIFF ]$(C_RESET) $(C_WHITE)Invalid file: ./a$(C_RESET)\" ;\n\t@cat ./a > out 2>result-off.txt || true ;\n\t@./ft_cat ./a > out 2>result-usr.txt || true ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ MUST HAVE DIFF ]$(C_RESET) $(C_WHITE)Multiple file with invalid: ./Makefile ./a ./Makefile$(C_RESET)\" ;\n\t@cat ./Makefile ./a ./Makefile > out 2>result-off.txt || true ;\n\t@./ft_cat ./Makefile ./a ./Makefile > out 2>result-usr.txt || true ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ MUST HAVE DIFF ]$(C_RESET) $(C_WHITE)With folder: ./srcs/$(C_RESET)\" ;\n\t@cat ./srcs/ > out 2>result-off.txt || true ;\n\t@./ft_cat ./srcs/ > out 2>result-usr.txt || true ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ MUST HAVE DIFF ]$(C_RESET) $(C_WHITE)With a read-only file: ./read-only.txt$(C_RESET)\" ;\n\t@echo \"Hey! 
I am in read-only!!\" > read-only.txt ;\n\t@chmod -xrw read-only.txt ;\n\t@cat ./read-only.txt > out 2>result-off.txt || true ;\n\t@./ft_cat ./read-only.txt > out 2>result-usr.txt || true ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_LIGHT_RED_B)[ MUST HAVE DIFF ]$(C_RESET) $(C_WHITE)With file and directory: ./srcs/ ./Makefile$(C_RESET)\" ;\n\t@cat ./srcs/ ./Makefile > out 2>result-off.txt || true ;\n\t@./ft_cat ./srcs/ ./Makefile > out 2>result-usr.txt || true ;\n\t@git --no-pager diff --no-index result-usr.txt result-off.txt || true ;\n\t@##\n\t@rm -f result-off.txt result-usr.txt out read-only.txt;\n\n.PHONY: all clean fclean re .c.o test"
},
{
"alpha_fraction": 0.3360406160354614,
"alphanum_fraction": 0.3614213168621063,
"avg_line_length": 25.62162208557129,
"blob_id": "6ba56fc79862e7e446fda8e2b3bb2cc241330c58",
"content_id": "e58d885b48b67cf064ecb882351ad9864b082773",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1970,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 74,
"path": "/C03/ex05/ft_strlcat_dev2.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_strlcat.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/04 15:48:24 by ecaceres #+# #+# */\n/* Updated: 2019/08/05 12:00:15 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdio.h>\n#include <string.h>\n\nunsigned int\tft_str_length_fast(char *dest)\n{\n\tunsigned int\tcount;\n\n\tcount = 0;\n\twhile (dest[count] != '\\0')\n\t\tcount++;\n\treturn (count);\n}\n\nunsigned int\tft_strlcat(char *dest, char *src, unsigned int size)\n{\n\tchar\t\t\t*dst;\n\tchar\t\t\t*src_start;\n\tunsigned int\tdst_length;\n\tunsigned int\tremaing;\n\n\tdst = dest;\n\tsrc_start = src;\n\tremaing = size;\n\twhile (remaing-- != 0 && *dst != '\\0')\n\t\tdst++;\n\tdst_length = dst - dest;\n\tremaing = size - dst_length;\n\tif (remaing == 0)\n\t\treturn (dst_length + ft_str_length_fast(src));\n\twhile (*src != '\\0')\n\t{\n\t\tif (remaing > 1)\n\t\t{\n\t\t\t*dst++ = *src;\n\t\t\tremaing--;\n\t\t}\n\t\tsrc++;\n\t}\n\t*dst = '\\0';\n\treturn (dst_length + (src - src_start));\n}\n\nint\t\t\t\tmain(void)\n{\n\tchar\t*str_base;\n\tchar\tdest[100];\n\tchar\tdest2[100];\n\tchar\t*src;\n\tint\t\tindex;\n\n\tstr_base = \"Hello\";\n\tsrc = \" World\";\n\tindex = 0;\n\twhile (index < 6)\n\t{\n\t\tdest[index] = str_base[index];\n\t\tdest2[index] = str_base[index];\n\t\tindex++;\n\t}\n\tprintf(\"c : (%lu) $%s$\\n\", strlcat(dest, src, 8), dest);\n\tprintf(\"ft : (%d) $%s$\\n\", ft_strlcat(dest2, src, 8), dest2);\n}\n"
},
{
"alpha_fraction": 0.6148409843444824,
"alphanum_fraction": 0.6219081282615662,
"avg_line_length": 22.58333396911621,
"blob_id": "4d68acd1c9f40ec152184af01ac44fa1c7e2325c",
"content_id": "aa40d3cf96e31d131a2c4c27b99ef1243caa5179",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 283,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 12,
"path": "/C12/ex11/ft_list_find.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdlib.h>\n\n#include \"ft_list.h\"\n\nt_list\t*ft_list_find(t_list *begin_list, void *data_ref, int (*cmp)())\n{\n\tif (begin_list == 0)\n\t\treturn (NULL);\n\tif ((*cmp)(begin_list->data, data_ref) == 0)\n\t\treturn (begin_list);\n\treturn (ft_list_find(begin_list->next, data_ref, cmp));\n}\n"
},
{
"alpha_fraction": 0.32343077659606934,
"alphanum_fraction": 0.35697174072265625,
"avg_line_length": 23.552940368652344,
"blob_id": "9efbdd550e9da7b29ac66478fedbd775ff9d0cdc",
"content_id": "a807ef998c468d8f512654b51323c2faf50847e6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2087,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 85,
"path": "/C05/ex06/ft_is_prime_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_is_prime.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/06 13:50:09 by ecaceres #+# #+# */\n/* Updated: 2019/08/06 13:50:11 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdio.h>\n#include <limits.h>\n#include <math.h>\n\nint\tft_sqrt(int nb)\n{\n\tunsigned int\t\tsqrt;\n\tunsigned int\t\tindex;\n\n\tif (nb < 0)\n\t\treturn (0);\n\tif (nb <= 1)\n\t\treturn (nb);\n\tindex = 0;\n\twhile ((sqrt = index * index) <= (unsigned int)nb)\n\t\tindex++;\n\tindex -= 1;\n\treturn (index);\n}\n\nint\tft_is_prime(int nb)\n{\n\tint\tindex;\n\tint\tsqrt;\n\n\tif (nb <= 1)\n\t\treturn (0);\n\tif (nb <= 3)\n\t\treturn (1);\n\tif (nb % 2 == 0 || nb % 3 == 0)\n\t\treturn (0);\n\tindex = 2;\n\tsqrt = ft_sqrt(nb);\n\twhile ((index <= sqrt) && (nb % index != 0))\n\t\tindex++;\n\treturn (index > sqrt);\n}\n\nint\tis_prime(int prime)\n{\n\tint\ti;\n\tint\tsq;\n\n\tif (prime <= 1)\n\t\treturn (0);\n\tsq = (int)sqrt(prime);\n\tfor (i = 2; (i <= sq) && (prime % i != 0); i++);\n\treturn (i > sq);\n}\n\nint\tmain(void)\n{\n\tint\tnumber;\n\tint\tresults[2];\n\tint\tcount;\n\n\tnumber = 0;\n\tcount = 0;\n\twhile (number < 50000)\n\t{\n\t\tresults[0] = is_prime(number);\n\t\tresults[1] = ft_is_prime(number);\n\t\tprintf(\"is_prime(%d) = %d -- %d\\n\", number, results[1], results[0]);\n\t\tif (results[0] != results[1])\n\t\t\treturn (1);\n\t\tnumber++;\n\t\tif (results[0])\n\t\t\tcount++;\n\t}\n\tprintf(\"is_prime(%d) = %d\\n\", INT_MAX, is_prime(INT_MAX));\n\tprintf(\"first 50'000 number, %d prime\\n\", count);\n\treturn (0);\n}\n"
},
{
"alpha_fraction": 0.545098066329956,
"alphanum_fraction": 0.5568627715110779,
"avg_line_length": 10.086956977844238,
"blob_id": "04f3a277e380c307d8b5edd67244ec9110291017",
"content_id": "5a56b288b0b029f1ec5e6dedc10f47205d821a5c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 255,
"license_type": "no_license",
"max_line_length": 25,
"num_lines": 23,
"path": "/C01/ex05/ft_putstr.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <unistd.h>\n#include <stdbool.h>\n\nvoid\tft_putchar(char c)\n{\n\twrite(1, &c, 1);\n}\n\nvoid\tft_putstr(char *str)\n{\n\tchar next_char;\n\n\twhile (true)\n\t{\n\t\tnext_char = *str;\n\t\tif (next_char == '\\0')\n\t\t{\n\t\t\tbreak ;\n\t\t}\n\t\tft_putchar(next_char);\n\t\tstr++;\n\t}\n}\n"
},
{
"alpha_fraction": 0.39839571714401245,
"alphanum_fraction": 0.41488415002822876,
"avg_line_length": 26.703702926635742,
"blob_id": "e54e0e08aad100c2ba1ac0661317ad7c71cf64d5",
"content_id": "0d5cc9d397e030e6247086e38bf6f2babd012953",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2244,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 81,
"path": "/Rush02/ex00/srcs/main.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* main.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/17 11:02:43 by ecaceres #+# #+# */\n/* Updated: 2019/08/17 11:02:43 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <unistd.h>\n\n#include \"ft_number_dictionary.h\"\n#include \"ft_boolean.h\"\n#include \"ft_io.h\"\n#include \"ft_to.h\"\n#include \"ft_str.h\"\n\nt_error\t\tprocess(char *to_convert, t_dict *dict)\n{\n\tULNG\tnumber;\n\tt_bool\tput_space;\n\n\tnumber = ft_atoi_strict(to_convert);\n\tif (number == (ULNG)-1)\n\t\treturn (invalid_number);\n\tif (!ft_itow(dict, number, 0, false))\n\t\treturn (fail_convert);\n\tput_space = false;\n\tft_itow(dict, number, &put_space, true);\n\treturn (none);\n}\n\nvoid\t\tshow_error(t_error error)\n{\n\tif (error == none)\n\t\treturn ;\n\tif (error == dict_parsing)\n\t\tft_str_write_to(ERR, \"Dict Error\");\n\telse\n\t\tft_str_write_to(ERR, \"Error\");\n\tft_str_write_to(ERR, \"\\n\");\n}\n\nt_error\t\tmain_delegate(int argc, char **argv, char **to_conv, t_dict *dict)\n{\n\tif (argc == 3)\n\t{\n\t\t*dict = ft_load_dictionary(argv[1]);\n\t\t*to_conv = argv[2];\n\t}\n\telse if (argc == 2)\n\t{\n\t\t*dict = ft_load_default_dictionary();\n\t\t*to_conv = argv[1];\n\t}\n\telse\n\t\treturn (generic);\n\treturn (none);\n}\n\nint\t\t\tmain(int argc, char **argv)\n{\n\tt_error\terror;\n\tt_dict\tdict;\n\tchar\t*to_convert;\n\n\terror = main_delegate(argc, argv, &to_convert, &dict);\n\tif (error == none && !dict.valid)\n\t\terror = dict_parsing;\n\tif (error == none)\n\t\terror = process(to_convert, &dict);\n\tshow_error(error);\n\tif (error == none)\n\t\tft_str_write_to(OUT, \"\\n\");\n\tif (dict.valid)\n\t\tft_free_dictionary(&dict);\n\treturn (error ? 1 : 0);\n}\n"
},
{
"alpha_fraction": 0.5837036967277527,
"alphanum_fraction": 0.6014814972877502,
"avg_line_length": 24.980770111083984,
"blob_id": "5ea4b0a65c0273ee02bcb2b3f9132aed9bc5a581",
"content_id": "75906bb4f8800e1a96832b10833943b736349622",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 1350,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 52,
"path": "/C10/ex00/Makefile",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "C_YELLOW_B=\\033[1;33m\nC_LIGHT_RED_B=\\033[0;91m\nC_WHITE=\\033[0;97m\nC_RESET=\\033[0;39m\n\nSRCS\t= ./srcs/main.c ./srcs/display_file.c ./srcs/string_utils.c\nOBJS\t= ${SRCS:.c=.o}\nINCS\t= includes\nNAME\t= ft_display_file\nCC\t\t= gcc\nRM\t\t= rm -f\nCFLAGS\t= -Wall -Wextra -Werror\n\n.c.o :\n\t${CC} ${CFLAGS} -c $< -o ${<:.c=.o} -I${INCS}\n\n${NAME} : ${OBJS}\n\t${CC} ${CFLAGS} ${OBJS} -o ${NAME}\n\nall : ${NAME}\n\nclean :\n\t${RM} ${OBJS}\n\nfclean : clean\n\t${RM} ${NAME}\n\nnorm :\n\tnorminette -R CheckForbiddenSourceHeader */*.[ch]\n\nre : fclean all\n\ntest : re\n\t@echo \"I am a file content\" > dummy_file.txt\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)No file$(C_RESET)\" ;\n\t@./ft_display_file || true;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Single file: ./dummy_file.txt$(C_RESET)\" ;\n\t@./ft_display_file ./dummy_file.txt || true;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Multiple file: ./dummy_file.txt ./dummy_file.txt$(C_RESET)\" ;\n\t@./ft_display_file ./dummy_file.txt ./dummy_file.txt || true;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Invalid file: ./a$(C_RESET)\" ;\n\t@./ft_display_file ./a || true;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)On directory: ./srcs/$(C_RESET)\" ;\n\t@./ft_display_file ./srcs/ || true;\n\t@##\n\t@rm -f result-off.txt result-usr.txt dummy_file.txt ;\n\n.PHONY: all clean fclean re .c.o test"
},
{
"alpha_fraction": 0.26446837186813354,
"alphanum_fraction": 0.2880215346813202,
"avg_line_length": 29.95833396911621,
"blob_id": "2b40f00efc1c4f790a1478549da40cedfb3fc4db",
"content_id": "a57749738972e4b492ffa4f5c70d6592d9797659",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1486,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 48,
"path": "/Final Project/srcs/main.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* main.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/19 12:09:25 by ecaceres #+# #+# */\n/* Updated: 2019/08/21 20:08:54 by fyusuf-a ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include \"bsq.h\"\n\nvoid\t\tprocess_args(t_grid *grid, t_solution *sol, int argc, char **argv)\n{\n\tint\t\t\ti;\n\n\ti = 1;\n\twhile (i < argc)\n\t{\n\t\tif (ft_load_grid(argv[i], grid))\n\t\t\tft_process_grid(grid, sol);\n\t\telse\n\t\t\twrite(ERR, \"map error\\n\", 10);\n\t\ti++;\n\t\tif (i != argc)\n\t\t\twrite(OUT, \"\\n\", 1);\n\t\tft_free_grid(grid);\n\t}\n}\n\nint\t\t\tmain(int argc, char **argv)\n{\n\tt_grid\t\tgrid;\n\tt_solution\tsolution;\n\n\tif (argc < 2)\n\t{\n\t\tif (ft_parse_grid(IN, &grid))\n\t\t\tft_process_grid(&grid, &solution);\n\t\telse\n\t\t\twrite(ERR, \"map error\\n\", 10);\n\t\tft_free_grid(&grid);\n\t}\n\telse\n\t\tprocess_args(&grid, &solution, argc, argv);\n}\n"
},
{
"alpha_fraction": 0.5185185074806213,
"alphanum_fraction": 0.5802469253540039,
"avg_line_length": 14.428571701049805,
"blob_id": "ad6e8ed989f2fcba3e3ae663d6ca0704529b103e",
"content_id": "306c3abc56696cd3c9725202d38e9278ffa5b81f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 324,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 21,
"path": "/C11/ex05/srcs/ft_number_write.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include \"ft_char.h\"\n\nvoid\tft_number_write(int number)\n{\n\tif (number == -2147483648)\n\t{\n\t\tft_number_write(number / 10);\n\t\tft_char_write('8');\n\t}\n\telse if (number < 0)\n\t{\n\t\tft_char_write('-');\n\t\tft_number_write(-number);\n\t}\n\telse\n\t{\n\t\tif (number > 9)\n\t\t\tft_number_write(number / 10);\n\t\tft_char_write('0' + number % 10);\n\t}\n}\n"
},
{
"alpha_fraction": 0.2689504325389862,
"alphanum_fraction": 0.2951894998550415,
"avg_line_length": 31.66666603088379,
"blob_id": "67274ed32a1f9ca0a6194c7aac3cf9fa85bc2837",
"content_id": "45e815bb3177e6a6dd86bcc2c8a39c3bef573995",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1372,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 42,
"path": "/C05/ex01/ft_recursive_factorial_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_recursive_factorial.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/06 12:02:04 by ecaceres #+# #+# */\n/* Updated: 2019/08/06 12:02:05 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdio.h>\n\nint\tft_recursive_factorial_recursive(int factorial, int number)\n{\n\tfactorial *= number--;\n\tif (number > 0)\n\t\treturn (ft_recursive_factorial_recursive(factorial, number));\n\treturn (factorial);\n}\n\nint\tft_recursive_factorial(int nb)\n{\n\tif (nb < 0)\n\t\treturn (0);\n\tif (nb == 0)\n\t\treturn (1);\n\treturn (ft_recursive_factorial_recursive(1, nb));\n}\n\nint\tmain(void)\n{\n\tint n;\n\n\tn = -2;\n\twhile (n < 7)\n\t{\n\t\tprintf(\"fact(%d) = %d\\n\", n, ft_recursive_factorial(n));\n\t\tn++;\n\t}\n}\n"
},
{
"alpha_fraction": 0.6187845468521118,
"alphanum_fraction": 0.6215469837188721,
"avg_line_length": 21.625,
"blob_id": "4fd5d41bfec9a29759e8bb35b25ea4266a40b726",
"content_id": "9d9a9ca98036ced775bae2dae354b11c4421757a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 362,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 16,
"path": "/C10/ex03/srcs/ft_file_utils2.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdlib.h>\n\n#include \"ft_file_utils.h\"\n#include \"ft_strncpy.h\"\n\nchar\t*ft_extend_array(char *orig, char *n_cont, UINT old_len, UINT *len)\n{\n\tchar *dest;\n\n\tif (!(dest = malloc((*len + 1) * sizeof(char))))\n\t\treturn (NULL);\n\tif (orig != NULL)\n\t\tft_strncpy(dest, orig, old_len);\n\tft_strncpy(dest + old_len, n_cont, (UINT)(*len - old_len));\n\treturn (dest);\n}\n"
},
{
"alpha_fraction": 0.2547728717327118,
"alphanum_fraction": 0.2843976318836212,
"avg_line_length": 27.660377502441406,
"blob_id": "a77e0343fcb8f7d6e0b245a8e222ee2c668532bb",
"content_id": "3b46371c5555bfcacee2adc41e046b6e81df9370",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1519,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 53,
"path": "/C03/ex03/ft_strncat_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_strncat.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/04 13:57:35 by ecaceres #+# #+# */\n/* Updated: 2019/08/04 13:57:37 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdio.h>\n#include <string.h>\n\nchar\t*ft_strncat(char *dest, char *src, unsigned int nb)\n{\n\tchar\t*dst;\n\n\tdst = dest;\n\twhile (*dst != '\\0')\n\t\tdst++;\n\twhile (*src != '\\0' && nb > 0)\n\t{\n\t\t*dst = *(unsigned char *)src;\n\t\tdst++;\n\t\tsrc++;\n\t\tnb--;\n\t}\n\t*dst = '\\0';\n\treturn (dest);\n}\n\nint\t\tmain(void)\n{\n\tchar\t*str_base;\n\tchar\tdest[100];\n\tchar\tdest2[100];\n\tchar\t*src;\n\tint\t\tindex;\n\n\tstr_base = \"Hello\";\n\tsrc = \" World\";\n\tindex = 0;\n\twhile (index < 6)\n\t{\n\t\tdest[index] = str_base[index];\n\t\tdest2[index] = str_base[index];\n\t\tindex++;\n\t}\n\tprintf(\"c : %s$\\n\", strncat(dest, src, 4));\n\tprintf(\"ft : %s$\\n\", ft_strncat(dest2, src, 4));\n}\n"
},
{
"alpha_fraction": 0.6031249761581421,
"alphanum_fraction": 0.612500011920929,
"avg_line_length": 16.77777862548828,
"blob_id": "e39c6bc790a29a432641f9cae9e8a916be431707",
"content_id": "c257066b12d4b59c3167fd69df6877946b497068",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1280,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 72,
"path": "/C10/ex02/srcs/ft_tail_out.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <fcntl.h>\n#include <string.h>\n#include <unistd.h>\n#include <errno.h>\n#include <libgen.h>\n#include <stdio.h>\n\n#include \"ft_tail.h\"\n#include \"ft_tail_utils.h\"\n#include \"ft_console_io.h\"\n\nint\t\tcount_line(unsigned long byte_read, char *content)\n{\n\tunsigned long\tindex;\n\tint\t\t\t\tline_count;\n\n\tindex = 0;\n\tline_count = 0;\n\twhile (index < byte_read)\n\t{\n\t\tif (content[index] == '\\n' && index != byte_read - 1)\n\t\t\tline_count++;\n\t\tindex++;\n\t}\n\treturn (line_count);\n}\n\nvoid\ttail_by_lines(int fd)\n{\n\tunsigned long\tbyte_read;\n\tunsigned long\tindex;\n\tint\t\t\t\tline_count;\n\tchar\t\t\t*content;\n\tunsigned int\tlast_index;\n\n\tcontent = read_full(fd, &byte_read);\n\tline_count = count_line(byte_read, content);\n\tindex = -1;\n\twhile (++index < byte_read)\n\t{\n\t\tif (content[index] == '\\n')\n\t\t{\n\t\t\tif (--line_count < DEFAULT_LINE_COUNT)\n\t\t\t{\n\t\t\t\twhile (index < byte_read - 1)\n\t\t\t\t{\n\t\t\t\t\twrite(1, content + index + 1, 1);\n\t\t\t\t\tindex++;\n\t\t\t\t}\n\t\t\t\tbreak ;\n\t\t\t}\n\t\t\tlast_index = index + 1;\n\t\t}\n\t}\n}\n\nvoid\ttail_by_byte_count(int fd, unsigned int byte_to_read)\n{\n\tunsigned long\tindex;\n\tunsigned long\tbyte_read;\n\tchar\t\t\t*content;\n\n\tcontent = read_full(fd, &byte_read);\n\tindex = 0;\n\twhile (index < byte_read - byte_to_read)\n\t\tindex++;\n\twhile (index < byte_read)\n\t{\n\t\twrite(1, content + index, 1);\n\t\tindex++;\n\t}\n}\n"
},
{
"alpha_fraction": 0.5541561841964722,
"alphanum_fraction": 0.5692695379257202,
"avg_line_length": 13.703703880310059,
"blob_id": "91c33edad386674adb67a37b2384551fb9970b9f",
"content_id": "4833b8b6deaf2a9fb27b48a3afdb3cae7922b794",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 397,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 27,
"path": "/C10/ex02/srcs/ft_atoi.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdbool.h>\n#include <unistd.h>\n\n#include \"ft_is.h\"\n\nbool\tft_is_operator(char c)\n{\n\treturn (ft_is_in_string(c, \"+-\"));\n}\n\nint\t\tft_abs_strict_atoi(char *str)\n{\n\tint\tresult;\n\n\tresult = 0;\n\twhile (ft_is_whitespace(*str) || ft_is_operator(*str))\n\t\tstr++;\n\twhile (ft_is_number(*str))\n\t{\n\t\tresult *= 10;\n\t\tresult += *str - '0';\n\t\tstr++;\n\t}\n\tif (*str != '\\0')\n\t\treturn (-1);\n\treturn (result);\n}\n"
},
{
"alpha_fraction": 0.47708332538604736,
"alphanum_fraction": 0.4937500059604645,
"avg_line_length": 10.162790298461914,
"blob_id": "802f2b8ac427a10493e5304f66c89f6dbce847d0",
"content_id": "04a3386e4cb66624227e0ffefc4db7456bd19325",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 480,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 43,
"path": "/C00/ex08/ft_print_combn_should_be.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <unistd.h>\n#include <stdbool.h>\n\nvoid\tft_putchar(char c)\n{\n\twrite(1, &c, 1);\n}\n\nvoid\tft_write_comb(char a, char b, bool last)\n{\n\tft_putchar(a);\n\tft_putchar(b);\n\tif (last)\n\t{\n\t\tft_putchar(',');\n\t\tft_putchar(' ');\n\t}\n}\n\nvoid\tft_print_comb(void)\n{\n\tchar a;\n\tchar b;\n\tbool last;\n\n\ta = '0';\n\twhile (a <= '8')\n\t{\n\t\tb = a + 1;\n\t\twhile (b <= '9')\n\t\t{\n\t\t\tlast = !(a == '8' && b == '9');\n\t\t\tft_write_comb(a, b, last);\n\t\t\tb++;\n\t\t}\n\t\ta++;\n\t}\n}\n\nint\t\tmain(void)\n{\n\tft_print_comb();\n}\n"
},
{
"alpha_fraction": 0.6926316022872925,
"alphanum_fraction": 0.703157901763916,
"avg_line_length": 20.590909957885742,
"blob_id": "8d9d5f1a482a74e7a9d267931d9e3583c1e3b467",
"content_id": "b282c406095dee9c6f0b9c35d42caa319d0d58eb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 475,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 22,
"path": "/C10/ex03/includes/ft_file_utils.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_FILE_UTILS_H\n# define FT_FILE_UTILS_H\n\n# include \"ft_args_parser.h\"\n\n# define BUFFER_SIZE_1K 1024\n# define UINT unsigned int\n# define T_OPT t_options\n\nint\t\tft_open_file(t_options *options);\n\nint\t\tft_close_file(int fd);\n\nbool\tft_show_error(t_options *options);\n\nchar\t*ft_read_multiple(T_OPT *opts, int count, UINT *len, int total);\n\nchar\t*ft_read_full(int fd, unsigned int *length);\n\nchar\t*ft_extend_array(char *ori, char *n_cont, UINT old_len, UINT *len);\n\n#endif\n"
},
{
"alpha_fraction": 0.5618999600410461,
"alphanum_fraction": 0.5901970863342285,
"avg_line_length": 20.053192138671875,
"blob_id": "2d1691efc86e230b21e0896ba951ad55df1ec823",
"content_id": "0e9c2871622a4c9d57ac9d5cef34abb0676a58e3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1979,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 94,
"path": "/C10/ex03/srcs/ft_dumper.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <unistd.h>\n\n#include \"ft_dumper.h\"\n#include \"ft_console_io.h\"\n#include \"ft_write_number.h\"\n#include \"ft_equal.h\"\n\nvoid\tft_dump_content(char *content, int index, UINT line, int max)\n{\n\twrite(OUT, \" |\", 3);\n\tindex = 0;\n\twhile (index < max)\n\t{\n\t\tft_write_safe_char(&content[line * 16 + index]);\n\t\tindex++;\n\t}\n\twrite(OUT, \"|\", 1);\n}\n\nvoid\tft_dump_line(t_options *options, char *content, UINT line, int max)\n{\n\tint\t\tindex;\n\tbool\tdisp_content;\n\n\tdisp_content = options->disp_cont != 0;\n\tft_write_number(line * 16, 16, 6 + disp_content);\n\twrite(1, \" \", 1 + disp_content);\n\tindex = 0;\n\twhile (index < 16)\n\t{\n\t\tif (index >= max)\n\t\t\twrite(OUT, \" \", 2);\n\t\telse\n\t\t\tft_write_number(content[line * 16 + index], 16, 1);\n\t\tif (index != 16 - 1)\n\t\t\twrite(OUT, \" \", 1 + (disp_content && index == 7));\n\t\tindex++;\n\t}\n\tif (disp_content)\n\t\tft_dump_content(content, index, line, max);\n\twrite(OUT, \"\\n\", 1);\n}\n\nvoid\tft_repeat_line(t_options *options, char *content, UINT line, int max)\n{\n\tint\tindex;\n\n\tindex = 0;\n\tif (options->disp_cont < 2)\n\t\tft_dump_line(options, content, line, max);\n\telse\n\t\twhile (index < options->disp_cont)\n\t\t{\n\t\t\tft_dump_line(options, content, line, max);\n\t\t\tindex++;\n\t\t}\n}\n\nvoid\tft_do_dump(t_options *options, char *content, UINT length, UINT lines)\n{\n\tunsigned int\ti;\n\tint\t\t\t\tmax;\n\tchar\t\t\t*last_line;\n\tbool\t\t\tstar_flag;\n\n\ti = -1;\n\tlast_line = 0;\n\tstar_flag = false;\n\twhile (++i < lines)\n\t{\n\t\tmax = (i == lines - 1) ? length % 16 : 16;\n\t\tif (last_line != 0 && ft_is_equal(content + (i * 16), last_line, 16))\n\t\t{\n\t\t\twrite(OUT, \"*\\n\", !star_flag ? 2 : 0);\n\t\t\tstar_flag = true;\n\t\t}\n\t\telse\n\t\t{\n\t\t\tstar_flag = false;\n\t\t\tif (max != 0)\n\t\t\t\tft_repeat_line(options, content, i, max);\n\t\t}\n\t\tlast_line = content + (i * 16);\n\t}\n\tft_write_number((i - 1) * 16 + max, 16, 6 + (options->disp_cont != 0));\n}\n\nvoid\tft_dump(t_options *options, char *content, UINT length, UINT lines)\n{\n\tif (length == 0)\n\t\treturn ;\n\tft_do_dump(options, content, length, lines);\n\twrite(OUT, \"\\n\", 1);\n}\n"
},
{
"alpha_fraction": 0.3603896200656891,
"alphanum_fraction": 0.37847867608070374,
"avg_line_length": 23.5,
"blob_id": "d87e8175f22f775eb179907ab48acb239925d162",
"content_id": "d1683a56191f7137563fb68c669f8005a5acdfc9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2156,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 88,
"path": "/C12/ex14/ft_list_sort_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_list_sort.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/22 18:51:12 by ecaceres #+# #+# */\n/* Updated: 2019/08/22 18:51:13 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdlib.h>\n#include <stdbool.h>\n\n#include \"ft_list.h\"\n\n#include <stdio.h>\n\nvoid\tft_swap_void(void **a, void **b)\n{\n\tvoid *c;\n\n\tc = *a;\n\t*a = *b;\n\t*b = c;\n}\n\nvoid\tft_list_sort(t_list **begin_list, int (*cmp)())\n{\n\tt_list\t*current;\n\tt_list\t*end;\n\tbool\tswapped;\n\n\tif (*begin_list == 0)\n\t\treturn ;\n\tswapped = true;\n\tend = 0;\n\twhile (swapped)\n\t{\n\t\tswapped = false;\n\t\tcurrent = *begin_list;\n\t\twhile (current->next != end)\n\t\t{\n\t\t\tif ((*cmp)(current->data, current->next->data) > 0)\n\t\t\t{\n\t\t\t\tft_swap_void(&(current->data), &(current->next->data));\n\t\t\t\tswapped = true;\n\t\t\t}\n\t\t\tcurrent = current->next;\n\t\t}\n\t\tend = current;\n\t}\n}\n\nint\t\tless_than(void *a, void *b)\n{\n\treturn ((*(int *)a) > (*(int *)b));\n}\n\nint\t\tmain(void)\n{\n\tint\t\tindex;\n\tint\t\t*malloced_index;\n\tt_list\t*list;\n\tt_list\t*current;\n\n\tindex = 1;\n\tlist = ft_create_elem(0);\n\twhile (index < 10)\n\t{\n\t\tmalloced_index = malloc(sizeof(int));\n\t\t*malloced_index = 9 - index;\n\t\tft_list_push_back(&list, (void *)malloced_index);\n\t\tprintf(\"list[%d] = %d\\n\", index, *malloced_index);\n\t\tindex++;\n\t}\n\tindex = 1;\n\tft_list_sort(&(list->next), &less_than);\n\tprintf(\"Sorted\\n\");\n\tcurrent = list->next;\n\twhile (index < 10)\n\t{\n\t\tprintf(\"list[%d] = %d\\n\", index, *((int *)current->data));\n\t\tindex++;\n\t\tcurrent = current->next;\n\t}\n}\n"
},
{
"alpha_fraction": 0.6891495585441589,
"alphanum_fraction": 0.7067448496818542,
"avg_line_length": 16.947368621826172,
"blob_id": "86dd564a858ecbad39010b92e49fe84a7a036fb9",
"content_id": "9e249c65b305ee3a19a5d637cd49e65e33229097",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 341,
"license_type": "no_license",
"max_line_length": 68,
"num_lines": 19,
"path": "/C10/ex02/includes/ft_tail.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_TAIL_H\n# define FT_TAIL_H\n\n# include <stdbool.h>\n\n# include \"ft_args_parser.h\"\n\n# define DEFAULT_LINE_COUNT 10\n# define DEFAULT_BUFFER_SIZE 1024\n\nvoid\tstdin_tail(t_options *opts);\n\nbool\ttail(char *exec, char *path, t_options *opts, int total_count);\n\nbool\tdo_tail(int fd, t_options *opts);\n\nvoid\twrite_header(char *path);\n\n#endif\n"
},
{
"alpha_fraction": 0.504643976688385,
"alphanum_fraction": 0.5170278549194336,
"avg_line_length": 12.744680404663086,
"blob_id": "40733c70279d57c9b2e8daa1984b98734a52c77e",
"content_id": "643fa902078c9f9f58f2027dd0be2952b3603b59",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 646,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 47,
"path": "/Rush00/ex00/rush02.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdbool.h>\n\nvoid\tft_putchar(char c);\n\nvoid\tprint_at(int line, int column, int max_l, int max_c)\n{\n\tif (line == 0)\n\t{\n\t\tif (column == 0 || column == max_c)\n\t\t\tft_putchar('A');\n\t\telse\n\t\t\tft_putchar('B');\n\t}\n\telse if (line == max_l)\n\t{\n\t\tif (column == 0 || column == max_c)\n\t\t\tft_putchar('C');\n\t\telse\n\t\t\tft_putchar('B');\n\t}\n\telse\n\t{\n\t\tif (column == 0 || column == max_c)\n\t\t\tft_putchar('B');\n\t\telse\n\t\t\tft_putchar(' ');\n\t}\n}\n\nvoid\trush(int x, int y)\n{\n\tint\tline;\n\tint\tcolumn;\n\n\tline = 0;\n\twhile (line < y)\n\t{\n\t\tcolumn = 0;\n\t\twhile (column < x)\n\t\t{\n\t\t\tprint_at(line, column, y - 1, x - 1);\n\t\t\tcolumn++;\n\t\t}\n\t\tft_putchar('\\n');\n\t\tline++;\n\t}\n}\n"
},
{
"alpha_fraction": 0.5029820799827576,
"alphanum_fraction": 0.5149105191230774,
"avg_line_length": 12.594594955444336,
"blob_id": "fbdb83cc268c1bffd768892a2bc5e8a82d4d410d",
"content_id": "a44109e11c2a46ce011faaa3d35c2fc19de0a3f4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 503,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 37,
"path": "/C11/ex07/ft_advanced_sort_string_tab.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdbool.h>\n\nvoid\tft_swap(char **a, char **b)\n{\n\tchar *c;\n\n\tc = *a;\n\t*a = *b;\n\t*b = c;\n}\n\nvoid\tft_advanced_sort_string_tab(char **tab, int (*cmp)(char *, char *))\n{\n\tint\t\tindex;\n\tint\t\tsize;\n\tbool\tswapped;\n\n\tsize = 0;\n\twhile (tab[size])\n\t\tsize++;\n\twhile (true)\n\t{\n\t\tindex = 0;\n\t\tswapped = false;\n\t\twhile (index < size - 1)\n\t\t{\n\t\t\tif ((*cmp)(tab[index], tab[index + 1]) > 0)\n\t\t\t{\n\t\t\t\tft_swap(&tab[index], &tab[index + 1]);\n\t\t\t\tswapped = true;\n\t\t\t}\n\t\t\tindex++;\n\t\t}\n\t\tif (!swapped)\n\t\t\tbreak ;\n\t}\n}\n"
},
{
"alpha_fraction": 0.6306584477424622,
"alphanum_fraction": 0.6347736716270447,
"avg_line_length": 16.672727584838867,
"blob_id": "155f27e6c2d5d399bccd366008b63d5954e55eb0",
"content_id": "5d07f813873db8ab70e37d561920a44a323bf437",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 972,
"license_type": "no_license",
"max_line_length": 67,
"num_lines": 55,
"path": "/C10/ex02/srcs/ft_tail.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdbool.h>\n#include <stdio.h>\n#include <libgen.h>\n#include <fcntl.h>\n#include <unistd.h>\n#include <string.h>\n#include <errno.h>\n#include <unistd.h>\n#include <stdlib.h>\n\n#include \"ft_tail.h\"\n#include \"ft_tail_utils.h\"\n#include \"ft_tail_out.h\"\n#include \"ft_console_io.h\"\n#include \"ft_args_parser.h\"\n#include \"ft_strncpy.h\"\n\nvoid\tstdin_tail(t_options *opts)\n{\n\tdo_tail(IN, opts);\n}\n\nbool\ttail(char *exec, char *path, t_options *opts, int total_count)\n{\n\tstatic int\tcurrent_id = 0;\n\tint\t\t\tfd;\n\n\tfd = open_file(exec, path);\n\tif (fd < 0)\n\t\treturn (false);\n\tif (total_count > 1)\n\t{\n\t\tif (current_id++ != 0)\n\t\t\twrite_str_out(\"\\n\");\n\t\twrite_header(path);\n\t}\n\tdo_tail(fd, opts);\n\treturn (true);\n}\n\nbool\tdo_tail(int fd, t_options *opts)\n{\n\tif (opts->byte_to_read == NOTHING)\n\t\ttail_by_lines(fd);\n\telse\n\t\ttail_by_byte_count(fd, opts->byte_to_read);\n\treturn (true);\n}\n\nvoid\twrite_header(char *path)\n{\n\twrite_str_out(\"==> \");\n\twrite_str_out(path);\n\twrite_str_out(\" <==\\n\");\n}\n"
},
{
"alpha_fraction": 0.5501858592033386,
"alphanum_fraction": 0.5724906921386719,
"avg_line_length": 16.933332443237305,
"blob_id": "2478979cea52cf70bd353969472f561ea5d5813a",
"content_id": "6fe729438f4f50f4f5130d458bcfb0e5a8fa5be6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 269,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 15,
"path": "/C05/ex05/ft_sqrt.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "int\tft_sqrt(int nb)\n{\n\tunsigned int\t\tsqrt;\n\tunsigned int\t\tindex;\n\n\tif (nb < 0)\n\t\treturn (0);\n\tif (nb <= 1)\n\t\treturn (nb);\n\tindex = 0;\n\twhile ((sqrt = index * index) <= (unsigned int)nb)\n\t\tindex++;\n\tindex -= 1;\n\treturn (index * index == (unsigned int)nb ? index : 0);\n}\n"
},
{
"alpha_fraction": 0.4339427351951599,
"alphanum_fraction": 0.44877544045448303,
"avg_line_length": 25.596330642700195,
"blob_id": "ff9ed68aff044ca5d8b8f6ba401199960a11af3d",
"content_id": "3bef732d7d77c62036bc34cf9e677f2d375fd638",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2899,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 109,
"path": "/Final Project/srcs/bsq_parse.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* bsq_parse.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/19 12:24:26 by ecaceres #+# #+# */\n/* Updated: 2019/08/21 20:10:34 by fyusuf-a ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include \"bsq.h\"\n#include \"utilities.h\"\n\nt_bool\tft_parse_header_line(t_grid *grid, char *line, UINT length)\n{\n\tt_bool\tresult;\n\n\tif (length < 4)\n\t\treturn (false);\n\tgrid->translate[fill] = line[length - 1];\n\tgrid->translate[obstacle] = line[length - 2];\n\tgrid->translate[empty] = line[length - 3];\n\tresult = ft_atoi_n_strict(line, length - 3, &(grid->h));\n\treturn (result);\n}\n\nt_bool\tft_parse_normal_line(t_grid *grid, char *line, UINT length)\n{\n\tUINT\tindex;\n\tchar\tcurrent;\n\n\tif (length < 1)\n\t\treturn (false);\n\tgrid->w = length;\n\tindex = 0;\n\twhile (index < length)\n\t{\n\t\tcurrent = line[index];\n\t\tif (current != grid->translate[empty]\n\t\t\t\t&& current != grid->translate[obstacle]\n\t\t\t\t&& current != grid->translate[fill])\n\t\t\treturn (false);\n\t\tindex++;\n\t}\n\treturn (true);\n}\n\nt_bool\tft_has_width_changed(t_grid *grid, UINT *curr_w)\n{\n\tif (*curr_w == (UINT)-1)\n\t\t*curr_w = grid->w;\n\treturn (*curr_w != grid->w);\n}\n\nt_bool\tft_process_lines(t_grid *grid, UINT index,\n\t\t\t\t\t\t\tchar *file_content, UINT total)\n{\n\tUINT\tjndex;\n\tUINT\ty;\n\tUINT\tcurr_w;\n\n\ty = 0;\n\tcurr_w = -1;\n\twhile (index < total && y < grid->h)\n\t{\n\t\tjndex = index;\n\t\twhile (file_content[jndex] != '\\n')\n\t\t\tif (jndex++ + 1 >= total)\n\t\t\t\treturn (false);\n\t\tif (ft_parse_normal_line(grid, file_content + index, jndex - index))\n\t\t{\n\t\t\tgrid->map[y++] = (UCHR *)(file_content + index);\n\t\t\tif (ft_has_width_changed(grid, &curr_w))\n\t\t\t\treturn (false);\n\t\t}\n\t\telse\n\t\t\treturn (false);\n\t\tindex = jndex + 1;\n\t}\n\treturn (y == grid->h);\n}\n\nt_bool\tft_parse_grid(int fd, t_grid *grid)\n{\n\tchar\t*file_content;\n\tUINT\ttotal;\n\tUINT\tindex;\n\n\tgrid->map = 0;\n\tif (!ft_read_full(fd, &file_content, &total))\n\t\treturn (false);\n\tgrid->source = file_content;\n\tindex = 0;\n\twhile (index < total)\n\t{\n\t\tif (file_content[index] == '\\n')\n\t\t{\n\t\t\tif (!ft_parse_header_line(grid, file_content, index))\n\t\t\t\treturn (false);\n\t\t\tif (!(grid->map = malloc(sizeof(char *) * grid->h)))\n\t\t\t\treturn (false);\n\t\t\treturn (ft_process_lines(grid, index + 1, file_content, total));\n\t\t}\n\t\tindex++;\n\t}\n\treturn (false);\n}\n"
},
{
"alpha_fraction": 0.4463712275028229,
"alphanum_fraction": 0.4601798355579376,
"avg_line_length": 21.72992706298828,
"blob_id": "ff461d5b1c33504658bb2e85f32fe565fcd15605",
"content_id": "7b74f466c0bfab2763a609c37b8a23ae3714ea0a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 3114,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 137,
"path": "/Rush01/ex00/checker.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* checker.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/10 17:03:23 by ecaceres #+# #+# */\n/* Updated: 2019/08/10 17:03:26 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include \"boolean.h\"\n#include \"constraint.h\"\n#include \"checker.h\"\n\nt_bool\thas_row_duplicate_value(int **grid, int size, int row)\n{\n\tint\tindex;\n\tint\tjndex;\n\n\tindex = 0;\n\twhile (index < size)\n\t{\n\t\tjndex = index + 1;\n\t\twhile (jndex < size)\n\t\t{\n\t\t\tif (grid[row][index] == grid[row][jndex])\n\t\t\t\treturn (true);\n\t\t\tjndex++;\n\t\t}\n\t\tindex++;\n\t}\n\treturn (false);\n}\n\nt_bool\tvalidate_row(int **grid, t_constr row_constr, int row, int direction)\n{\n\tint\tcolumn;\n\tint\tseen;\n\tint\tvision;\n\tint\tmemory;\n\tint\tcurrent;\n\n\tif (has_row_duplicate_value(grid, row_constr.size, row))\n\t\treturn (false);\n\tcolumn = 0;\n\tseen = 0;\n\tvision = row_constr.v[row];\n\tmemory = -1;\n\twhile (column < row_constr.size)\n\t{\n\t\tcurrent = grid[row][column];\n\t\tif (direction == DIR_R_T_L)\n\t\t\tcurrent = grid[row][row_constr.size - 1 - column];\n\t\tif (current > memory)\n\t\t{\n\t\t\tmemory = current;\n\t\t\tseen++;\n\t\t}\n\t\tcolumn++;\n\t}\n\treturn (seen == vision);\n}\n\nt_bool\thas_column_duplicate_value(int **grid, int size, int column)\n{\n\tint\tindex;\n\tint\tjndex;\n\n\tindex = 0;\n\twhile (index < size)\n\t{\n\t\tjndex = index + 1;\n\t\twhile (jndex < size)\n\t\t{\n\t\t\tif (grid[index][column] == grid[jndex][column])\n\t\t\t\treturn (true);\n\t\t\tjndex++;\n\t\t}\n\t\tindex++;\n\t}\n\treturn (false);\n}\n\nt_bool\tvalidate_column(int **grid, t_constr col_constr, int col, int direction)\n{\n\tint\trow;\n\tint\tseen;\n\tint\tvision;\n\tint\tmemory;\n\tint\tcurrent;\n\n\tif (has_column_duplicate_value(grid, col_constr.size, col))\n\t\treturn (false);\n\trow = 0;\n\tseen = 0;\n\tvision = col_constr.v[col];\n\tmemory = -1;\n\twhile (row < col_constr.size)\n\t{\n\t\tcurrent = grid[row][col];\n\t\tif (direction == DIR_D_T_U)\n\t\t\tcurrent = grid[col_constr.size - 1 - row][col];\n\t\tif (current > memory)\n\t\t{\n\t\t\tmemory = current;\n\t\t\tseen++;\n\t\t}\n\t\trow++;\n\t}\n\treturn (seen == vision);\n}\n\nt_bool\tcheck_grid_validity(int **grid, int size, t_constr cnstr[4])\n{\n\tint\trow;\n\tint\tcolumn;\n\n\trow = 0;\n\twhile (row < size)\n\t{\n\t\tif (!validate_row(grid, cnstr[LEFT], row, DIR_L_T_R)\n\t\t\t|| !validate_row(grid, cnstr[RIGHT], row, DIR_R_T_L))\n\t\t\treturn (false);\n\t\tcolumn = 0;\n\t\twhile (column < size)\n\t\t{\n\t\t\tif (!validate_column(grid, cnstr[UP], column, DIR_U_T_D)\n\t\t\t\t|| !validate_column(grid, cnstr[DOWN], column, DIR_D_T_U))\n\t\t\t\treturn (false);\n\t\t\tcolumn++;\n\t\t}\n\t\trow++;\n\t}\n\treturn (true);\n}\n"
},
{
"alpha_fraction": 0.590062141418457,
"alphanum_fraction": 0.6024844646453857,
"avg_line_length": 39.25,
"blob_id": "3f58e0665d92b0537623913185f65bf8c1a0f10b",
"content_id": "f4cb48b1bb98857049348e5312c4e5ba7cc6b863",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 161,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 4,
"path": "/C09/ex00/libft_creator.sh",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "rm -f libft.a\nfind . -name \"*.c\" -type f -maxdepth 1 -exec gcc -Wall -Werror -Wextra -c {} \\;\nar -rcs libft.a *.o\nfind . -name \"*.o\" -type f -maxdepth 1 -delete\n"
},
{
"alpha_fraction": 0.25207754969596863,
"alphanum_fraction": 0.28324100375175476,
"avg_line_length": 27.8799991607666,
"blob_id": "73385c8d9a11dbba10ff055ecb6aacca11551e80",
"content_id": "2a6e22d9d55f37087e5fcf9f4c9e802bfb7858a1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1444,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 50,
"path": "/C02/ex03/ft_str_is_numeric_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_str_is_numeric.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/01 18:13:25 by ecaceres #+# #+# */\n/* Updated: 2019/08/01 20:10:24 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdbool.h>\n#include <stdio.h>\n\nint\t\tft_str_is_numeric(char *str)\n{\n\tint\t\tindex;\n\tbool\tvalid;\n\tchar\tcurr;\n\n\tindex = 0;\n\tvalid = true;\n\twhile (true)\n\t{\n\t\tcurr = str[index];\n\t\tif (curr == '\\0')\n\t\t{\n\t\t\tbreak ;\n\t\t}\n\t\tif (!(curr >= '0' && curr <= '9'))\n\t\t{\n\t\t\tvalid = false;\n\t\t\tbreak ;\n\t\t}\n\t\tindex++;\n\t}\n\treturn (valid);\n}\n\nint\t\tmain(void)\n{\n\tchar *str_valid;\n\tchar *str_invalid;\n\n\tstr_valid = \"123456\";\n\tstr_invalid = \"123A56\";\n\tprintf(\"should be 1: %d\\n\", ft_str_is_numeric(str_valid));\n\tprintf(\"should be 0: %d\\n\", ft_str_is_numeric(str_invalid));\n}\n"
},
{
"alpha_fraction": 0.2518218755722046,
"alphanum_fraction": 0.274493932723999,
"avg_line_length": 55.1363639831543,
"blob_id": "03e53e5684b55e8d9bfb7546b3390bb941aa2141",
"content_id": "6cdc970666c90f30146d7719e6c57176235d95a6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1235,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 22,
"path": "/Final Project/includes/solution.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* solution.h :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: fyusuf-a <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/19 18:57:54 by fyusuf-a #+# #+# */\n/* Updated: 2019/08/19 18:59:14 by fyusuf-a ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#ifndef SOLUTION_H\n# define SOLUTION_H\n\nstatic void\t\tactualize_sol(t_solution *sol, t_solution *intent,\n\t\t\t\t\tconst t_grid *grid);\nstatic t_bool\tfits(const t_solution *sol, const t_grid *grid);\nstatic t_bool\tfits_succ(const t_solution *intent, const t_grid *grid);\nstatic t_bool\tdont_fit_basic(const t_solution *intent, const t_grid *grid);\n\n#endif\n"
},
{
"alpha_fraction": 0.5964912176132202,
"alphanum_fraction": 0.6076555252075195,
"avg_line_length": 19.557376861572266,
"blob_id": "157eca75914ad2075614f2d51e00a4eb79be6e47",
"content_id": "5315664af9e05a4a8d9f64e4811e91257901ffd2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1254,
"license_type": "no_license",
"max_line_length": 101,
"num_lines": 61,
"path": "/C12/ex15/ft_list_reverse_fun_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdlib.h>\n\n#include \"ft_list.h\"\n\n#include <stdio.h>\n\nvoid\tft_list_reverse_fun(t_list *begin_list)\n{\n\tt_list\t*next;\n\tt_list\t*previous;\n\tt_list\t*current;\n\tvoid\t*previous_value;\n\n\tif (begin_list == 0)\n\t\treturn ;\n\tprevious_value = 0;\n\tcurrent = begin_list;\n\twhile (current != NULL)\n\t{\n\t\tnext = current->next;\n\t\tif (next != 0)\n\t\t\tbreak ;\n\t\tprevious_value = next->data;\n\t\tcurrent->data = previous_value;\n\t\tprintf(\"%p previous = %d\\n\", previous_value, previous_value == 0 ? -1 : *((int *)previous_value));\n\t\tcurrent = next;\n\t}\n\tbegin_list->data = previous_value;\n}\n\nint\t\tmain(void)\n{\n\tint\t\tindex;\n\tint\t\t*malloced_index;\n\t//int\t\t*data;\n\tt_list\t*list;\n\tt_list\t*current;\n\n\tindex = 1;\n\tlist = ft_create_elem(0);\n\twhile (index < 10)\n\t{\n\t\tmalloced_index = malloc(sizeof(int));\n\t\t*malloced_index = index;\n\t\tft_list_push_back(&list, (void *)malloced_index);\n\t\tprintf(\"%p list[%d] = %d\\n\", malloced_index, index, *malloced_index);\n\t\tindex++;\n\t}\n\tindex = 1;\n\tft_list_reverse_fun(list->next);\n\tprintf(\"Reversed\\n\");\n\tcurrent = list->next;\n\twhile (index < 10)\n\t{\n\t\t//data = (int *)current->data;\n\t\t//printf(\"list[%d] = %d\\n\", index, data != 0 ? *data : -1);\n\t\tprintf(\"list[%d] = %d\\n\", index, *((int *)current->data));\n\t\tindex++;\n\t\tcurrent = current->next;\n\t}\n}\n"
},
{
"alpha_fraction": 0.27836328744888306,
"alphanum_fraction": 0.3013019263744354,
"avg_line_length": 24.603174209594727,
"blob_id": "ff6d6ee53b18c47642cf90d4481fffc2839e4f8f",
"content_id": "6bc0818c8c65a508ef4d12d763f30f85dde7d633",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1613,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 63,
"path": "/C07/ex01/ft_range_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_range.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/07 18:02:18 by ecaceres #+# #+# */\n/* Updated: 2019/08/07 18:02:19 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdlib.h>\n#include <stdio.h>\n\nint\t\t*ft_range(int min, int max)\n{\n\tint\trange;\n\tint\tindex;\n\tint\t*buffer;\n\n\tif (min >= max)\n\t\treturn (0);\n\trange = max - min - 1;\n\tif ((buffer = malloc(range * sizeof(int))) == NULL)\n\t\treturn (0);\n\tindex = 0;\n\twhile (index <= range)\n\t{\n\t\tbuffer[index] = min + index;\n\t\tindex++;\n\t}\n\treturn (buffer);\n}\n\nvoid\tdebug_dump_array(int numbers[], int size)\n{\n\tint index;\n\n\tprintf(\"[ \");\n\tindex = 0;\n\twhile (index < size)\n\t{\n\t\tprintf(\"%d\", numbers[index]);\n\t\tif (index != size - 1)\n\t\t{\n\t\t\tprintf(\", \");\n\t\t}\n\t\tindex++;\n\t}\n\tprintf(\" ]\");\n}\n\nint\t\tmain(void)\n{\n\tint\tmin;\n\tint\tmax;\n\n\tmin = 5;\n\tmax = 10;\n\tprintf(\"min = %d, max = %d -> \", min, max);\n\tdebug_dump_array(ft_range(min, max), max - min);\n}\n"
},
{
"alpha_fraction": 0.3052208721637726,
"alphanum_fraction": 0.325874924659729,
"avg_line_length": 27.57377052307129,
"blob_id": "3402f0402952fd0c64745e01dced0e4a5cde1ecb",
"content_id": "7d73aeddbe62437aa488ad5ac34e28bc0fe0c701",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1743,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 61,
"path": "/Rush02/ex00/srcs/ft_split.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_split.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: exam <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/16 19:22:42 by exam #+# #+# */\n/* Updated: 2019/08/16 19:22:42 by exam ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdlib.h>\n\n#include \"ft_str.h\"\n#include \"ft_is.h\"\n\nint\t\tft_split_whitespace_count_word(char *str)\n{\n\tint\t\tcount;\n\n\tcount = 0;\n\twhile (*str)\n\t{\n\t\twhile (ft_is_whitespace(*str))\n\t\t\tstr++;\n\t\tif (*str == '\\0')\n\t\t\tbreak ;\n\t\twhile (!ft_is_whitespace(*str) && *str != '\\0')\n\t\t\tstr++;\n\t\tcount++;\n\t}\n\treturn (count);\n}\n\nchar\t**ft_split_whitespace(char *str)\n{\n\tint\t\tindex;\n\tint\t\tword_count;\n\tchar\t*word_start;\n\tchar\t**array;\n\n\tword_count = ft_split_whitespace_count_word(str);\n\tif (!(array = malloc((word_count + 1) * sizeof(char *))))\n\t\treturn (NULL);\n\tindex = 0;\n\twhile (*str)\n\t{\n\t\twhile (ft_is_whitespace(*str))\n\t\t\tstr++;\n\t\tif (*str == '\\0')\n\t\t\tbreak ;\n\t\tword_start = str;\n\t\twhile (!ft_is_whitespace(*str) && *str != '\\0')\n\t\t\tstr++;\n\t\tarray[index] = ft_str_n_duplicate(word_start, str - word_start);\n\t\tindex++;\n\t}\n\tarray[index] = 0;\n\treturn (array);\n}\n"
},
{
"alpha_fraction": 0.3194372057914734,
"alphanum_fraction": 0.34184470772743225,
"avg_line_length": 20.561798095703125,
"blob_id": "bf73c65535678d83e1fbfcf50b6f41bf6c7f8513",
"content_id": "86572b9e3080486dacd96492639f6da07a5228f6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1919,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 89,
"path": "/C01/ex08/ft_sort_int_tab_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_sort_int_tab.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/01 15:04:01 by ecaceres #+# #+# */\n/* Updated: 2019/08/01 15:04:04 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdbool.h>\n#include <stdio.h>\n\nvoid\tft_swap(int *a, int *b)\n{\n\tint c;\n\n\tc = *a;\n\t*a = *b;\n\t*b = c;\n}\n\nvoid\tft_sort_int_tab(int *tab, int size)\n{\n\tint\t\tread_index;\n\tint\t\tindex;\n\tbool\tswapped;\n\n\tread_index = 0;\n\twhile (true)\n\t{\n\t\tswapped = false;\n\t\tindex = 0;\n\t\twhile (index < size)\n\t\t{\n\t\t\tif (tab[index] > tab[index + 1])\n\t\t\t{\n\t\t\t\tft_swap(&tab[index], &tab[index + 1]);\n\t\t\t\tswapped = true;\n\t\t\t}\n\t\t\tindex++;\n\t\t}\n\t\tif (!swapped)\n\t\t{\n\t\t\tbreak ;\n\t\t}\n\t\tread_index++;\n\t}\n}\n\nvoid\tdebug_dump_array(int numbers[], int size)\n{\n\tint index;\n\n\tprintf(\"[ \");\n\tindex = 0;\n\twhile (index < size)\n\t{\n\t\tprintf(\"%d\", numbers[index]);\n\t\tif (index != size - 1)\n\t\t{\n\t\t\tprintf(\", \");\n\t\t}\n\t\tindex++;\n\t}\n\tprintf(\" ]\");\n}\n\nint\t\tmain(void)\n{\n\tint numbers[9];\n\tint *first_pointer;\n\tint index;\n\n\tindex = 0;\n\twhile (index < 9)\n\t{\n\t\tnumbers[index] = 9 - (index + 1);\n\t\tindex++;\n\t}\n\tfirst_pointer = &numbers[0];\n\tprintf(\"before: \");\n\tdebug_dump_array(numbers, 9);\n\tft_sort_int_tab(first_pointer, 9);\n\tprintf(\"\\nafter : \");\n\tdebug_dump_array(numbers, 9);\n}\n"
},
{
"alpha_fraction": 0.5894039869308472,
"alphanum_fraction": 0.5894039869308472,
"avg_line_length": 11.583333015441895,
"blob_id": "74c4a75a201c7e0cad2d6858065e58c460db2acb",
"content_id": "2916ef8118188f516a0d32e0c633614344bf84f7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 302,
"license_type": "no_license",
"max_line_length": 40,
"num_lines": 24,
"path": "/C11/ex05/srcs/ft_operation.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "int\t\tft_operation_add(int a, int b)\n{\n\treturn (a + b);\n}\n\nint\t\tft_operation_minus(int a, int b)\n{\n\treturn (a - b);\n}\n\nint\t\tft_operation_devide(int a, int b)\n{\n\treturn (a / b);\n}\n\nint\t\tft_operation_multiply(int a, int b)\n{\n\treturn (a * b);\n}\n\nint\t\tft_operation_modulo(int a, int b)\n{\n\treturn (a % b);\n}\n"
},
{
"alpha_fraction": 0.2710978388786316,
"alphanum_fraction": 0.2920089662075043,
"avg_line_length": 43.63333511352539,
"blob_id": "3425735a479fd10120625db92f82575b65c7be09",
"content_id": "5dbb11a105f7fa092d0773c9a9428106fe32df44",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1339,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 30,
"path": "/Rush02/ex00/includes/ft_str.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_str.h :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/17 12:12:14 by ecaceres #+# #+# */\n/* Updated: 2019/08/17 12:12:14 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#ifndef FT_STR_H\n# define FT_STR_H\n\nint\t\tft_str_join_get_final_length(char **strings, int size, int sep_length);\nchar\t*ft_str_join(int size, char **strs, char *sep);\n\nint\t\tft_str_length(char *str);\n\nchar\t*ft_str_duplicate(char *src);\nchar\t*ft_str_n_duplicate(char *src, int n);\n\nchar\t*ft_str_copy(char *dest, char *src);\nchar\t*ft_str_n_copy(char *dest, char *src, int n);\n\nvoid\tft_str_write_to(int fd, char *str);\nvoid\tft_str_write(char *str);\n\n#endif\n"
},
{
"alpha_fraction": 0.27448534965515137,
"alphanum_fraction": 0.29382407665252686,
"avg_line_length": 25.278688430786133,
"blob_id": "240b68ffbf5e89b76f91fe48bf2feefdcd85b47d",
"content_id": "2cd2514b399d8d2240686170147720a23fc27acb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1603,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 61,
"path": "/Final Project/srcs/bsq_grid.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* bsq_grid.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/19 12:21:09 by ecaceres #+# #+# */\n/* Updated: 2019/08/21 20:09:40 by fyusuf-a ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include \"bsq.h\"\n#include \"utilities.h\"\n\nt_bool\tft_load_grid(char *path, t_grid *grid)\n{\n\tint\t\tfd;\n\tt_bool\tresult;\n\n\tfd = open(path, O_RDONLY);\n\tif (fd < 0)\n\t\treturn (false);\n\tresult = ft_parse_grid(fd, grid);\n\tclose(fd);\n\treturn (result);\n}\n\nvoid\tft_free_grid(t_grid *grid)\n{\n\tfree(grid->map);\n\tfree(grid->source);\n}\n\nvoid\tft_process_grid(t_grid *grid, t_solution *sol)\n{\n\tUINT x;\n\tUINT y;\n\n\tif (find_solution(sol, grid))\n\t{\n\t\tx = sol->x;\n\t\twhile (x < sol->x + sol->size)\n\t\t{\n\t\t\ty = sol->y;\n\t\t\twhile (y < sol->y + sol->size)\n\t\t\t{\n\t\t\t\tgrid->map[y][x] = grid->translate[fill];\n\t\t\t\ty++;\n\t\t\t}\n\t\t\tx++;\n\t\t}\n\t\ty = 0;\n\t\twhile (y < grid->h)\n\t\t{\n\t\t\twrite(OUT, grid->map[y], grid->w);\n\t\t\twrite(OUT, \"\\n\", 1);\n\t\t\ty++;\n\t\t}\n\t}\n}\n"
},
{
"alpha_fraction": 0.6456692814826965,
"alphanum_fraction": 0.6614173054695129,
"avg_line_length": 15.933333396911621,
"blob_id": "10b21dc10bfd977db6ce3deeccfede6ee020928f",
"content_id": "1231aeb5b713e4d14f1301cbd905e4ceb18758ab",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 254,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 15,
"path": "/C12/ex13/ft_list_merge.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include \"ft_list.h\"\n\nvoid\tft_list_merge(t_list **begin_list1, t_list *begin_list2)\n{\n\tt_list\t*current;\n\tt_list\t*previous;\n\n\tcurrent = *begin_list1;\n\twhile (current)\n\t{\n\t\tprevious = current;\n\t\tcurrent = current->next;\n\t}\n\tprevious->next = begin_list2;\n}\n"
},
{
"alpha_fraction": 0.6398305296897888,
"alphanum_fraction": 0.6440678238868713,
"avg_line_length": 20.454545974731445,
"blob_id": "781e353ca99cd037d004e3e29ac7823a5cf9212a",
"content_id": "cd5fa037b5cfb4800045c2184736c6027b26b564",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 236,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 11,
"path": "/C12/ex06/ft_list_clear.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdlib.h>\n\n#include \"ft_list.h\"\n\nvoid\tft_list_clear(t_list *begin_list, void (*free_fct)(void *))\n{\n\tif (begin_list->next != 0)\n\t\tft_list_clear(begin_list->next, free_fct);\n\t(*free_fct)(begin_list->data);\n\tfree(begin_list);\n}\n"
},
{
"alpha_fraction": 0.6951219439506531,
"alphanum_fraction": 0.6951219439506531,
"avg_line_length": 12.666666984558105,
"blob_id": "75efb97ecf5056476e04b184e93fcd446b3af323",
"content_id": "4780761c40ae8b34f25255d1763f3d72341117e9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 82,
"license_type": "no_license",
"max_line_length": 37,
"num_lines": 6,
"path": "/C10/ex02/includes/ft_abs.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_ABS_H\n# define FT_ABS_H\n\nunsigned int ft_abs_int (int number);\n\n#endif\n"
},
{
"alpha_fraction": 0.40186914801597595,
"alphanum_fraction": 0.4112149477005005,
"avg_line_length": 16.83333396911621,
"blob_id": "a26862c3cb30d1967c3f1e06759e5b66fcbb6892",
"content_id": "beb9e20be7d4c3244e3ca58f2d686c75daad95e4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 214,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 12,
"path": "/C11/ex05/srcs/ft_is.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include \"ft_boolean.h\"\n\nt_bool\tft_is_number(char c)\n{\n\treturn (c >= '0' && c <= '9');\n}\n\nt_bool\tft_is_whitespace(char c)\n{\n\treturn (c == ' ' || c == '\\t' || c == '\\n' || c == '\\v' || c == '\\r'\n\t\t\t|| c == '\\f');\n}\n"
},
{
"alpha_fraction": 0.4713114798069,
"alphanum_fraction": 0.48770493268966675,
"avg_line_length": 13.352941513061523,
"blob_id": "a8c0d3ec45193e8e908ce34726e0ba0dcb51024e",
"content_id": "632f835a9694a470782b5823652758a0dc48757b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 244,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 17,
"path": "/C03/ex03/ft_strncat.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "char\t*ft_strncat(char *dest, char *src, unsigned int nb)\n{\n\tchar\t*dst;\n\n\tdst = dest;\n\twhile (*dst != '\\0')\n\t\tdst++;\n\twhile (*src != '\\0' && nb > 0)\n\t{\n\t\t*dst = *(unsigned char *)src;\n\t\tdst++;\n\t\tsrc++;\n\t\tnb--;\n\t}\n\t*dst = '\\0';\n\treturn (dest);\n}\n"
},
{
"alpha_fraction": 0.5344827771186829,
"alphanum_fraction": 0.5413793325424194,
"avg_line_length": 12.181818008422852,
"blob_id": "9314cb2b048db241d1fbc41619f69db7a8be820b",
"content_id": "29eec9773abbb79336dd816873048ae217d43c98",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 290,
"license_type": "no_license",
"max_line_length": 44,
"num_lines": 22,
"path": "/C10/ex02/srcs/ft_is.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdbool.h>\n\nbool\tft_is_in_string(char c, char *str)\n{\n\twhile (*str)\n\t{\n\t\tif (*str == c)\n\t\t\treturn (true);\n\t\tstr++;\n\t}\n\treturn (false);\n}\n\nbool\tft_is_whitespace(char c)\n{\n\treturn (ft_is_in_string(c, \"\\t\\n\\v\\f\\r \"));\n}\n\nbool\tft_is_number(char c)\n{\n\treturn (c >= '0' && c <= '9');\n}\n"
},
{
"alpha_fraction": 0.468963623046875,
"alphanum_fraction": 0.4820891320705414,
"avg_line_length": 22.901960372924805,
"blob_id": "ef7ee9732cd982f3844ff827b4fecf7308cf258b",
"content_id": "043755ba99b15aea3c59e3dada8bb1823ef0426e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 3657,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 153,
"path": "/Rush02/ex00/srcs/ft_number_dictionary_io.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_number_dictionary_io.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/17 11:24:47 by ecaceres #+# #+# */\n/* Updated: 2019/08/17 11:24:47 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdlib.h>\n#include <stdio.h>\n#include <unistd.h>\n#include <fcntl.h>\n#include <ft_short_types.h>\n\n#include \"ft_number_dictionary.h\"\n#include \"ft_boolean.h\"\n#include \"ft_str.h\"\n#include \"ft_to.h\"\n#include \"ft_array.h\"\n#include \"ft_is.h\"\n#include \"ft_file_utils.h\"\n#include \"ft_split.h\"\n\nvoid\t\t\tft_read_line(t_dict_entry *entry, int fd, t_parse_error *error)\n{\n\tchar\t\t\tbuffer[SIZE_1B];\n\tchar\t\t\t*line;\n\tUINT\t\t\tbyte_read;\n\tUINT\t\t\ttotal;\n\n\ttotal = 0;\n\twhile ((byte_read = read(fd, buffer, SIZE_1B)) > 0)\n\t{\n\t\tif (byte_read == (UINT)-1)\n\t\t{\n\t\t\tentry->str = 0;\n\t\t\t*error = failed;\n\t\t\tbreak ;\n\t\t}\n\t\tline = ft_extend_array(line, buffer, total, total + byte_read);\n\t\ttotal += byte_read;\n\t\tif (buffer[0] == '\\n')\n\t\t{\n\t\t\t*error = ft_process_line(entry, line, total);\n\t\t\tbyte_read = (UINT)-2;\n\t\t\tbreak ;\n\t\t}\n\t}\n\tif (byte_read == 0)\n\t\t*error = reached_eof;\n}\n\nchar\t\t\t*ft_clean_line(char *str)\n{\n\tint\t\tsize;\n\tchar\t**split;\n\n\tsplit = ft_split_whitespace(str);\n\tfree(str);\n\tsize = 0;\n\twhile (split[size])\n\t\tsize++;\n\treturn (ft_str_join(size, split, \" \"));\n}\n\nt_parse_error\tft_process_line(t_dict_entry *entry, char *line, UINT length)\n{\n\tUINT\tindex;\n\tchar\t*number;\n\n\tindex = 0;\n\twhile (ft_is_number(line[index]))\n\t\tindex++;\n\tif (line[index] == '\\n')\n\t\treturn (empty_line);\n\tif (index == 0 || index >= length)\n\t\treturn (failed);\n\tnumber = ft_str_n_duplicate(line, index);\n\twhile (line[index] == ' ')\n\t\tindex++;\n\tif (line[index] != ':')\n\t\treturn (failed);\n\tindex++;\n\twhile (line[index] == ' ')\n\t\tindex++;\n\tentry->value = ft_atoi_strict(number);\n\tentry->str = ft_clean_line(\n\t\t\tft_str_n_duplicate(line + index, length - index - 1));\n\tif (ft_str_length(entry->str) == 0)\n\t\treturn (failed);\n\tfree(number);\n\treturn (parsing_ok);\n}\n\nint\t\t\t\tft_count_valid_line(char *path)\n{\n\tint\t\t\t\tfd;\n\tint\t\t\t\tcount;\n\tt_dict_entry\t*entry;\n\tt_parse_error\terror;\n\n\tfd = ft_open_file(path);\n\tif (fd < 0)\n\t\treturn (INVALID);\n\tcount = 0;\n\twhile (true)\n\t{\n\t\terror = parsing_ok;\n\t\tif (!(entry = malloc(sizeof(t_dict_entry))))\n\t\t\treturn (INVALID);\n\t\tft_read_line(entry, fd, &error);\n\t\tif (error == failed)\n\t\t\treturn (INVALID);\n\t\tif (error == parsing_ok)\n\t\t\tcount++;\n\t\tif (entry->str == 0 || error == reached_eof)\n\t\t\tbreak ;\n\t\tfree(entry);\n\t}\n\tft_close_file(fd);\n\treturn (count);\n}\n\nt_bool\t\t\tft_load_valid_line(char *path, int size, t_dict *dict)\n{\n\tint\t\t\t\tfd;\n\tint\t\t\t\tindex;\n\tt_dict_entry\t*entry;\n\tt_parse_error\terror;\n\n\tfd = ft_open_file(path);\n\tif (fd < 0)\n\t\treturn (false);\n\tindex = 0;\n\twhile (index < size)\n\t{\n\t\terror = parsing_ok;\n\t\tentry = &dict->entries[index];\n\t\tft_read_line(entry, fd, &error);\n\t\tif (error == failed)\n\t\t\treturn (false);\n\t\tif (error == parsing_ok)\n\t\t\tindex++;\n\t\tif ((entry->str == 0 || error == reached_eof) && error != 
empty_line)\n\t\t\tbreak ;\n\t}\n\tft_close_file(fd);\n\treturn (true);\n}\n"
},
{
"alpha_fraction": 0.5445544719696045,
"alphanum_fraction": 0.5544554591178894,
"avg_line_length": 15.15999984741211,
"blob_id": "f9c8b0ebdd0c869ffee038689d67b108d3afc552",
"content_id": "83feee54a18f3292e9e2fcaabf88e5901ff0d0e4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 404,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 25,
"path": "/C03/ex04/ft_strstr.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdbool.h>\n\nchar\t*ft_strstr(char *str, char *to_find)\n{\n\tchar *haystack;\n\tchar *needle;\n\n\tif (*to_find == '\\0')\n\t\treturn (str);\n\thaystack = str;\n\tneedle = to_find;\n\twhile (true)\n\t{\n\t\tif (*needle == '\\0')\n\t\t\treturn ((char *)(haystack - (needle - to_find)));\n\t\tif (*haystack == *needle)\n\t\t\tneedle++;\n\t\telse\n\t\t\tneedle = to_find;\n\t\tif (*haystack == '\\0')\n\t\t\tbreak ;\n\t\thaystack++;\n\t}\n\treturn (0);\n}\n"
},
{
"alpha_fraction": 0.6717557311058044,
"alphanum_fraction": 0.6717557311058044,
"avg_line_length": 12.100000381469727,
"blob_id": "49d67a9f15936dc7d151c750df409fd3b84e2448",
"content_id": "734f98adfa3d15fd72a83862d367341a1ce23b31",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 131,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 10,
"path": "/C11/ex05/includes/ft_is.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_IS_H\n# define FT_IS_H\n\n# include \"ft_boolean.h\"\n\nt_bool\tft_is_number(char c);\n\nt_bool\tft_is_whitespace(char c);\n\n#endif\n"
},
{
"alpha_fraction": 0.39172932505607605,
"alphanum_fraction": 0.424436092376709,
"avg_line_length": 36.47887420654297,
"blob_id": "32afbd94215fe8cf4a1d81a4460557f6dad24883",
"content_id": "1d7a6989c8e93d3c8e439c544e5c869d55fbd4be",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 2660,
"license_type": "no_license",
"max_line_length": 424,
"num_lines": 71,
"path": "/Rush02/ex00/Makefile",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "# **************************************************************************** #\n# #\n# ::: :::::::: #\n# Makefile :+: :+: :+: #\n# +:+ +:+ +:+ #\n# By: ecaceres <[email protected]> +#+ +:+ +#+ #\n# +#+#+#+#+#+ +#+ #\n# Created: 2019/08/13 15:11:33 by ecaceres #+# #+# #\n# Updated: 2019/08/13 15:11:34 by ecaceres ### ########.fr #\n# #\n# **************************************************************************** #\n\nC_YELLOW_B=\\033[1;33m\nC_LIGHT_RED_B=\\033[0;91m\nC_WHITE=\\033[0;97m\nC_RESET=\\033[0;39m\n\nSRCS\t= ./srcs/ft_array_extend.c ./srcs/ft_atoi.c ./srcs/ft_atoi_strict.c ./srcs/ft_debug.c ./srcs/ft_file_utils.c ./srcs/ft_is.c ./srcs/ft_itow.c ./srcs/ft_number_dictionary.c ./srcs/ft_number_dictionary_io.c ./srcs/ft_number_dictionary_sort.c ./srcs/ft_number_dictionary_sort2.c ./srcs/ft_split.c ./srcs/ft_str_copy.c ./srcs/ft_str_duplicate.c ./srcs/ft_str_join.c ./srcs/ft_str_length.c ./srcs/ft_str_write.c ./srcs/main.c\nOBJS\t= ${SRCS:.c=.o}\nINCS\t= includes\nNAME\t= rush-02\nCC\t\t= gcc\nRM\t\t= rm -f\nCFLAGS\t= -Wall -Wextra -Werror\n\n.c.o :\n\t${CC} ${CFLAGS} -c $< -o ${<:.c=.o} -I${INCS}\n\n${NAME} : ${OBJS}\n\t${CC} ${CFLAGS} ${OBJS} -o ${NAME}\n\nall : ${NAME}\n\nrun :\n\t./${NAME}\n\nclean :\n\t${RM} ${OBJS}\n\nfclean : clean\n\t${RM} ${NAME}\n\nnorm :\n\tnorminette */*.[ch]\n\nfind_sources :\n\tfind srcs -type f -name \"*.c\" | xargs -I{} echo ./{} | tr '\\n' ' '\n\nre : fclean all\n\ntest : re\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Number: 42$(C_RESET)\" ;\n\t@./${NAME} 42 | cat -e || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Number: 0$(C_RESET)\" ;\n\t@./${NAME} 0 | cat -e || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Number: 10.4$(C_RESET)\" ;\n\t@./${NAME} 10.4 | cat -e || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Number: 100000$(C_RESET)\" ;\n\t@./${NAME} 100000 | cat -e || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Number: 20, language: FRENCH$(C_RESET)\" ;\n\t@./${NAME} numbers_french.dict 20 | cat -e || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Number: 20, language: WITH_CUSTOM$(C_RESET)\" ;\n\t@./${NAME} numbers_with_custom.dict 20 | cat -e || true ;\n\t@##\n\n.PHONY: all clean fclean re .c.o test"
},
{
"alpha_fraction": 0.7197802066802979,
"alphanum_fraction": 0.7197802066802979,
"avg_line_length": 17.200000762939453,
"blob_id": "05f9f742ebb0df4c21090f46a3fd6fed1e452193",
"content_id": "1f3ff483482d7346aea24c095d4dd8e0213cb779",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 182,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 10,
"path": "/C10/ex03/includes/ft_dumper.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_DUMPER_H\n# define FT_DUMPER_H\n\n# include \"ft_args_parser.h\"\n\n# define UINT unsigned int\n\nvoid\tft_dump(t_options *options, char *content, UINT length, UINT lines);\n\n#endif\n"
},
{
"alpha_fraction": 0.5276482105255127,
"alphanum_fraction": 0.5496335625648499,
"avg_line_length": 14.010000228881836,
"blob_id": "04b93a23434a7e2cf55f7a02bffffbf2979167b6",
"content_id": "d323cf645cdabcf49af2f6468ec8982f5e02874d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1501,
"license_type": "no_license",
"max_line_length": 66,
"num_lines": 100,
"path": "/C05/ex08/ft_ten_queens_puzzle.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <unistd.h>\n\n#define BOARD_SIZE 10\n\nbool\tis_queen_at_risk(int board[][BOARD_SIZE], int at_x, int at_y)\n{\n\tint\tx;\n\tint\ty;\n\tint\toffsets[2];\n\n\toffsets[0] = at_y - at_x;\n\toffsets[1] = at_y + at_x;\n\tx = 0;\n\twhile (x < BOARD_SIZE)\n\t{\n\t\ty = 0;\n\t\twhile (y < BOARD_SIZE)\n\t\t{\n\t\t\tif (x == at_x || y == at_y\n\t\t\t\t|| y == x + offsets[0] || y == -x + offsets[1])\n\t\t\t\tif (board[y][x])\n\t\t\t\t\treturn (true);\n\t\t\ty++;\n\t\t}\n\t\tx++;\n\t}\n\treturn (false);\n}\n\nvoid\tclear_column(int board[][BOARD_SIZE], int x)\n{\n\tint\tindex;\n\n\tindex = 0;\n\twhile (index < BOARD_SIZE)\n\t{\n\t\tboard[index++][x] = false;\n\t}\n}\n\nvoid\tprint_queen_position(int board[][BOARD_SIZE])\n{\n\tint\ty;\n\tint\tcolumn;\n\n\ty = 0;\n\twhile (y < BOARD_SIZE)\n\t{\n\t\tcolumn = 0;\n\t\twhile (column < BOARD_SIZE)\n\t\t{\n\t\t\tif (board[y][column])\n\t\t\t{\n\t\t\t\twrite(1, &\"0123456789\"[column], 1);\n\t\t\t\tbreak ;\n\t\t\t}\n\t\t\tcolumn++;\n\t\t}\n\t\ty++;\n\t}\n\twrite(1, \"\\n\", 1);\n}\n\nbool\trecursive_find(int board[][BOARD_SIZE], int x, int *soluce)\n{\n\tint\ty;\n\n\tif (x >= BOARD_SIZE)\n\t\treturn (true);\n\ty = 0;\n\twhile (y < BOARD_SIZE)\n\t{\n\t\tif (!is_queen_at_risk(board, x, y))\n\t\t{\n\t\t\tboard[y][x] = true;\n\t\t\tif (recursive_find(board, x + 1, soluce))\n\t\t\t{\n\t\t\t\t*soluce += 1;\n\t\t\t\tprint_queen_position(board);\n\t\t\t}\n\t\t\tboard[y][x] = false;\n\t\t}\n\t\ty++;\n\t}\n\treturn (false);\n}\n\nint\t\tft_ten_queens_puzzle(void)\n{\n\tint\tsoluce;\n\tint\tboard[BOARD_SIZE][BOARD_SIZE];\n\tint\tcolumn;\n\n\tsoluce = 0;\n\tcolumn = 0;\n\twhile (column++ < BOARD_SIZE)\n\t\tclear_column(board, column - 1);\n\trecursive_find(board, 0, &soluce);\n\treturn (soluce);\n}\n"
},
{
"alpha_fraction": 0.5990298986434937,
"alphanum_fraction": 0.6087307929992676,
"avg_line_length": 16.422534942626953,
"blob_id": "0e387f711fdb0ed9d4ece80de9562a5c97912cc1",
"content_id": "ef56e43fdbf9f8cb23f753083feaaf446c2a98c4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1237,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 71,
"path": "/C07/ex03/ft_strjoin.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#include <stdlib.h>\n\nint\t\tft_str_length(char *str)\n{\n\tint\tindex;\n\n\tindex = 0;\n\twhile (str[index])\n\t\tindex++;\n\treturn (index);\n}\n\nchar\t*ft_strcpy(char *dest, char *src)\n{\n\tint index;\n\n\tindex = 0;\n\twhile (src[index] != '\\0')\n\t{\n\t\tdest[index] = src[index];\n\t\tindex++;\n\t}\n\tdest[index] = '\\0';\n\treturn (dest);\n}\n\nint\t\tft_compute_final_length(char **strings, int size, int sep_length)\n{\n\tint\tfinal_length;\n\tint\tindex;\n\n\tfinal_length = 0;\n\tindex = 0;\n\twhile (index < size)\n\t{\n\t\tfinal_length += ft_str_length(strings[index]);\n\t\tfinal_length += sep_length;\n\t\tindex++;\n\t}\n\tfinal_length -= sep_length;\n\treturn (final_length);\n}\n\nchar\t*ft_strjoin(int size, char **strs, char *sep)\n{\n\tint\t\tfull_length;\n\tint\t\tindex;\n\tchar\t*read_head;\n\tchar\t*string;\n\n\tif (size == 0)\n\t\treturn ((char *)malloc(sizeof(char)));\n\tfull_length = ft_compute_final_length(strs, size, ft_str_length(sep));\n\tif (!(string = (char *)malloc((full_length + 1) * sizeof(char))))\n\t\treturn (0);\n\tread_head = string;\n\tindex = 0;\n\twhile (index < size)\n\t{\n\t\tft_strcpy(read_head, strs[index]);\n\t\tread_head += ft_str_length(strs[index]);\n\t\tif (index < size - 1)\n\t\t{\n\t\t\tft_strcpy(read_head, sep);\n\t\t\tread_head += ft_str_length(sep);\n\t\t}\n\t\tindex++;\n\t}\n\t*read_head = '\\0';\n\treturn (string);\n}\n"
},
{
"alpha_fraction": 0.33619046211242676,
"alphanum_fraction": 0.3552381098270416,
"avg_line_length": 23.13793182373047,
"blob_id": "4ad54d67705e017ff3ff18d1702278cbf3c8bea9",
"content_id": "92215e64a924b4cb3bc4f5eb9aa8bf0574d3269f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 2100,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 87,
"path": "/Exam02/work-done/ft_split/ft_split.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_split.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: exam <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/16 19:22:42 by exam #+# #+# */\n/* Updated: 2019/08/16 19:22:42 by exam ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdlib.h>\n#include <stdbool.h>\n\nbool\tis_whitespace(char c)\n{\n\treturn (c == ' ' || c == '\\t' || c == '\\n');\n}\n\nint\t\tcount_word(char *str)\n{\n\tint\t\tcount;\n\n\tcount = 0;\n\twhile (*str)\n\t{\n\t\twhile (is_whitespace(*str))\n\t\t\tstr++;\n\t\tif (*str == '\\0')\n\t\t\tbreak ;\n\t\twhile (!is_whitespace(*str) && *str != '\\0')\n\t\t\tstr++;\n\t\tcount++;\n\t}\n\treturn (count);\n}\n\nchar\t*ft_strndup(char *str, unsigned int n)\n{\n\tunsigned int\tindex;\n\tunsigned int\tlength;\n\tchar\t\t*dup;\n\n\tlength = 0;\n\twhile (str[length])\n\t\tlength++;\n\tif (length > n)\n\t\tlength = n;\n\tif (!(dup = malloc((length + 1) * sizeof(char))))\n\t\treturn (NULL);\n\tindex = 0;\n\twhile (index < length)\n\t{\n\t\tdup[index] = str[index];\n\t\tindex++;\n\t}\n\tdup[index] = '\\0';\n\treturn (dup);\n}\n\nchar **ft_split(char *str)\n{\n\tint\t\tindex;\n\tint\t\tword_count;\n\tchar\t*word_start;\n\tchar\t**array;\n\n\tword_count = count_word(str);\n\tif (!(array = malloc((word_count + 1) * sizeof(char *))))\n\t\treturn (NULL);\n\tindex = 0;\n\twhile (*str)\n\t{\n\t\twhile (is_whitespace(*str))\n\t\t\tstr++;\n\t\tif (*str == '\\0')\n\t\t\tbreak ;\n\t\tword_start = str;\n\t\twhile (!is_whitespace(*str) && *str != '\\0')\n\t\t\tstr++;\n\t\tarray[index] = ft_strndup(word_start, str - word_start);\n\t\tindex++;\n\t}\n\tarray[index] = 0;\n\treturn (array);\n}\n"
},
{
"alpha_fraction": 0.28740936517715454,
"alphanum_fraction": 0.31114041805267334,
"avg_line_length": 37.89743423461914,
"blob_id": "20951eda20903e92c428e5b104af310f2faeb40c",
"content_id": "45d33ad32484b93841711f2106e1aeb7944436ce",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1517,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 39,
"path": "/C12/ex04/ft_list_push_back_dev.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_list_push_back.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/22 14:48:36 by ecaceres #+# #+# */\n/* Updated: 2019/08/22 14:48:36 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include \"ft_list.h\"\n\n#include <stdio.h>\n\nvoid\tft_list_push_back(t_list **begin_list, void *data)\n{\n\tif (*begin_list == 0)\n\t\t*begin_list = ft_create_elem(data);\n\telse\n\t\tft_list_push_back(&((*begin_list)->next), data);\n}\n\nint\t\tmain(void)\n{\n\tint\t\tdata;\n\tint\t\tdata2;\n\tt_list\t*base_list;\n\n\tdata = 10;\n\tdata2 = 20;\n\tbase_list = ft_create_elem((void *)&data);\n\tft_list_push_back(&base_list, (void *)&data2);\n\tprintf(\"list->next: %p\\n\", base_list->next);\n\tprintf(\"list->data: %d\\n\", *((int *)(base_list->data)));\n\tprintf(\"list->x->next: %p\\n\", base_list->next->next);\n\tprintf(\"list->x->data: %d\\n\", *((int *)(base_list->next->data)));\n}\n"
},
{
"alpha_fraction": 0.4649595618247986,
"alphanum_fraction": 0.4824797809123993,
"avg_line_length": 36.47474670410156,
"blob_id": "ddd57a43fe6a30a74a9ba678225a2655a9d33493",
"content_id": "c3fb139bc3e85f9249790d4185275d4272a6c02b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 3710,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 99,
"path": "/Final Project/Makefile",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "# **************************************************************************** #\n# #\n# ::: :::::::: #\n# Makefile :+: :+: :+: #\n# +:+ +:+ +:+ #\n# By: ecaceres <[email protected]> +#+ +:+ +#+ #\n# +#+#+#+#+#+ +#+ #\n# Created: 2019/08/13 15:11:33 by ecaceres #+# #+# #\n# Updated: 2019/08/21 19:57:49 by fyusuf-a ### ########.fr #\n# #\n# **************************************************************************** #\nC_YELLOW_B=\\033[1;95m\nC_LIGHT_RED_B=\\033[0;91m\nC_WHITE=\\033[0;97m\nC_RESET=\\033[0;39m\n\nSRCS\t= ./srcs/main.c ./srcs/solution.c ./srcs/utilities.c ./srcs/bsq_parse.c ./srcs/bsq_parse2.c ./srcs/bsq_grid.c \nOBJS\t= ${SRCS:.c=.o}\nINCS\t= includes\nNAME\t= bsq\nCC\t\t= gcc\nRM\t\t= rm -f\nCFLAGS\t= -Wall -Wextra -Werror -g\n\n.c.o :\n\t${CC} ${CFLAGS} -c $< -o ${<:.c=.o} -I${INCS}\n\n${NAME} : ${OBJS}\n\t${CC} ${CFLAGS} ${OBJS} -o ${NAME}\n\nall : ${NAME}\n\nrun :\n\t./${NAME}\n\nclean :\n\t${RM} ${OBJS}\n\nfclean : clean\n\t${RM} ${NAME}\n\nnorm :\n\tnorminette */*.[ch]\n\nfind_sources :\n\tfind srcs -type f -name \"*.c\" | xargs -I{} echo ./{} | tr '\\n' ' '\n\t\nmemcheck : re\n\t/Users/ecaceres/apps/valgrind_3.13_high_sierra/vg-in-place --leak-check=full ./bsq ./tests/set/*\n\t\nmemcheck_1 : re\n\t/Users/ecaceres/apps/valgrind_3.13_high_sierra/vg-in-place --leak-check=full ./bsq ./tests/set/grid_1\n\ncheck : re\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing set$(C_RESET)\" ;\n\t@./${NAME} ./tests/set/* || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing an empty file$(C_RESET)\" ;\n\t@./${NAME} ./tests/cases/empty_map || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing an invalid map$(C_RESET)\" ;\n\t@./${NAME} ./tests/cases/invalid_map || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing file with no permission$(C_RESET)\" ;\n\t@./${NAME} ./tests/cases/no_permission || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing directory$(C_RESET)\" ;\n\t@./${NAME} ./tests/cases/ || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing 1x1 maps$(C_RESET)\" ;\n\t@./${NAME} ./tests/cases/one_by_one_* || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing maps with irregular width$(C_RESET)\" ;\n\t@./${NAME} ./tests/cases/map_with_irregular_width* || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing maps with invalid header$(C_RESET)\" ;\n\t@./${NAME} ./tests/cases/map_with_invalid_header* || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing maps with invalid height$(C_RESET)\" ;\n\t@./${NAME} ./tests/cases/map_with_invalid_height* || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing a map without any column$(C_RESET)\" ;\n\t@./${NAME} ./tests/cases/no_colomn_line || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing maps with only one column$(C_RESET)\" ;\n\t@./${NAME} ./tests/cases/one_colomn_line* || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing maps with only one line$(C_RESET)\" ;\n\t@./${NAME} ./tests/cases/one_line_colomn* || true ;\n\t@##\n\t@echo \"[$(C_YELLOW_B)TESTING$(C_RESET)] $(C_WHITE)Testing 2x2 maps:$(C_RESET)\" ;\n\t@./${NAME} ./tests/cases/two_by_two* || true ;\n\t@##\n\nre : fclean all\n\ntest : re\n\n.PHONY: all clean fclean re .c.o test\n"
},
{
"alpha_fraction": 0.5555555820465088,
"alphanum_fraction": 0.6428571343421936,
"avg_line_length": 10.454545021057129,
"blob_id": "e51f15f2958ba09f71ffb55c428a7611bd9a0fa0",
"content_id": "dfafe7678ab5add6b5e4e17ba6be113f02a44f70",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 126,
"license_type": "no_license",
"max_line_length": 18,
"num_lines": 11,
"path": "/Shell01/ex07/test.sh",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "export FT_LINE1=7\nexport FT_LINE2=15\n\n./r_dwssap.sh\n\necho \"-----------\"\n\nexport FT_LINE1=13\nexport FT_LINE2=24\n\n./r_dwssap.sh\n"
},
{
"alpha_fraction": 0.6167512536048889,
"alphanum_fraction": 0.6446700692176819,
"avg_line_length": 19.736841201782227,
"blob_id": "ee1154a1d9046cb63353f6156c8b3505da7db02d",
"content_id": "de6836f3ff61570dc534a379fbd3873547f6f63d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 394,
"license_type": "no_license",
"max_line_length": 28,
"num_lines": 19,
"path": "/Shell00/ex08/create_test_files.sh",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "mkdir test\nmkdir test/test\nmkdir test/test/titi\nmkdir test/titi\nmkdir test/titi/test\n\ntouch \"./test/#1#\"\ntouch \"./test/#et#\"\ntouch \"./test/.7~\"\ntouch \"./test/1~\"\ntouch \"./test/disdonc~\"\ntouch \"./test/test/#2#\"\ntouch \"./test/test/2~\"\ntouch \"./test/test/titi/#4#\"\ntouch \"./test/test/titi/4~\"\ntouch \"./test/titi/#3#\"\ntouch \"./test/titi/3~\"\ntouch \"./test/titi/test/#5#\"\ntouch \"./test/titi/test/5~\"\n"
},
{
"alpha_fraction": 0.7154471278190613,
"alphanum_fraction": 0.7154471278190613,
"avg_line_length": 19.5,
"blob_id": "c292be9ba7062ea37be5ef4515cfbc5e4ab23096",
"content_id": "4ae510882f1876614f2210672316b1aedb0e5f83",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 123,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 6,
"path": "/C10/ex02/includes/ft_strncpy.h",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "#ifndef FT_STRNCPY_H\n# define FT_STRNCPY_H\n\nchar\t*ft_str_sized_copy(char *dest, char *src, unsigned int src_size);\n\n#endif\n"
},
{
"alpha_fraction": 0.3335384726524353,
"alphanum_fraction": 0.3532307744026184,
"avg_line_length": 30.25,
"blob_id": "4cd4925732f0acffaf057544ee7b332760528fdf",
"content_id": "a358425e8e27abe0c817440fe86c607d4cbcc154",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "C",
"length_bytes": 1625,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 52,
"path": "/Rush02/ex00/srcs/ft_number_dictionary.c",
"repo_name": "DiorChoppa/CPiscine",
"src_encoding": "UTF-8",
"text": "/* ************************************************************************** */\n/* */\n/* ::: :::::::: */\n/* ft_number_dictionary.c :+: :+: :+: */\n/* +:+ +:+ +:+ */\n/* By: ecaceres <[email protected]> +#+ +:+ +#+ */\n/* +#+#+#+#+#+ +#+ */\n/* Created: 2019/08/17 11:03:18 by ecaceres #+# #+# */\n/* Updated: 2019/08/17 11:03:18 by ecaceres ### ########.fr */\n/* */\n/* ************************************************************************** */\n\n#include <stdlib.h>\n\n#include \"ft_number_dictionary.h\"\n\nt_dict\tft_load_default_dictionary(void)\n{\n\treturn (ft_load_dictionary(ENGLISH_DICT));\n}\n\nt_dict\tft_load_dictionary(char *path)\n{\n\tint\t\t\t\tsize;\n\tt_dict_entry\t*entries;\n\tt_dict\t\t\tdict;\n\n\tdict = (t_dict){path, false, 0, 0};\n\tsize = ft_count_valid_line(path);\n\tif (size == INVALID)\n\t\treturn (dict);\n\tif (!(entries = malloc((size + 1) * sizeof(t_dict_entry))))\n\t\treturn (dict);\n\tdict.size = size;\n\tdict.entries = entries;\n\tdict.valid = ft_load_valid_line(path, size, &dict);\n\tif (dict.valid)\n\t\tft_sort_dictionary(&dict);\n\treturn (dict);\n}\n\nvoid\tft_free_dictionary(t_dict *dict)\n{\n\tint\t\tindex;\n\n\tindex = 0;\n\twhile (index < dict->size)\n\t{\n\t\tfree(dict->entries[index].str);\n\t\tindex++;\n\t}\n}\n"
}
] | 175 |
iamfaqeehhokyky/transcription
|
https://github.com/iamfaqeehhokyky/transcription
|
b47da92f8c7f1e143cf525d60cc95ba21795e79f
|
93994a1ed2598c1b8526f3247be4dfedd30eb247
|
386f7f677fb6e541bc8cf80e7cce4da55dee2013
|
refs/heads/master
| 2023-08-04T07:30:15.322524 | 2021-10-08T16:57:57 | 2021-10-08T16:57:57 | 415,063,744 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6866196990013123,
"alphanum_fraction": 0.6866196990013123,
"avg_line_length": 19.35714340209961,
"blob_id": "963d701c4a506afdf72b8867cb87b3f6f6197cd7",
"content_id": "2bc887262f90898266252823915a5188c2f408b4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 284,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 14,
"path": "/main.py",
"repo_name": "iamfaqeehhokyky/transcription",
"src_encoding": "UTF-8",
"text": "import speech_recognition as sr\nimport datetime\n\nr = sr.Recognizer()\nwith sr.Microphone() as source:\n print(\"Listening...\")\n\naudio = r.listen(source)\n\ntry:\n text = r.recognize_google(audio)\n print(\"{}\".format(text))\nexcept:\n print(\"Sorry, could not recognize your voice!\")"
}
] | 1 |
StephensCheng/SVM
|
https://github.com/StephensCheng/SVM
|
1cdc3630c185b412554d8f6cf0b7c06a2eef735c
|
f28c8348caef3794d26d0a94d6a8993133be0b37
|
673ff7395c973e836c49fb9ec8a127a4ae5cb949
|
refs/heads/main
| 2023-05-16T07:16:20.774733 | 2021-05-29T05:49:18 | 2021-05-29T05:49:18 | 371,890,774 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5098510980606079,
"alphanum_fraction": 0.5498796701431274,
"avg_line_length": 29.15833282470703,
"blob_id": "c1ae2e257eb561cb134551d637905237372c7ff2",
"content_id": "2de79483308618c34ef65c0bcd289082809703e2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 13515,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 360,
"path": "/SVM.py",
"repo_name": "StephensCheng/SVM",
"src_encoding": "UTF-8",
"text": "import matplotlib.pyplot as plt\r\nimport random\r\nfrom mpl_toolkits.mplot3d import Axes3D\r\nfrom sklearn import datasets, model_selection\r\nfrom sklearn.model_selection import GridSearchCV\r\nfrom sklearn.decomposition import PCA\r\nfrom sklearn import svm\r\nimport numpy as np\r\n\r\n# import some data to play with\r\niris = datasets.load_iris()\r\nX = iris.data[:, :2] # we only take the first two features.\r\ny = iris.target\r\n\r\nX1 = []\r\ny1 = []\r\nfor i, j in zip(X, y):\r\n if j == 0:\r\n continue\r\n X1.append(i)\r\n if j == 2:\r\n y1.append(-1)\r\n else:\r\n y1.append(j)\r\n\r\nnp.save(\"./Data/svm/data.npy\", X1)\r\nnp.save(\"./Data/svm/target.npy\", y1)\r\nx_train, x_test, y_train, y_test = model_selection.train_test_split(X1, y1, random_state=1, train_size=0.7)\r\n#\r\nx_train = np.array(x_train)\r\ny_train = np.array(y_train)\r\nx_test = np.array(x_test)\r\ny_test = np.array(y_test)\r\n\r\nnp.save(\"./Data/svm/train_data.npy\", x_train)\r\nnp.save(\"./Data/svm/train_target.npy\", y_train)\r\nnp.save(\"./Data/svm/test_data.npy\", x_test)\r\nnp.save(\"./Data/svm/test_target.npy\", y_test)\r\n\r\nx_train = np.load(\"./Data/svm/train_data.npy\")\r\ny_train = np.load(\"./Data/svm/train_target.npy\")\r\nx_test = np.load(\"./Data/svm/test_data.npy\")\r\ny_test = np.load(\"./Data/svm/test_target.npy\")\r\n\r\nx_min, x_max = x_train[:, 0].min() - .5, x_train[:, 0].max() + .5\r\ny_min, y_max = x_train[:, 1].min() - .5, x_train[:, 1].max() + .5\r\n\r\nplt.figure(2, figsize=(8, 6))\r\nplt.clf()\r\n\r\n# Plot the training points\r\nplt.scatter(x_train[:, 0], x_train[:, 1], c=y_train, cmap=plt.cm.Set1,\r\n edgecolor='k')\r\nplt.xlabel('Sepal length')\r\nplt.ylabel('Sepal width')\r\n\r\nplt.xlim(x_min, x_max)\r\nplt.ylim(y_min, y_max)\r\nplt.xticks(())\r\nplt.yticks(())\r\nplt.show()\r\n\r\n\r\nclf = svm.SVC(C=0.8, kernel='rbf', gamma=20, decision_function_shape='ovr')\r\nclf.fit(x_train, y_train.ravel())\r\n\r\nprint(clf.score(x_train, y_train)) # 精度\r\nprint(clf.score(x_test, y_test))\r\n\r\n\r\n\r\n\r\n\r\nclass dataStruct:\r\n def __init__(self, dataMatIn, labelMatIn, C, toler, eps):\r\n self.dataMat = dataMatIn # 样本数据\r\n self.labelMat = labelMatIn # 样本标签\r\n self.C = C # 参数C\r\n self.toler = toler # 容错率\r\n self.eps = eps # 乘子更新最小比率\r\n self.m = np.shape(dataMatIn)[0] # 样本数\r\n self.alphas = np.mat(np.zeros((self.m, 1))) # 拉格朗日乘子alphas,shape(m,1),初始化全为0\r\n self.b = 0 # 参数b,初始化为0\r\n self.eCache = np.mat(np.zeros((self.m, 2))) # 误差缓存,\r\n\r\n\r\ndef takeStep(i1, i2, dS):\r\n # 如果选择了两个相同的乘子,不满足线性等式约束条件,因此不做更新\r\n if (i1 == i2):\r\n print(\"i1 == i2\")\r\n return 0\r\n # 从数据结构中取得需要用到的数据\r\n alpha1 = dS.alphas[i1, 0]\r\n alpha2 = dS.alphas[i2, 0]\r\n y1 = dS.labelMat[i1]\r\n y2 = dS.labelMat[i2]\r\n\r\n # 如果E1以前被计算过,就直接从数据结构的cache中读取它,这样节省计算量,#如果没有历史记录,就计算E1\r\n if (dS.eCache[i1, 0] == 1):\r\n E1 = dS.eCache[i1, 1]\r\n else:\r\n u1 = (np.multiply(dS.alphas, dS.labelMat)).T * np.dot(dS.dataMat, dS.dataMat[i1, :].T) + dS.b # 计算SVM的输出值u1\r\n E1 = float(u1 - y1) # 误差E1\r\n # dS.eCache[i1] = [1,E1] #存到cache中\r\n\r\n # 如果E2以前被计算过,就直接从数据结构的cache中读取它,这样节省计算量,#如果没有历史记录,就计算E2\r\n if (dS.eCache[i2, 0] == 1):\r\n E2 = dS.eCache[i2, 1]\r\n else:\r\n u2 = (np.multiply(dS.alphas, dS.labelMat)).T * np.dot(dS.dataMat, dS.dataMat[i2, :].T) + dS.b # 计算SVM的输出值u2\r\n E2 = float(u2 - y2) # 误差E2\r\n # dS.eCache[i2] = [1,E2] #存到cache中\r\n\r\n s = y1 * y2\r\n\r\n # 计算alpha2的上界H和下界L\r\n if (s == 1): # 如果y1==y2\r\n L = max(0, alpha1 + alpha2 - dS.C)\r\n H = min(dS.C, alpha1 + alpha2)\r\n elif (s == 
-1): # 如果y1!=y2\r\n L = max(0, alpha2 - alpha1)\r\n H = min(dS.C, dS.C + alpha2 - alpha1)\r\n if (L == H):\r\n print(\"L==H\")\r\n return 0\r\n\r\n # 计算学习率eta\r\n k11 = np.dot(dS.dataMat[i1, ::], dS.dataMat[i1, :].T)\r\n k12 = np.dot(dS.dataMat[i1, ::], dS.dataMat[i2, :].T)\r\n k22 = np.dot(dS.dataMat[i2, ::], dS.dataMat[i2, :].T)\r\n eta = k11 - 2 * k12 + k22\r\n\r\n if (eta > 0): # 正常情况下eta是大于0的,此时计算新的alpha2,新的alpha2标记为a2\r\n a2 = alpha2 + y2 * (E1 - E2) / eta # 这个公式的推导,曾经花费了我很多精力,现在写出来却是如此简洁,数学真是个好东西\r\n # 对a2进行上下界裁剪\r\n if (a2 < L):\r\n a2 = L\r\n elif (a2 > H):\r\n a2 = H\r\n else: # 非正常情况下,也有可能出现eta《=0的情况\r\n print(\"eta<=0\")\r\n return 0\r\n\r\n # 如果更新量太小,就不值浪费算力继续算a1和b,不值得对这三者进行更新\r\n if (abs(a2 - alpha2) < dS.eps * (a2 + alpha2 + dS.eps)):\r\n print(\"so small update on alpha2!\")\r\n return 0\r\n\r\n # 计算新的alpha1,标记为a1\r\n a1 = alpha1 + s * (alpha2 - a2)\r\n\r\n # 计算b1和b2,并且更新b\r\n b1 = -E1 + y1 * (alpha1 - a1) * np.dot(dS.dataMat[i1, :], dS.dataMat[i1, :].T) + y2 * (alpha2 - a2) * np.dot(\r\n dS.dataMat[i1, :], dS.dataMat[i2, :].T) + dS.b\r\n b2 = -E2 + y1 * (alpha1 - a1) * np.dot(dS.dataMat[i1, :], dS.dataMat[i2, :].T) + y2 * (alpha2 - a2) * np.dot(\r\n dS.dataMat[i2, :], dS.dataMat[i2, :].T) + dS.b\r\n if (a1 > 0 and a1 < dS.C):\r\n dS.b = b1\r\n elif (a2 > 0 and a2 < dS.C):\r\n dS.b = b2\r\n else:\r\n dS.b = (b1 + b2) / 2\r\n\r\n # 用a1和a2更新alpha1和alpha2\r\n dS.alphas[i1] = a1\r\n dS.alphas[i2] = a2\r\n\r\n # 由于本次alpha1、alpha2和b的更新,需要重新计算Ecache,注意Ecache只存储那些非零的alpha对应的误差\r\n validAlphasList = np.nonzero(dS.alphas.A)[0] # 所有的非零的alpha标号列表\r\n dS.eCache = np.mat(np.zeros((dS.m, 2))) # 要把Ecache先清空\r\n for k in validAlphasList: # 遍历所有的非零alpha\r\n uk = (np.multiply(dS.alphas, dS.labelMat).T).dot(np.dot(dS.dataMat, dS.dataMat[k, :].T)) + dS.b\r\n yk = dS.labelMat[k, 0]\r\n Ek = float(uk - yk)\r\n dS.eCache[k] = [1, Ek]\r\n print(\"updated\")\r\n return 1\r\n\r\n\r\n'''\r\n函数名称:examineExample\r\n函数功能:给定alpha2,如果alpha2不满足KKT条件,则再找一个alpha1,对这两个乘子进行一次takeStep\r\n输入参数:i2 alpha的标号\r\n dataMat 样本数据\r\n labelMat 样本标签\r\n返回参数:如果成功对一对乘子alpha1和alpha2执行了一次takeStep,返回1;否则,返回0\r\n作者:Leo Ma\r\n时间:2019.05.20\r\n'''\r\n\r\n\r\ndef examineExample(i2, dS):\r\n # 从数据结构中取得需要用到的数据\r\n y2 = dS.labelMat[i2, 0]\r\n alpha2 = dS.alphas[i2, 0]\r\n\r\n # 如果E2以前被计算过,就直接从数据结构的cache中读取它,这样节省计算量,#如果没有历史记录,就计算E2\r\n if (dS.eCache[i2, 0] == 1):\r\n E2 = dS.eCache[i2, 1]\r\n else:\r\n u2 = (np.multiply(dS.alphas, dS.labelMat)).T * np.dot(dS.dataMat, dS.dataMat[i2, :].T) + dS.b # 计算SVM的输出值u2\r\n E2 = float(u2 - y2) # 误差E2\r\n # dS.eCache[i2] = [1,E2]\r\n\r\n r2 = E2 * y2\r\n # 如果当前的alpha2在一定容忍误差内不满足KKT条件,则需要对其进行更新\r\n if ((r2 < -dS.toler and alpha2 < dS.C) or (r2 > dS.toler and alpha2 > 0)):\r\n '''\r\n #随机选择的方法确定另一个乘子alpha1,多执行几次可可以收敛到很好的结果,就是效率比较低\r\n i1 = random.randint(0, dS.m-1)\r\n if(takeStep(i1,i2,dS)):\r\n return 1\r\n '''\r\n # 启发式的方法确定另一个乘子alpha1\r\n nonZeroAlphasList = np.nonzero(dS.alphas.A)[0].tolist() # 找到所有的非0的alpha\r\n nonCAlphasList = np.nonzero((dS.alphas - dS.C).A)[0].tolist() # 找到所有的非C的alpha\r\n nonBoundAlphasList = list(set(nonZeroAlphasList) & set(nonCAlphasList)) # 所有非边界(既不=0,也不=C)的alpha\r\n\r\n # 如果非边界的alpha数量至少两个,则在所有的非边界alpha上找到能够使\\E1-E2\\最大的那个E1,对这一对乘子进行更新\r\n if (len(nonBoundAlphasList) > 1):\r\n maxE = 0\r\n maxEindex = 0\r\n for k in nonBoundAlphasList:\r\n if (abs(dS.eCache[k, 1] - E2) > maxE):\r\n maxE = abs(dS.eCache[k, 1] - E2)\r\n maxEindex = k\r\n i1 = maxEindex\r\n if (takeStep(i1, i2, dS)):\r\n return 1\r\n\r\n # 
如果上面找到的那个i1没能使alpha和b得到有效更新,则从随机开始处遍历整个非边界alpha作为i1,逐个对每一对乘子尝试进行更新\r\n randomStart = random.randint(0, len(nonBoundAlphasList) - 1)\r\n for i1 in range(randomStart, len(nonBoundAlphasList)):\r\n if (i1 == i2): continue\r\n if (takeStep(i1, i2, dS)):\r\n return 1\r\n for i1 in range(0, randomStart):\r\n if (i1 == i2): continue\r\n if (takeStep(i1, i2, dS)):\r\n return 1\r\n\r\n # 如果上面的更新仍然没有return 1跳出去或者非边界alpha数量少于两个,这种情况只好从随机开始的位置开始遍历整个可能的i1,对每一对尝试更新\r\n randomStart = random.randint(0, dS.m - 1)\r\n for i1 in range(randomStart, dS.m):\r\n if (i1 == i2): continue\r\n if (takeStep(i1, i2, dS)):\r\n return 1\r\n for i1 in range(0, randomStart):\r\n if (i1 == i2): continue\r\n if (takeStep(i1, i2, dS)):\r\n return 1\r\n '''\r\n i1 = random.randint(0,dS.m-1)\r\n if(takeStep(i1,i2,dS)):\r\n return 1 \r\n '''\r\n # 如果实在还更新不了,就回去重新选择一个alpha2吧,当前的alpha2肯定是有毒\r\n return 0\r\n\r\n\r\n'''\r\n函数名称:SVM_with_SMO\r\n函数功能:用SMO写的SVM的入口函数,里面采用了第一个启发式确定alpha2,即在全局遍历和非边界遍历之间来回repeat,直到不再有任何更新\r\n输入参数:dS dataStruct类的数据\r\n返回参数:None\r\n作者:Leo Ma\r\n时间:2019.05.20\r\n'''\r\n\r\n\r\ndef SVM_with_SMO(dS):\r\n # 初始化控制变量,确保第一次要全局遍历\r\n numChanged = 0\r\n examineAll = 1\r\n\r\n # 显然,如果全局遍历了一次,并且没有任何更新,此时examineAll和numChanged都会被置零,算法终止\r\n while (numChanged > 0 or examineAll):\r\n numChanged = 0\r\n if (examineAll):\r\n for i in range(dS.m):\r\n numChanged += examineExample(i, dS)\r\n else:\r\n for i in range(dS.m):\r\n if (dS.alphas[i] == 0 or dS.alphas[i] == dS.C): continue\r\n numChanged += examineExample(i, dS)\r\n if (examineAll == 1):\r\n examineAll = 0\r\n elif (numChanged == 0):\r\n examineAll = 1\r\n\r\n\r\n'''\r\n函数名称:cal_W\r\n函数功能:根据alpha和y来计算W\r\n输入参数:dS dataStruct类的数据\r\n返回参数:W 超平名的法向量W \r\n作者:Leo Ma\r\n时间:2019.05.20\r\n'''\r\n\r\n\r\ndef cal_W(dS):\r\n W = np.dot(dS.dataMat.T, np.multiply(dS.labelMat, dS.alphas))\r\n return W\r\n\r\n\r\n'''\r\n函数名称:showClassifer\r\n函数功能:画出原始数据点、超平面,并标出支持向量\r\n输入参数:dS dataStruct类的数据\r\n W 超平名的法向量W \r\n返回参数:None\r\n作者:机器学习实践SVM chapter 6\r\n修改:Leo Ma\r\n时间:2019.05.20\r\n'''\r\n\r\n\r\ndef showClassifer(dS, w):\r\n # 绘制样本点\r\n dataMat = dS.dataMat.tolist()\r\n data_plus = [] # 正样本\r\n data_minus = [] # 负样本\r\n for i in range(len(dataMat)):\r\n if dS.labelMat[i, 0] > 0:\r\n data_plus.append(dataMat[i])\r\n else:\r\n data_minus.append(dataMat[i])\r\n data_plus_np = np.array(data_plus) # 转换为numpy矩阵\r\n data_minus_np = np.array(data_minus) # 转换为numpy矩阵\r\n plt.scatter(np.transpose(data_plus_np)[0], np.transpose(data_plus_np)[1], s=30, alpha=0.7, c='r') # 正样本散点图\r\n plt.scatter(np.transpose(data_minus_np)[0], np.transpose(data_minus_np)[1], s=30, alpha=0.7, c='g') # 负样本散点图\r\n # 绘制直线\r\n x1 = max(dataMat)[0]\r\n x2 = min(dataMat)[0]\r\n a1, a2 = w\r\n b = float(dS.b)\r\n a1 = float(a1[0])\r\n a2 = float(a2[0])\r\n y1, y2 = (-b - a1 * x1) / a2, (-b - a1 * x2) / a2\r\n plt.plot([x1, x2], [y1, y2])\r\n # 找出支持向量点\r\n for i, alpha in enumerate(dS.alphas):\r\n if abs(alpha) > 0.000000001:\r\n x, y = dataMat[i]\r\n plt.scatter([x], [y], s=150, c='none', alpha=0.7, linewidth=1.5, edgecolor='red')\r\n plt.xlabel(\"happy 520 day, 2018.06.13\")\r\n # plt.savefig(\"svm.png\")\r\n plt.show()\r\n\r\n\r\nif __name__ == '__main__':\r\n\r\n dS = dataStruct(np.mat(x_train), np.mat(y_train).T, 0.8, 0.001, 0.01) # 初始化数据结构 dataMatIn, labelMatIn,C,toler,eps\r\n\r\n for i in range(0, 1): # 只需要执行一次,效果就非常不错\r\n SVM_with_SMO(dS)\r\n W = cal_W(dS)\r\n print(W,dS.b)\r\n showClassifer(dS, W.tolist())\r\n"
},
{
"alpha_fraction": 0.7142857313156128,
"alphanum_fraction": 0.7142857313156128,
"avg_line_length": 9.5,
"blob_id": "dc4eea3288d5389d6f96b749bd83fb28f5300aa0",
"content_id": "56e1d74ca5084de4dcacbcf17c9cc2865f736a0f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 21,
"license_type": "no_license",
"max_line_length": 14,
"num_lines": 2,
"path": "/README.md",
"repo_name": "StephensCheng/SVM",
"src_encoding": "UTF-8",
"text": "# SVM\nNewton and SGD\n"
},
{
"alpha_fraction": 0.5183210968971252,
"alphanum_fraction": 0.5446369051933289,
"avg_line_length": 30.630434036254883,
"blob_id": "b84638cd1e68ed715b4f63ba19708ecbb8b337b5",
"content_id": "8d5c429d52f397dcc4b1ec6fac298f0b63a344d9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3158,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 92,
"path": "/S-SVM.py",
"repo_name": "StephensCheng/SVM",
"src_encoding": "UTF-8",
"text": "import matplotlib.pyplot as plt\r\nimport random\r\nimport time\r\nfrom mpl_toolkits.mplot3d import Axes3D\r\nfrom sklearn import datasets, model_selection\r\nfrom sklearn.decomposition import PCA\r\nfrom sklearn import svm\r\nimport numpy as np\r\n\r\n\r\ndef loadDataSet():\r\n dataMat = [] # 列表list\r\n labelMat = []\r\n txt = open('./Data/LR/horse.txt')\r\n for line in txt.readlines():\r\n lineArr = line.strip().split() # strip():返回一个带前导和尾随空格的字符串的副本\r\n # split():默认以空格为分隔符,空字符串从结果中删除\r\n dataMat.append([float(lineArr[0]), float(lineArr[1])])\r\n if int(lineArr[2])==0:\r\n labelMat.append(-1)\r\n else:\r\n labelMat.append(int(lineArr[2]))\r\n x_train = dataMat[:70]\r\n y_train = labelMat[:70]\r\n x_test = dataMat[70:]\r\n y_test = labelMat[70:]\r\n\r\n np.save(\"./Data/s-svm/train_data.npy\", np.array(x_train))\r\n np.save(\"./Data/s-svm/train_target.npy\", np.array(y_train))\r\n np.save(\"./Data/s-svm/test_data.npy\", np.array(x_test))\r\n np.save(\"./Data/s-svm/test_target.npy\", np.array(y_test))\r\n return x_train, y_train, x_test, y_test\r\n\r\n\r\nclass LinearSVM:\r\n def __init__(self):\r\n self.w = self.b = None\r\n\r\n def fit(self, x, y, c=1, lr=0.01, epoch=10000):\r\n x, y = np.asarray(x, np.float32), np.asarray(y, np.float32)\r\n self.w = np.zeros(x.shape[1])\r\n self.b = 2\r\n for _ in range(epoch):\r\n self.w *= 1 - lr\r\n error = 1 - y * self.predict(x, True)\r\n idx = np.argmax(error)\r\n if error[idx] <= 0:\r\n continue\r\n delta = lr * c * y[idx]\r\n self.w += delta * x[idx]\r\n self.b += delta\r\n\r\n def predict(self, x, raw=False):\r\n x = np.asarray(x, np.float32)\r\n y_pred = x.dot(self.w) + self.b\r\n if raw:\r\n return y_pred\r\n return np.sign(y_pred).asstype(np.float32)\r\n\r\n\r\ndef showClassifer(x_train, y_train, w, b):\r\n # 绘制样本点\r\n dataMat = np.mat(x_train).tolist()\r\n data_plus = [] # 正样本\r\n data_minus = [] # 负样本\r\n for i in range(len(dataMat)):\r\n if np.mat(y_train).T[i, 0] > 0:\r\n data_plus.append(dataMat[i])\r\n else:\r\n data_minus.append(dataMat[i])\r\n data_plus_np = np.array(data_plus) # 转换为numpy矩阵\r\n data_minus_np = np.array(data_minus) # 转换为numpy矩阵\r\n plt.scatter(np.transpose(data_plus_np)[0], np.transpose(data_plus_np)[1], s=30, alpha=0.7, c='r') # 正样本散点图\r\n plt.scatter(np.transpose(data_minus_np)[0], np.transpose(data_minus_np)[1], s=30, alpha=0.7, c='g') # 负样本散点图\r\n # 绘制直线\r\n x1 = max(dataMat)[0]\r\n x2 = min(dataMat)[0]\r\n a1, a2 = w\r\n b = float(b)\r\n y1, y2 = (-b - a1 * x1) / a2, (-b - a1 * x2) / a2\r\n plt.plot([x1, x2], [y1, y2])\r\n\r\n plt.show()\r\n\r\n\r\nif __name__ == \"__main__\":\r\n x_train, y_train, x_test, y_test = loadDataSet()\r\n t = LinearSVM()\r\n t.fit(x=x_train, y=y_train, c=1, lr=0.01, epoch=10000)\r\n w, b = t.w, t.b\r\n print(w, b)\r\n showClassifer(x_train, y_train, w, b)\r\n"
}
] | 3 |
emartyacn/python-ocapi-sdk
|
https://github.com/emartyacn/python-ocapi-sdk
|
4b547d32b6a836e18e1a72261eb6215c2034dc78
|
d2240b27c8ec116657308511df8d0cebb49b02af
|
234c23e98038b5a4588623ab86b7bf76572b3d58
|
refs/heads/master
| 2022-11-06T17:57:00.052349 | 2020-06-22T14:32:15 | 2020-06-22T14:32:15 | 274,214,826 | 0 | 0 | null | 2020-06-22T18:35:14 | 2020-06-22T14:32:24 | 2020-06-22T14:32:22 | null |
[
{
"alpha_fraction": 0.6767263412475586,
"alphanum_fraction": 0.720716118812561,
"avg_line_length": 34.563636779785156,
"blob_id": "303f46d676e64fc38132f103e0c86dbde0a3b61d",
"content_id": "2bb365ffd21686783d2824337fd1b405c44626d9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1955,
"license_type": "no_license",
"max_line_length": 314,
"num_lines": 55,
"path": "/README.md",
"repo_name": "emartyacn/python-ocapi-sdk",
"src_encoding": "UTF-8",
"text": "# Python module for OCAPI (under development)\n\n## How to use\n\n### Prequisites\n\n- [OCAPI settings](https://github.com/SalesforceCommerceCloud/ocapi-settings) must be added to the instance under test.\n- pip install requirements.txt\n- [docs](https://documentation.b2c.commercecloud.salesforce.com/DOC1/topic/com.demandware.dochelp/OCAPI/current/shop/Resources/index.htm)\n\n### Authentication with OCAPI can happen one of two ways\n\n1. You can create a file named `.pycapi` at the root of the repo using the following format (Not tested on Windows).\n\n```shell\n[default]\nclient_id = <CLIENT_ID>\nclient_secret = <CLIENT_SECRET>\nhostname = <INSTANCE_URI>\napi_version = v20_4\n```\n\n2. Manualy supply credenials through an API instance.\n\n```python\nfrom ocapi.client import ShopAPI\n\napi = ShopAPI(hostname='dev-us.pandora.net', client_id='<CLIENT_ID>', client_secret='<CLIENT_SECRET>', api_version='\n ...: v20_4')\n\napi.product_search(site_id='en-US', query='rings')\n```\n\n### **Note: The above uses a very limited portion of the search endpoint [docs](https://documentation.b2c.commercecloud.salesforce.com/DOC1/topic/com.demandware.dochelp/OCAPI/current/shop/Resources/ProductSearch.html). In order to fully utilize the search API, the product_search method will need to be improved.**\n\n\n### Output should be:\n\n```shell\n[{'_type': 'product_search_hit',\n 'hit_type': 'master',\n 'link': 'https://dev-us.pandora.net/s/en-US/dw/shop/v20_4/products/188882C01?q=rings&client_id=cea04f38-4d79-4a1d-b3cb-171b771dccce',\n 'product_id': '188882C01',\n 'product_name': 'Wrapped Open Infinity Ring',\n 'product_type': {'_type': 'product_type', 'master': True},\n 'represented_product': {'_type': 'product_ref',\n 'id': '188882C01-48',\n 'link': 'https://dev-us.pandora.net/s/en-US/dw/shop/v20_4/products/188882C01-48?q=rings&client_id=cea04f38-4d79-4a1d-b3cb-171b771dccce'}},\n...\n```\n\n### From here we can parse and compile a product URL...\n\n---\nAll code authored by: Erik Marty"
},
{
"alpha_fraction": 0.5752754211425781,
"alphanum_fraction": 0.5899632573127747,
"avg_line_length": 21.72222137451172,
"blob_id": "c4aadd2f96dfe6c0071e9495a1a40bf0e6546afc",
"content_id": "c853ccd2af88b02cf995b6589eff5f2b1ecb0daf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 817,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 36,
"path": "/setup.py",
"repo_name": "emartyacn/python-ocapi-sdk",
"src_encoding": "UTF-8",
"text": "from setuptools import setup\n\nPACKAGE = \"python-ocapi-sdk\"\nVERSION = \"0.1.0\"\n\nclassifiers = [\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Topic :: Software Development :: SDK',\n]\n\ninstall_requires = [\n \"requests>=2.23.0\",\n \"requests-oauthlib>=1.3.0\",\n]\n\n\ndef main():\n setup(\n name=PACKAGE,\n version=VERSION,\n description=\"Python SDK for Sales Force Commerce Cloud Open Commerce API\",\n url=\"\",\n author=\"Erik Marty\",\n author_email=\"[email protected]\",\n license=\"Apache-2.0\",\n classifiers=classifiers,\n keywords=\"ocapi sales force commerce cloud\",\n packages=[\"ocapi\"],\n package_dir={\"ocapi\": 'ocapi'},\n install_requires=install_requires,\n )\n\n\nif __name__ == '__main__':\n main()"
},
{
"alpha_fraction": 0.5512868165969849,
"alphanum_fraction": 0.5613576769828796,
"avg_line_length": 28.47252655029297,
"blob_id": "38ab90693034e39cdd59a916a4c4e0cf09560ccb",
"content_id": "89997a81710a73bed81dd2a69c3893e4981b31a6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2681,
"license_type": "no_license",
"max_line_length": 144,
"num_lines": 91,
"path": "/ocapi/client.py",
"repo_name": "emartyacn/python-ocapi-sdk",
"src_encoding": "UTF-8",
"text": "import json\n\nimport requests\nfrom requests_oauthlib import OAuth2Session\n\nfrom ocapi.pycapi import PyCAPI\nfrom ocapi.lib.conf import Provider\n\nimport logging\n\n\nclass ShopAPI(PyCAPI):\n\n \"\"\"A module to wrap portions of the OCAPI API using Python\n\n References:\n\n https://github.com/ashishkumar-tudip/python-demandware-sdk\n https://api-explorer.commercecloud.salesforce.com\n https://documentation.b2c.commercecloud.salesforce.com/DOC1/topic/com.demandware.dochelp/OCAPI/current/shop/Resources/index.html\n \"\"\"\n\n def __init__(self, *args, **kwargs):\n self.ocapi = PyCAPI(**kwargs)\n self.API_TYPE = 'dw/shop'\n\n\n def api_url(self, site_id='-'):\n return 'https://{0}/s/{1}/{2}/{3}'.format(\n self.ocapi.hostname,\n site_id,\n self.API_TYPE,\n self.ocapi.api_version\n )\n\n\n def product_search(self, site_id, query):\n \"\"\"\n https://documentation.b2c.commercecloud.salesforce.com/DOC1/topic/com.demandware.dochelp/OCAPI/current/shop/Resources/ProductSearch.html\n\n \"\"\"\n api_url = self.api_url(site_id)\n endpoint = '/product_search?q={0}&client_id={1}'.format(query, self.ocapi.client_id)\n request_url = '{0}{1}'.format(api_url, endpoint)\n res = requests.get(\n request_url,\n headers=self.ocapi.headers,\n timeout=30,\n )\n logging.debug(json.dumps(res.json(), indent=2))\n try:\n hits = res.json()['hits']\n logging.info(json.dumps(hits, indent=2))\n return hits\n except Exception as e:\n logging.exception('\\n\\n')\n\n\n def auth(self):\n # WIP\n endpoint = '/customers/auth?client_id={0}'.format(self.ocapi.client_id)\n request_url = '{0}{1}'.format(self.api_url, endpoint)\n payload = {'type': 'credentials'}\n req = requests.post(\n request_url,\n headers=self.ocapi.headers,\n auth=self.creds,\n json=payload,\n )\n\n\n def customer(self):\n # WIP\n endpoint = '/customers?client_id={0}'.format(self.ocapi.client_id)\n request_url = '{0}{1}'.format(self.api_url, endpoint)\n payload = {\n \"password\":\"abcd1234$$\",\n \"customer\": {\n \"login\": \"ocapi.qa\",\n \"email\":\"[email protected]\",\n \"last_name\":\"Ocapi\"\n }\n }\n req = requests.post(\n request_url,\n headers=self.ocapi.headers,\n json=payload,\n timeout=30,\n )\n logging.info(json.dumps(req.json(), indent=2))\n print(json.dumps(req.json(), indent=2))"
}
] | 3 |
thekatiebr/csc7210-project2
|
https://github.com/thekatiebr/csc7210-project2
|
f5eabe487e71bbc92d9d99d573a50e7d8c1dace7
|
42a3f329d7256594da53b558022386336202c056
|
926a3da15bb6bedbcca9a458375efa88d7e135c2
|
refs/heads/master
| 2023-03-14T01:18:59.080349 | 2021-02-26T20:07:31 | 2021-02-26T20:07:31 | 307,846,421 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.540096640586853,
"alphanum_fraction": 0.5590982437133789,
"avg_line_length": 40.952701568603516,
"blob_id": "d7cd3f0a0dd3a8031311cbb308193df4929048ef",
"content_id": "566c18a8c4085f771824609c2c0aaef0adcf9dd6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6210,
"license_type": "no_license",
"max_line_length": 124,
"num_lines": 148,
"path": "/source/DogCatData.py",
"repo_name": "thekatiebr/csc7210-project2",
"src_encoding": "UTF-8",
"text": "# PyTorch Imports\nfrom torchvision import transforms\nfrom torch.utils.data import Dataset\nimport torch\n# Data Science | Image Tools | MatplotLib\nimport numpy as np\nimport pandas as pd\nimport os, sys, shutil, time, argparse\n\n# Image manipulations\nfrom PIL import Image\n\nclass DogCatData(Dataset):\n '''\n Class to process a custom image dataset. Particularly, this class is designed to handle the \n Chest XRay data https://stanfordmlgroup.github.io/competitions/chexpert/\n\n Class Variables:\n * data_frame: this is a dataframe containing the relative file paths and labels \n * test: boolean; True => this dataset does not have labels, only image paths\n * root_dir: string pointing to the root of the image directory, paths in the data_frame are to be\n concatenated to this value to yield the correct path\n * transform: A pytorch transform pipeline of preprocessing operations to conduct to prepare\n the image for the model\n * \n '''\n def __init__(self,df,root_dir,transform_key=None, test = False, normalize=True):\n '''\n Constructor for Dataset class. This method assigns the class variables based on the parameters\n\n Parameters\n * df: this is a dataframe containing the relative file paths and labels \n * root_dir: string pointing to the root of the image directory, paths in the data_frame are to be\n concatenated to this value to yield the correct path\n * transform: Optional; A pytorch transform pipeline of preprocessing operations to conduct to prepare\n the image for the model; if None/no argument provided, method define_image_transforms is called\n * task: defines whether binary classification of diabetic retinopathy severity or a binary classification\n \"multi\" => each class is one-hot encoded as a vector\n tuple => tuple[0] is negative class tuple[1] is positive class\n * test: OPTIONAL; boolean; True => this dataset does not have labels, only image paths\n '''\n self.data_frame = df #passing dataframe instead of csv file because of train/test split\n print(self.data_frame)\n self.test = test \n self.root_dir = root_dir\n self.transform = self.define_image_transforms(transform_key, normalize)\n #image data \n \n def define_image_transforms(self, key, normalize=False):\n '''\n This function defines the pipeline of preprocessing operations that is required for an image\n to be processed by a model\n\n No parameters\n\n Upon Completion:\n * A dictionary with pipelines for training, validation, and testing data are returned\n * The pipeline includes (1) resizing the image to 224x224\n (2) converting the image to a pytorch tensor\n (3) normalizing the image (this is required; idk why)\n '''\n if normalize:\n image_transforms = {\n \"train\":\n transforms.Compose([\n transforms.Resize(256),\n transforms.CenterCrop(224),\n transforms.ToTensor(),\n transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),\n ]),\n \"valid\":\n transforms.Compose([\n transforms.Resize(256),\n transforms.CenterCrop(224),\n transforms.ToTensor(),\n transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),\n ]),\n \"test\":\n transforms.Compose([\n transforms.Resize(256),\n transforms.CenterCrop(224),\n transforms.ToTensor(),\n transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),\n ])\n } \n else:\n image_transforms = {\n \"train\":\n transforms.Compose([\n transforms.Resize(128),\n transforms.CenterCrop(96),\n transforms.ToTensor()\n ]),\n \"valid\":\n transforms.Compose([\n transforms.Resize(128),\n transforms.CenterCrop(96),\n 
transforms.ToTensor()\n ]),\n \"test\":\n transforms.Compose([\n transforms.Resize(128),\n transforms.CenterCrop(96),\n transforms.ToTensor()\n ])\n } \n return image_transforms[key]\n\n def __len__(self):\n '''\n Method inherited from Pytorch Dataset class\n Returns the number of items in the datset\n '''\n return self.data_frame.shape[0]\n \n def __getitem__(self, idx):\n '''\n Method inherited from Pytorch Dataset class\n Returns a given image based on the index passed\n\n Parameters:\n * idx: integer corresponding to the location of a particular image in the dataframe\n\n Upon completion, this method will open the image based on the \"Path\" column and return the image and the label,\n if self.test == False\n '''\n\n #print(self.task)\n if torch.is_tensor(idx):\n idx = idx.tolist()\n\n\n image_name = os.path.join(self.root_dir,\n self.data_frame.loc[idx, \"filename\"])\n \n with Image.open(image_name) as img:\n img = Image.open(image_name)\n img = img.convert('RGB')\n #print(img.shape)\n img_tensor = self.transform(img)\n if not self.test:\n image_label = self.data_frame.loc[idx, \"label\"]\n i=0\n #return one-hot encoded tensor\n label_tensor = torch.tensor(image_label, dtype=torch.long)\n image_label = label_tensor.clone()\n return(img_tensor, image_label)\n return (img_tensor)\n\n"
},
{
"alpha_fraction": 0.6503655314445496,
"alphanum_fraction": 0.6725485324859619,
"avg_line_length": 27.503597259521484,
"blob_id": "3e1dd9a4a98c306f0642cbc6a1edbc80f2cba256",
"content_id": "88fff4790dbb11ae51f681702e5617e5704b35ac",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3967,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 139,
"path": "/source/kerasleftover.py",
"repo_name": "thekatiebr/csc7210-project2",
"src_encoding": "UTF-8",
"text": "\n\n\n\n\nimport pandas as pd\nimport numpy as np\nimport sys\nimport os\nimport random\nfrom pathlib import Path\nfrom sklearn.model_selection import train_test_split\nfrom tensorflow.keras.preprocessing.image import ImageDataGenerator\n\n\n\n\ndef getDataGeneratorsKeras(data_file):\n #use sklearn to split data into training, test, val\n df = pd.read_csv(data_file)\n x = 'image'\n y = 'level'\n df.loc[df[y] >= 2, \"level_new\"] = 1\n df.loc[df[y] < 2, \"level_new\"] = 0\n y = \"level_new\"\n df[y] = df[y].astype(str)\n pth = 'data/diabetes/'\n img_size = (224,224)\n batch_size = 16\n\n train, test_df = train_test_split(df, test_size = 0.1, random_state=random.randint(1,100))\n train_df, val_df = train_test_split(train, test_size=0.1, random_state=random.randint(1,100))\n\n #create data generators\n\n # Base train/validation generator\n _datagen = ImageDataGenerator(\n rescale=1./255.,\n validation_split=0.25,\n featurewise_center=False,\n featurewise_std_normalization=False,\n rotation_range=90,\n width_shift_range=0.2,\n height_shift_range=0.2,\n horizontal_flip=True,\n vertical_flip=True\n )\n # Train generator\n train_generator = _datagen.flow_from_dataframe(\n dataframe=train_df,\n directory=pth,\n x_col=x,\n y_col=y,\n batch_size=batch_size,\n shuffle=True,\n class_mode=\"categorical\",\n target_size=img_size)\n print('Train generator created')\n # Validation generator\n val_generator = _datagen.flow_from_dataframe(\n dataframe=val_df,\n directory=pth,\n x_col=x,\n y_col=y,\n batch_size=batch_size,\n shuffle=True,\n class_mode=\"categorical\",\n target_size=img_size) \n print('Validation generator created')\n # Test generator\n _test_datagen=ImageDataGenerator(rescale=1./255.)\n test_generator = _test_datagen.flow_from_dataframe(\n dataframe=test_df,\n directory=pth,\n x_col=x,\n y_col=y,\n batch_size=batch_size,\n shuffle=True,\n class_mode=\"categorical\",\n target_size=img_size) \n print('Test generator created')\n\n return train_generator, val_generator, test_generator\n\ntrain_generator, val_generator, test_generator = getDataGenerators(data_file=\"data/trainLabels.csv\")\n\nx,y = train_generator.next()\nfor i in range(0,3):\n image = x[i]\n label = y[i]\n print (label)\n\n\nimport tensorflow as tf\nfrom tensorflow import keras\nfrom tensorflow.keras import layers\nfrom tensorflow.keras.models import Model, Sequential\nfrom tensorflow.keras.layers import Dense, GlobalAveragePooling2D, Conv2D, Dropout, MaxPooling2D, Dense, Activation\nimport numpy as np\nimport pandas as pd\nfrom sklearn.metrics import accuracy_score\nimport os\nos.sys.path.insert(0, \".\")\nimport DiabetesData\n\n#get diabetes dataloaders\ntrain_generator, val_generator, test_generator = DiabetesData.getDataGenerators(data_file=\"data/trainLabels.csv\")\nbatch_size = 16\nepochs = 1\ninput_shape_=(3,224,224)\nmodel = Sequential()\n\n\nmodel.add(Conv2D(32, (3, 3), input_shape=input_shape_))\nmodel.add(Activation('relu'))\n# model.add(MaxPooling2D(pool_size=(2, 2)))\n\nmodel.add(Conv2D(32, (3, 3)))\nmodel.add(Activation('relu'))\n# model.add(MaxPooling2D(pool_size=(2, 2)))\n\nmodel.add(Conv2D(64, (3, 3)))\nmodel.add(Activation('relu'))\n# model.add(MaxPooling2D(pool_size=(2, 2)))\n\nmodel.add(Flatten()) # this converts our 3D feature maps to 1D feature vectors\nmodel.add(Dense(64))\nmodel.add(Activation('relu'))\nmodel.add(Dropout(0.5))\nmodel.add(Dense(1))\nmodel.add(Activation('softmax'))\n\nmodel.compile(loss=\"binary_crossentropy\", optimizer='adam', metrics=['accuracy'])\n\n\n#fit 
model\nmodel.fit(x=train_generator, \n batch_size=batch_size,\n epochs=epochs)\n\n# Evaluate the model on the test data using `evaluate`\nprint(\"Evaluate on test data\")\nresults = model.evaluate(test_generator)\nprint(\"test loss, test acc:\", results)\n"
},
{
"alpha_fraction": 0.5376645922660828,
"alphanum_fraction": 0.5513608455657959,
"avg_line_length": 38.28275680541992,
"blob_id": "c3eef6a389f65bc97385922df60430ae1c5894af",
"content_id": "f924bc30a35ce02b51fb81028b8dadb2eae0a875",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5695,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 145,
"path": "/source/Autoencoder.py",
"repo_name": "thekatiebr/csc7210-project2",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport pandas as pd\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.optim as optim\nimport matplotlib.pyplot as plt\nfrom torch.utils.data.sampler import SubsetRandomSampler\nfrom sklearn.model_selection import train_test_split\nfrom torch.utils.data import DataLoader\nfrom torchvision import datasets, transforms\nfrom tqdm import tqdm\nfrom DiabetesData import DiabeticData\n\n\nclass Flatten(nn.Module):\n def forward(self, input):\n return input.view(input.size(0), -1)\n\n\nclass Unflatten(nn.Module):\n def __init__(self, channel, height, width):\n super(Unflatten, self).__init__()\n self.channel = channel\n self.height = height\n self.width = width\n\n def forward(self, input):\n return input.view(input.size(0), self.channel, self.height, self.width)\n\n\n# define the NN architecture\n# define the NN architecture\nclass ConvAutoencoder(nn.Module):\n def __init__(self, device=\"cpu\", task='task'):\n super(ConvAutoencoder, self).__init__()\n ## encoder layers ##\n # conv layer (depth from 3 --> 16), 3x3 kernels\n self.conv1 = nn.Conv2d(3, 512, 3, padding=1) \n # conv layer (depth from 16 --> 4), 3x3 kernels\n self.conv2 = nn.Conv2d(512, 4, 3, padding=1)\n # pooling layer to reduce x-y dims by two; kernel and stride of 2\n self.pool = nn.MaxPool2d(2, 2)\n \n ## decoder layers ##\n ## a kernel of 2 and a stride of 2 will increase the spatial dims by 2\n self.t_conv1 = nn.ConvTranspose2d(4, 512, 2, stride=2)\n self.t_conv2 = nn.ConvTranspose2d(512, 3, 2, stride=2)\n \n self.to(device)\n self.optimizer = torch.optim.Adam(self.parameters(), lr=0.001)\n self.criterion = nn.MSELoss() # nn.BCELoss()\n self.device = device\n self.task = task\n \n\n def forward(self, x):\n\n ## encode ##\n # add hidden layers with relu activation function\n # and maxpooling after\n x = F.relu(self.conv1(x))\n x = self.pool(x)\n # add second hidden layer\n x = F.relu(self.conv2(x))\n x = self.pool(x) # compressed representation\n \n ## decode ##\n # add transpose conv layers, with relu activation function\n x = F.relu(self.t_conv1(x))\n # output layer (with sigmoid for scaling from 0 to 1)\n x = F.sigmoid(self.t_conv2(x))\n \n return x\n\n def fit(self, n_epochs, train_loader, validation_loader=None):\n print(\"in fit function\")\n history = {}\n history[\"training_loss\"] = []\n history[\"validation_loss\"] = []\n for epoch in range(1, n_epochs+1):\n print(\"Epoch: {0}\".format(epoch))\n # monitor training loss\n train_loss = 0.0\n val_loss = 0.0\n ###################\n # train the model #\n ###################\n with tqdm(total=len(train_loader)) as pbar:\n for data in train_loader:\n # _ stands in for labels, here\n # no need to flatten images\n images, _ = data\n images = images.to(self.device)\n # clear the gradients of all optimized variables\n self.optimizer.zero_grad()\n # forward pass: compute predicted outputs by passing inputs to the model\n outputs = self.forward(images)\n # calculate the loss\n loss = self.criterion(outputs, images)\n # backward pass: compute gradient of the loss with respect to model parameters\n loss.backward()\n # perform a single optimization step (parameter update)\n self.optimizer.step()\n # update running training loss\n# print(\"Loss: \" , loss.item())\n# print(\"img size: \", images.size(0))\n train_loss += loss.item()*images.size(0)\n pbar.update(1)\n # print avg training statistics \n train_loss = train_loss/len(train_loader)\n history[\"training_loss\"].append(train_loss)\n \n torch.save(self.state_dict(), 
\"models/ConvAE_{0}_{1}.pth\".format(self.task,epoch))\n\n if validation_loader is not None:\n for data in validation_loader:\n images, _ = data\n images = images.to(self.device)\n # clear the gradients of all optimized variables\n self.optimizer.zero_grad()\n # forward pass: compute predicted outputs by passing inputs to the model\n outputs = self.forward(images)\n # calculate the loss\n loss = self.criterion(outputs, images)\n # backward pass: compute gradient of the loss with respect to model parameters\n loss.backward()\n val_loss += loss.item()*images.size(0)\n history[\"validation_loss\"].append(val_loss)\n print('Epoch: {} \\tTraining Loss: {:.6f}\\tValidation Loss: {:.6f}'.format(\n epoch, \n train_loss,\n val_loss\n ))\n self.visualize(history)\n\n def visualize(self, history):\n plt.plot(history['training_loss'])\n plt.plot(history['validation_loss'])\n plt.title('model loss')\n plt.ylabel('loss')\n plt.xlabel('epoch')\n plt.legend(['train', 'val'], loc='upper left')\n plt.savefig(\"training_{0}.png\".format(self.task))\n plt.clf()"
},
{
"alpha_fraction": 0.46952566504478455,
"alphanum_fraction": 0.5135802626609802,
"avg_line_length": 33.82352828979492,
"blob_id": "3a6aa85941d94fee06fd97d76797d170837ba692",
"content_id": "e64065b0ca619937217a3bfe39d1b55f0b9592d3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7695,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 221,
"path": "/source/VGGAutoencoder.py",
"repo_name": "thekatiebr/csc7210-project2",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport pandas as pd\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.optim as optim\nimport matplotlib.pyplot as plt\nfrom torch.utils.data.sampler import SubsetRandomSampler\nfrom sklearn.model_selection import train_test_split\nfrom torch.utils.data import DataLoader\nfrom torchvision import datasets, transforms\nfrom tqdm import tqdm\nfrom DiabetesData import DiabeticData\n\n\nVGG_type = {\n \"Custom\": [64, \"M\", 128, \"M\", 256, \"M\", 512, \"M\", 512, \"M\", 1024],\n \"VGG11\": [64, \"M\", 128, \"M\", 256, 256, \"M\", 512, 512, \"M\", 512, 512, \"M\", 1024, 1024],\n \"VGG13\": [64, 64, \"M\", 128, 128, \"M\", 256, 256, \"M\", 512, 512, \"M\", 512, 512],\n \"VGG16\": [64,64,\"M\",128,128,\"M\",256,256,256,\"M\",512,512,512,\"M\",512,512,512,1024,1024,1024],\n \"VGG19\": [64,64,\"M\",28,128,\"M\",256,256,256,256,\"M\",512,512,512,512,\"M\",512,512,512,512,1024,1024,1024,1024]\n}\n\nclass Decoder(nn.Module):\n def __init__(self, out_channels=3):\n super(Decoder, self).__init__()\n\n self.upconv0 = nn.ConvTranspose2d(\n in_channels=64,\n out_channels=128,\n kernel_size=2,\n stride=2,\n padding=0,\n )\n\n self.upconv1 = nn.ConvTranspose2d(\n in_channels=128,\n out_channels=256,\n kernel_size=2,\n stride=2,\n padding=0,\n )\n self.upconv2 = nn.ConvTranspose2d(\n in_channels=256,\n out_channels=512,\n kernel_size=2,\n stride=2,\n padding=0,\n )\n self.upconv3 = nn.ConvTranspose2d(\n in_channels=512, \n out_channels=1024, \n kernel_size=2, \n stride=2, \n padding=0\n )\n self.upconv4 = nn.ConvTranspose2d(\n in_channels=1024,\n out_channels=3,\n kernel_size=2,\n stride=2,\n padding=0,\n )\n\n self.bn0 = nn.BatchNorm2d(128)\n self.bn1 = nn.BatchNorm2d(256)\n self.bn2 = nn.BatchNorm2d(512)\n self.bn3 = nn.BatchNorm2d(1024)\n # self.bn0 = nn.BatchNorm2d(512)\n # self.bn1 = nn.BatchNorm2d(256)\n # self.bn2 = nn.BatchNorm2d(128)\n # self.bn3 = nn.BatchNorm2d(64)\n self.relu = nn.LeakyReLU(0.2)\n self.op = nn.Sigmoid()\n\n def forward(self, x):\n x = self.relu(self.upconv0(x))\n x = self.bn0(x)\n x = self.relu(self.upconv1(x))\n x = self.bn1(x)\n x = self.relu(self.upconv2(x))\n x = self.bn2(x)\n x = self.relu(self.upconv3(x))\n x = self.bn3(x)\n x = self.relu(self.upconv4(x))\n output = self.op(x)\n\n return x\n\n\nclass VGGEncoder(nn.Module):\n def __init__(self, vgg_version=\"VGG16\", in_channels=3):\n super(VGGEncoder, self).__init__()\n self.in_channels = in_channels\n arch = VGG_type[vgg_version]\n arch.reverse()\n self.conv_layers = self.create_conv(arch)\n # after completing all the conv layer the final matrix will be [ bs , 512, 7 , 7]\n\n def forward(self, x):\n x = self.conv_layers(x)\n\n return x\n\n def create_conv(self, architecture):\n layers = []\n in_channels = self.in_channels\n\n for x in architecture:\n if type(x) == int:\n out_channels = x\n layers += [\n nn.Conv2d(\n in_channels=in_channels,\n out_channels=out_channels,\n kernel_size=3,\n stride=1,\n padding=1,\n ),\n nn.BatchNorm2d(x),\n nn.ReLU(),\n ]\n in_channels = x\n\n elif x == \"M\":\n layers += [nn.MaxPool2d(kernel_size=2, stride=2)]\n\n\n return nn.Sequential(*layers)\n\n\nclass VGGAutoencoder(nn.Module):\n def __init__(self, channels=3, device=\"cpu\", task=\"task\"):\n super(VGGAutoencoder, self).__init__()\n self.task = task\n self.device = device\n self.enc = VGGEncoder(\"Custom\")\n self.dec = Decoder(channels)\n self.enc.to(self.device)\n self.dec.to(self.device)\n self.to(self.device)\n\n self.optimizer = torch.optim.Adam(self.parameters(), 
lr=0.001)\n self.criterion = nn.MSELoss() # nn.BCELoss()\n\n def forward(self, x):\n x = self.enc(x)\n x = self.dec(x)\n\n return x\n\n def fit(self, n_epochs, train_loader, validation_loader=None):\n print(\"in fit function\")\n history = {}\n history[\"training_loss\"] = []\n history[\"validation_loss\"] = []\n for epoch in range(1, n_epochs+1):\n print(\"Epoch: {0}\".format(epoch))\n # monitor training loss\n train_loss = 0.0\n val_loss = 0.0\n ###################\n # train the model #\n ###################\n with tqdm(total=len(train_loader)) as pbar:\n for data in train_loader:\n # _ stands in for labels, here\n # no need to flatten images\n images, _ = data\n images = images.to(self.device)\n # clear the gradients of all optimized variables\n self.optimizer.zero_grad()\n # forward pass: compute predicted outputs by passing inputs to the model\n outputs = self.forward(images)\n # calculate the loss\n loss = self.criterion(outputs, images)\n # backward pass: compute gradient of the loss with respect to model parameters\n loss.backward()\n # perform a single optimization step (parameter update)\n self.optimizer.step()\n # update running training loss\n# print(\"Loss: \" , loss.item())\n# print(\"img size: \", images.size(0))\n train_loss += loss.item()*images.size(0)\n pbar.update(1)\n # print avg training statistics \n train_loss = train_loss/len(train_loader)\n history[\"training_loss\"].append(train_loss)\n \n torch.save(self.state_dict(), \"models/VGGAE_{0}_{1}.pth\".format(self.task,epoch))\n\n if validation_loader is not None:\n for data in validation_loader:\n images, _ = data\n images = images.to(self.device)\n # clear the gradients of all optimized variables\n self.optimizer.zero_grad()\n # forward pass: compute predicted outputs by passing inputs to the model\n outputs = self.forward(images)\n # calculate the loss\n loss = self.criterion(outputs, images)\n # backward pass: compute gradient of the loss with respect to model parameters\n loss.backward()\n val_loss += loss.item()*images.size(0)\n history[\"validation_loss\"].append(val_loss)\n print('Epoch: {0} \\tTraining Loss: {1}\\tValidation Loss: {2}'.format(\n epoch, \n train_loss,\n val_loss\n ))\n self.visualize(history)\n\n def visualize(self, history):\n plt.plot(history['training_loss'])\n plt.plot(history['validation_loss'])\n plt.title('model loss')\n plt.ylabel('loss')\n plt.xlabel('epoch')\n plt.legend(['train', 'val'], loc='upper left')\n plt.savefig(\"training_{0}.png\".format(self.task))\n plt.clf()"
},
{
"alpha_fraction": 0.701525866985321,
"alphanum_fraction": 0.7130628824234009,
"avg_line_length": 28.217391967773438,
"blob_id": "83c83596fde843555279bd9aee900746e400476d",
"content_id": "09a785641f18dcc2d2ded84e512837a21bb59f23",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2687,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 92,
"path": "/source/ModelPrototypeDriver.py",
"repo_name": "thekatiebr/csc7210-project2",
"src_encoding": "UTF-8",
"text": "# PyTorch Imports\n#from IPython.core.interactiveshell import InteractiveShell\nfrom torchvision import transforms, datasets, models\nimport torch\nimport torch.nn.functional as F\nfrom torch import optim, cuda\nfrom torch.utils.data import Dataset, DataLoader, sampler\nimport torch.nn as nn\nfrom torchsummary import summary # Useful for examining network\nfrom collections import Counter\n# Data Science | Image Tools | MatplotLib\nimport numpy as np\nimport pandas as pd\nimport matplotlib\nimport os, sys, shutil, time, argparse, collections\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.metrics import accuracy_score, recall_score, precision_score, f1_score, roc_auc_score\nfrom tqdm import tqdm\nfrom datetime import date\n# Image manipulations\nfrom PIL import Image\n\n# Visualizations\nimport matplotlib.pyplot as plt\n\nfrom DiabetesModelPT import DiabetesModel\nfrom DiabetesData import DiabeticData\n\n\n\n\nt = 1\ntask = ([0], [4])\n# task = ([0,1], (2,3,4))\nbatch_size=16\nepochs = 15\nroot_dir = \"data/diabetes\"\n# task = ([0,1,2], (3,4))\n\n# train = pd.read_csv(\"data/diabetes_ad_train.csv\")\n# val = pd.read_csv(\"data/diabetes_ad_valid.csv\")\n# test = pd.read_csv(\"data/diabetes_ad_test.csv\")\n\ndata = pd.read_csv(\"data/trainLabels.csv\")\n\nzeroes = data[data['level'] == 0]\nones = data[data['level'] == 4]\ndata = pd.concat([zeroes,ones])\ndata = data.sample(frac=1)\n\ntrain, test = train_test_split(data, test_size=0.1)\ntrain, val = train_test_split(train, test_size=0.1)\n\ntrain = train.reset_index()\ntest = test.reset_index()\nval = val.reset_index()\n\ndata = {'train': DiabeticData(df = train, transform_key=\"train\", root_dir=root_dir, task = task),\n 'valid': DiabeticData(df = val, transform_key=\"valid\", root_dir=root_dir, task = task),\n 'test': DiabeticData(df = test, transform_key=\"test\", root_dir=root_dir, task = task),\n }\n\ndataloaders = {\n 'train': DataLoader(data['train'], batch_size=batch_size, shuffle=True),\n 'valid': DataLoader(data['valid'], batch_size=batch_size, shuffle=True),\n 'test': DataLoader(data['test'], batch_size=1, shuffle=True)\n} \n\n\nmodel = DiabetesModel(measure_uncertainty=False)\n\nmodel.fit(dataloaders['train'], dataloaders['valid'], n_epochs=epochs)\n\npreds = []\ntrues = []\n\npred, true = model.predict(dataloaders['test'])\npreds.append(pred)\npreds = np.asarray(preds)\ntrue = np.asarray(true)\nprint(preds.shape)\nprint(true.shape)\n\npreds = preds[0]\nlabels = [int(p >= 0.5) for p in preds]\n\n\nprint(\"Accuracy: \", accuracy_score(true, labels))\nprint(\"Recall: \", recall_score(true, labels))\nprint(\"Precision: \", precision_score(true, labels))\nprint(\"F1: \", f1_score(true, labels))\nprint(\"AUC: \", roc_auc_score(true, pred))"
},
{
"alpha_fraction": 0.6298507452011108,
"alphanum_fraction": 0.6358209252357483,
"avg_line_length": 24.846153259277344,
"blob_id": "56e3e001469565a219b7139a311e4e512573b0bf",
"content_id": "1f628dec4ab189f9fdc2bf2418ff6feff93eb545",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 335,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 13,
"path": "/source/GetCatDogDataFile.py",
"repo_name": "thekatiebr/csc7210-project2",
"src_encoding": "UTF-8",
"text": "import sys, os\nimport pandas as pd\ndirectory=\"data/dogcat/train\"\nresults = []\nfor filename in os.listdir(directory):\n if filename.endswith(\".jpg\"):\n if \"dog\" in filename:\n results.append([filename, 1])\n else:\n results.append([filename, 0])\n\ndf = pd.DataFrame(results)\ndf.to_csv(\"data/dogcat.csv\")"
},
{
"alpha_fraction": 0.6610220670700073,
"alphanum_fraction": 0.6747807860374451,
"avg_line_length": 33.628273010253906,
"blob_id": "647f973abe0f49b437aa1120552d076ec1d31519",
"content_id": "a9c675b5297fe4752e5d5f8118a419262f8bea34",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6614,
"license_type": "no_license",
"max_line_length": 124,
"num_lines": 191,
"path": "/source/AutoencoderDriver.py",
"repo_name": "thekatiebr/csc7210-project2",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n# coding: utf-8\n\n# ## Define Imports and Determine Device\nimport os, sys\nimport numpy as np\nimport pandas as pd\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.optim as optim\nimport matplotlib.pyplot as plt\nfrom collections import Counter\nfrom torch.utils.data.sampler import SubsetRandomSampler\nfrom sklearn.model_selection import train_test_split\nfrom torch.utils.data import DataLoader\nfrom torchvision import datasets, transforms\nfrom tqdm import tqdm\nos.sys.path.insert(0, \".\")\nfrom DiabetesData import DiabeticData\nfrom Autoencoder import ConvAutoencoder\nfrom ConvVarAutoencoder import ConvVAE\nfrom VGGAutoencoder import VGGAutoencoder\nfrom DogCatData import DogCatData\n\ndevice = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\nprint(device)\ndatatype=\"diabetes\"\nbatch_size=16\nepochs = 5\nmodel_file=\"\"\nnormalize=False\nsize=96\n\nif datatype == \"diabetes\":\n filename = \"data/trainLabels_ad.csv\"\n root_dir = \"data/diabetes\"\n task = ([0],[4])\n classes = ['none', 'proliferative']\n # task = ([0,1,2], [3,4])\n # task = ([0,1,2], (3,4))\n train = pd.read_csv(\"data/diabetes_ad_train.csv\")\n val = pd.read_csv(\"data/diabetes_ad_valid.csv\")\n test = pd.read_csv(\"data/diabetes_ad_test.csv\")\n train = train.sample(frac=0.25)\n train = train.reset_index()\n\n data = {'train': DiabeticData(df = train, transform_key=\"train\", root_dir=root_dir, task = task, normalize = normalize),\n 'valid': DiabeticData(df = val, transform_key=\"valid\", root_dir=root_dir, task = task, normalize = normalize),\n 'test': DiabeticData(df = test, transform_key=\"test\", root_dir=root_dir, task = task, normalize = normalize)\n }\n\n\nelif datatype == \"dogcat\":\n filename = \"data/dogcat_ad.csv\"\n root_dir = \"data/dogcat/train\"\n classes = ['dog', 'cat']\n # task = ([0,1,2], (3,4))\n\n train = pd.read_csv(\"data/dogcat_ad_train.csv\")\n val = pd.read_csv(\"data/dogcat_ad_valid.csv\")\n test = pd.read_csv(\"data/dogcat_ad_test.csv\")\n\n data = {'train': DogCatData(df = train, transform_key=\"train\", root_dir=root_dir, normalize = normalize),\n 'valid': DogCatData(df = val, transform_key=\"valid\", root_dir=root_dir, normalize = normalize),\n 'test': DogCatData(df = test, transform_key=\"test\", root_dir=root_dir, normalize = normalize)\n }\n\nelif datatype == \"fruit\":\n classes = [\"apple\", \"banana\"]\n root_dir=\"data/Fruit-Images-Dataset-master\"\n train = pd.read_csv(\"data/fruit_ad_train.csv\")\n val = pd.read_csv(\"data/fruit_ad_valid.csv\")\n test = pd.read_csv(\"data/fruit_ad_test.csv\")\n print(Counter(train['text_label']))\n print(Counter(val['text_label']))\n print(Counter(test['text_label']))\n data = {'train': DogCatData(df = train, transform_key=\"train\", root_dir=root_dir, normalize = normalize),\n 'valid': DogCatData(df = val, transform_key=\"valid\", root_dir=root_dir, normalize = normalize),\n 'test': DogCatData(df = test, transform_key=\"test\", root_dir=root_dir, normalize = normalize)\n }\n\ndataloaders = {\n 'train': DataLoader(data['train'], batch_size=batch_size, shuffle=True),\n 'valid': DataLoader(data['valid'], batch_size=batch_size, shuffle=True),\n 'test': DataLoader(data['test'], batch_size=batch_size, shuffle=True)\n} \n\nprint(train.shape)\nprint(val.shape)\nprint(test.shape)\n\n\n# model = ConvVAE(1000, device)\nmodel = VGGAutoencoder(device, task=datatype)\nif model_file != \"\":\n model.load_state_dict(torch.load(model_file))\nprint(model)\n\nmodel.fit(epochs, 
dataloaders[\"train\"])\n\ndef imshow(img):\n img = img / 2 + 0.5 # unnormalize\n plt.imshow(np.transpose(img, (1, 2, 0)))\n\n\n# obtain one batch of test images\ndataiter = iter(dataloaders[\"test\"])\nimages, labels = dataiter.next()\n\n# get sample outputs\noutput = model(images.to(device))\n# output = F.softmax(output)\n# prep images for display\nimages = images.numpy()\n\n\n# output is resized into a batch of iages\noutput = output.view(batch_size, 3, size, size)\n# use detach when it's an output that requires_grad\noutput = output.cpu().detach().numpy()\n\n# # plot the first ten input images and then reconstructed images\n# fig, axes = plt.subplots(nrows=2, ncols=10, sharex=True, sharey=True, figsize=(24,4))\n\n# # input images on top row, reconstructions on bottom\n# for images, row in zip([images, output], axes):\n# for img, ax in zip(images, row):\n# ax.imshow(np.squeeze(img))\n# ax.get_xaxis().set_visible(False)\n# ax.get_yaxis().set_visible(False)\n\n# plot the first ten input images and then reconstructed images\nfig, axes = plt.subplots(nrows=2, ncols=10, sharex=True, sharey=True, figsize=(24,4))\nfor idx in np.arange(batch_size):\n ax = fig.add_subplot(2, 20/2, idx+1, xticks=[], yticks=[])\n imshow(output[idx])\n ax.set_title(classes[labels[idx]])\nplt.savefig(\"autoencoded.png\") \nplt.clf()\n\n# plot the first ten input images and then reconstructed images\nfig, axes = plt.subplots(nrows=2, ncols=10, sharex=True, sharey=True, figsize=(24,4))\nfor idx in np.arange(batch_size):\n ax = fig.add_subplot(2, 20/2, idx+1, xticks=[], yticks=[])\n imshow(images[idx])\n ax.set_title(classes[labels[idx]])\nplt.savefig(\"original.png\") \nplt.clf()\n\ndataloaders['test'] = DataLoader(data['test'], batch_size=1, shuffle=True)\n\nresults = []\nresults_cols = [\"Image Label\", \"Reconstruction Loss\"]\nfor x, y in dataloaders['test']:\n X = x.to(device)\n output = model(X)\n output = output.cpu().detach().numpy()\n for i in range(y.shape[0]):\n ls = 0\n image = x[i].numpy()\n ouptut = output[i]\n label = y[i].numpy()\n ls = np.sum(np.square(image.ravel() - output.ravel()))\n # ls = model.criterion(output, image)\n results.append([label, ls])\n\nresults = pd.DataFrame(results, columns=results_cols)\nresults.to_csv(\"reconstruction_error.csv\")\n\nprint(results)\nlabel_1 = results[results[\"Image Label\"] == 1]\nlabel_0 = results[results[\"Image Label\"] == 0]\nprint(label_0)\nprint(label_1)\navg_1 = np.mean(label_1['Reconstruction Loss'].values)\navg_0 = np.mean(label_0['Reconstruction Loss'].values)\n\nprint(\"Average Reconstruction Error (Prediction = 0)\", avg_0)\nprint(\"Average Reconstruction Error (Prediction = 1)\", avg_1)\n\nplt.hist(label_1['Reconstruction Loss'].values, density=False, bins=30, color='blue')\nplt.hist(label_0['Reconstruction Loss'].values, density=False, bins=30, alpha = 0.5, color='yellow')\nplt.xlabel('MSE')\nplt.ylabel('Frequency')\nplt.savefig('errordist.png')\n#find error threshold on validation set\n\n\n\n#evaluate on testing data\n"
},
{
"alpha_fraction": 0.6380510330200195,
"alphanum_fraction": 0.6817478537559509,
"avg_line_length": 28.067415237426758,
"blob_id": "e09381e779144ee9f6faa0cef79cc55327ef1ef7",
"content_id": "68373f2288e731ef8815bd8109040e7b0b00543e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2586,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 89,
"path": "/source/KerasTest.py",
"repo_name": "thekatiebr/csc7210-project2",
"src_encoding": "UTF-8",
"text": "import tensorflow as tf\nfrom tensorflow import keras\n\nfrom keras_preprocessing.image import ImageDataGenerator\nfrom sklearn.model_selection import train_test_split\nimport pandas as pd\nimport numpy as np\n\nfilename = \"data/dogcat_ad.csv\"\nroot_dir = \"data/dogcat/train\"\nclasses = ['cat', 'dog']\nx='filename'\ny='text_label'\n# task = ([0,1,2], (3,4))\n\ndata = pd.read_csv(filename)\n# data = data.sample(frac=0.25)\ntrain, test = train_test_split(data, test_size=0.1)\ntrain, val = train_test_split(train, test_size=0.1)\n\ntrain = train[train[\"label\"] == 1]\n#filter out 1s from training set\n\n\n\ntrain = train.reset_index()\ntest = test.reset_index()\nval = val.reset_index()\n\ndatagen = ImageDataGenerator(rescale=1./255)\ntest_datagen = ImageDataGenerator(rescale=1./255)\n\ntrain_generator=datagen.flow_from_dataframe(\n dataframe=train,\n directory=root_dir,\n x_col=x,\n y_col=y,\n batch_size=32,\n seed=42,\n shuffle=True,\n class_mode=\"categorical\",\n target_size=(100,100))\n\nvalid_generator=datagen.flow_from_dataframe(\n dataframe=val,\n directory=root_dir,\n x_col=x,\n y_col=y,\n subset=\"validation\",\n batch_size=32,\n seed=42,\n shuffle=True,\n class_mode=\"categorical\",\n target_size=(100,100))\n\ntest_generator=test_datagen.flow_from_dataframe(\n dataframe=test,\n directory=root_dir,\n x_col=x,\n y_col=y,\n batch_size=32,\n seed=42,\n shuffle=False,\n class_mode='categorical',\n target_size=(100,100))\n\ninput_img = tf.keras.Input(shape=(100, 100, 1))\n\nx = tf.keras.layers.Conv2D(16, (3, 3), activation='relu', padding='same')(input_img)\nx = tf.keras.layers.MaxPooling2D((2, 2), padding='same')(x)\nx = tf.keras.layers.Conv2D(8, (3, 3), activation='relu', padding='same')(x)\nx = tf.keras.layers.MaxPooling2D((2, 2), padding='same')(x)\nx = tf.keras.layers.Conv2D(8, (3, 3), activation='relu', padding='same')(x)\nencoded = tf.keras.layers.MaxPooling2D((2, 2), padding='same')(x)\n\n# at this point the representation is (4, 4, 8) i.e. 128-dimensional\n\nx = tf.keras.layers.Conv2D(8, (3, 3), activation='relu', padding='same')(encoded)\nx = tf.keras.layers.UpSampling2D((2, 2))(x)\nx = tf.keras.layers.Conv2D(8, (3, 3), activation='relu', padding='same')(x)\nx = tf.keras.layers.UpSampling2D((2, 2))(x)\nx = tf.keras.layers.Conv2D(16, (3, 3), activation='relu')(x)\nx = tf.keras.layers.UpSampling2D((2, 2))(x)\ndecoded = tf.keras.layers.Conv2D(1, (3, 3), activation='sigmoid', padding='same')(x)\n\nautoencoder = tf.keras.Model(input_img, decoded)\nautoencoder.compile(optimizer='adam', loss='binary_crossentropy')\n\nautoencoder.fit(train_generator)"
},
{
"alpha_fraction": 0.7931689023971558,
"alphanum_fraction": 0.8077166080474854,
"avg_line_length": 62.279998779296875,
"blob_id": "b0fa2715f56f36d5e4af7d028edb01ad5d368ec2",
"content_id": "f5bed1a943720118f44070a4405c348e505526f6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1581,
"license_type": "no_license",
"max_line_length": 239,
"num_lines": 25,
"path": "/README.md",
"repo_name": "thekatiebr/csc7210-project2",
"src_encoding": "UTF-8",
"text": "# CSC 7210: Project 2\n## Katherine Brown\n\n### About\nThis project casts the problem of the computational detection of diabetic retionopathy as a anomaly detection problem. I hypothesized that retinal scans with diabetic retinopathy would contain enough artifiacts to be considered anomalous. \n\nUnfortunately, this was not the case. I was able to develop the autoencoders to detect anomalies in other image datasets, but the autoencoders failed on the diabetic retinopathy data. The issue I believe is twofold: \n1. The diabetic retinopathy images consist of the same general shape. When debugging, I noticed that the autoencoders re-constructed images based on shape and color.\n2. The clinical abnormalities used to diagnose diabetic retinopathy are minute. The error of missing these features is small and overshadowed by the remaining reconstruction.\n\n### Datasets Used\n1. [Diabetic Retinopathy Detection](https://www.kaggle.com/c/diabetic-retinopathy-detection)\n2. [Fruit 360](https://www.kaggle.com/moltean/fruits). I trained the autoencoders on apples and attempted to reconstruct bananas\n\n### Required Package Installation\nYou will need to install the following packages to run any file in the submission. Keras, PyTorch, and Scikit-Learn implement the anomaly detection models. Matplotlib produces the visualizations.\n\n`conda install pandas=0.25.1`\n`conda install numpy=1.17.2`\n`conda install matplotlib`\n`conda install scikit-learn`\n`conda install tensorflow`\n`conda install keras`\n`conda install pytorch torchvision torchaudio cudatoolkit=10.1 -c pytorch`\n`conda install torchvision`"
},
{
"alpha_fraction": 0.5092734098434448,
"alphanum_fraction": 0.5315295457839966,
"avg_line_length": 32.70588302612305,
"blob_id": "1690d28057198c4809ec69f18a74f0df4f19ce3c",
"content_id": "3691d4d0080dc6d4214ff8060bdde3567ebded2f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4583,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 136,
"path": "/source/ConvVarAutoencoder.py",
"repo_name": "thekatiebr/csc7210-project2",
"src_encoding": "UTF-8",
"text": "import torch\nimport torch.nn as nn\nfrom tqdm import tqdm\nimport torch.nn.functional as F\nimport torch.optim as optim\n\nclass Flatten(nn.Module):\n def forward(self, input):\n return input.view(input.size(0), -1)\n\n\nclass Unflatten(nn.Module):\n def __init__(self, channel, height, width):\n super(Unflatten, self).__init__()\n self.channel = channel\n self.height = height\n self.width = width\n\n def forward(self, input):\n return input.view(input.size(0), self.channel, self.height, self.width)\n\n\nclass ConvVAE(nn.Module):\n\n def __init__(self, latent_size, device, task):\n super(ConvVAE, self).__init__()\n self.device = device\n self.latent_size = latent_size\n self.task = task\n self.encoder = nn.Sequential(\n nn.Conv2d(3, 64, kernel_size=3, padding=1),\n nn.ReLU(),\n nn.Conv2d(64, 32, kernel_size=3, padding=1),\n nn.ReLU(),\n Flatten(),\n nn.Linear(294912, 1024),\n nn.ReLU()\n )\n\n # hidden => mu\n self.fc1 = nn.Linear(1024, self.latent_size)\n\n # hidden => logvar\n self.fc2 = nn.Linear(1024, self.latent_size)\n\n self.decoder = nn.Sequential(\n nn.Linear(self.latent_size, 1024),\n nn.ReLU(),\n nn.Linear(1024, 294912),\n nn.ReLU(),\n Unflatten(32, 96, 96),\n nn.ReLU(),\n nn.ConvTranspose2d(32, 64, kernel_size=3, padding=1),\n nn.ReLU(),\n nn.ConvTranspose2d(64, 3, kernel_size=3, padding=1),\n nn.Sigmoid()\n )\n self.to(device)\n self.optimizer = optim.Adam(self.parameters(), lr=1e-3)\n\n def encode(self, x):\n h = self.encoder(x)\n print(x.size())\n mu, logvar = self.fc1(h), self.fc2(h)\n return mu, logvar\n\n def decode(self, z):\n z = self.decoder(z)\n return z\n\n def reparameterize(self, mu, logvar):\n if self.training:\n std = torch.exp(0.5 * logvar)\n eps = torch.randn_like(std)\n return eps.mul(std).add_(mu)\n else:\n return mu\n\n def forward(self, x):\n mu, logvar = self.encode(x)\n z = self.reparameterize(mu, logvar)\n return self.decode(z), mu, logvar\n\n def loss_function(self, recon_x, x, mu, logvar):\n # reconstruction loss\n BCE = F.binary_cross_entropy(recon_x.view(-1, 442368), x.view(-1, 442368), reduction='sum')\n\n # KL divergence loss\n KLD = -0.5 * torch.sum(1 + logvar - mu.pow(2) - logvar.exp())\n\n return BCE + KLD\n\n def fit(self, n_epochs, train_loader, validation_loader=None):\n self.train()\n train_loss = 0\n val_loss = 0\n for epoch in range(n_epochs):\n train_loss_ep = 0\n val_loss_ep = 0\n for batch_idx, (data, _) in tqdm(enumerate(train_loader), total=len(train_loader), desc='train'):\n data = data.to(self.device)\n\n self.optimizer.zero_grad()\n recon_batch, mu, logvar = self.forward(data)\n\n loss = self.loss_function(recon_batch, data, mu, logvar)\n train_loss_ep += loss.item()\n\n loss.backward()\n self.optimizer.step()\n if validation_loader is not None:\n for data in validation_loader:\n images, _ = data\n images = images.to(self.device)\n # clear the gradients of all optimized variables\n self.optimizer.zero_grad()\n # forward pass: compute predicted outputs by passing inputs to the model\n outputs = self.forward(images)\n # calculate the loss\n loss = self.criterion(outputs, images)\n # backward pass: compute gradient of the loss with respect to model parameters\n loss.backward()\n val_loss_ep += loss.item()*images.size(0)\n val_loss_ep /= len(validation_loader.dataset)\n val_loss += val_loss_ep\n history[\"validation_loss\"].append(val_loss)\n train_loss_ep /= len(train_loader.dataset)\n train_loss += train_loss_ep\n print('Epoch: {} \\tTraining Loss: {:.6f}\\tValidation Loss: {:.6f}'.format(\n epoch, \n train_loss,\n 
val_loss\n ))\n torch.save(self.state_dict(), \"models/ConvAE_{0}_{1}.pth\".format(self.task,epoch))\n\n return train_loss"
},
{
"alpha_fraction": 0.5802946090698242,
"alphanum_fraction": 0.599613606929779,
"avg_line_length": 39.60784149169922,
"blob_id": "d7fb5015844a124f96ed2d8ab61bd99403f9f077",
"content_id": "b19a67f88771a4dbf622938bb443aaca90a4fab8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4141,
"license_type": "no_license",
"max_line_length": 175,
"num_lines": 102,
"path": "/source/DiabetesModelPT.py",
"repo_name": "thekatiebr/csc7210-project2",
"src_encoding": "UTF-8",
"text": "# PyTorch Imports \n#from IPython.core.interactiveshell import InteractiveShell\nfrom torchvision import transforms, datasets, models\nimport torch\nfrom torch import optim, cuda\nimport torch.nn.functional as F\nfrom torch.utils.data import Dataset, DataLoader, sampler\nimport torch.nn as nn\nfrom torchsummary import summary # Useful for examining network\nfrom torch.nn import NLLLoss, CrossEntropyLoss\n# Data Science | Image Tools | MatplotLib\nimport numpy as np\nimport pandas as pd\nimport os, sys, shutil, time, argparse\nfrom datetime import date\nfrom sklearn.metrics import accuracy_score, recall_score, precision_score, f1_score, roc_auc_score\nfrom tqdm import tqdm\n\n# Image manipulations\nfrom PIL import Image\n\n# Visualizations\nimport matplotlib.pyplot as plt\n\n\nclass DiabetesModel(nn.Module):\n def __init__(self, measure_uncertainty=True, task = \"diabetes\"):\n super(DiabetesModel, self).__init__()\n self.model_type = \"Densenet121_newtest2\"\n self.model = models.densenet121(pretrained=True)\n # self.model.classifier = nn.Linear(1024, 1024) #densenet121\n self.model.classifier = nn.Identity()\n self.fc_layers = nn.ModuleList([\n # nn.Linear(25088, 1024),\n nn.Linear(1024,512),\n nn.Linear(512,256)\n ]) \n self.classifier_layer = nn.Linear(256, 2)\n self.device = 'cpu'\n if torch.cuda.is_available():\n self.to('cuda')\n self.device = 'cuda'\n print(self.device)\n self.task = task\n self.criterion = nn.CrossEntropyLoss()\n self.measure_uncertainty= measure_uncertainty\n self.optimizer = optim.SGD(self.parameters(), lr=0.0005, momentum=0.985, nesterov=True)\n summary(self.model, input_size=(3,224,224))\n \n def forward(self, x):\n x = self.model(x)\n for layer in self.fc_layers:\n x = layer(x)\n x = F.relu(x)\n x = F.dropout(x, p=0.5, training=self.measure_uncertainty)\n x = self.classifier_layer(x)\n return x\n\n \n\n def fit(self, train_generator, validation_generator=None, n_epochs=50):\n for epoch in range(n_epochs):\n self.train()\n with tqdm(total=len(train_generator)) as pbar:\n for local_batch, local_labels in train_generator:\n self.optimizer.zero_grad() #test comment\n local_batch, local_labels = local_batch.to(self.device), local_labels.to(self.device)\n output = self.forward(local_batch)\n loss = self.criterion(output, local_labels.squeeze())\n loss.backward()\n self.optimizer.step()\n pbar.update(1)\n torch.save(self.state_dict(), \"models/{0}_{1}_{2}.pth\".format(self.model_type, self.task,epoch))\n pred_train, true_train = self.predict(train_generator) \n pred_train = [int(p >= 0.5) for p in pred_train] \n pred_test, true_test = self.predict(validation_generator) \n pred_test = [int(p >= 0.5) for p in pred_test] \n print(\"Epoch {0}: Training Accuracy = {1}; Validation Accuracy = {2}\".format(epoch, accuracy_score(true_train, pred_train), accuracy_score(true_test, pred_test)))\n \n def predict(self, generator):\n self.eval()\n # criterion = nn.NLLLoss()\n test_loss = 0\n correct = 0\n probabilities = []\n predictions = []\n correct = []\n with torch.no_grad():\n for data, target in generator:\n data, target = data.to(self.device), target.to(self.device)\n output = self.forward(data)\n test_loss += self.criterion(output, target).item() # sum up batch loss\n output = F.log_softmax(output, dim=1)\n \n probability = torch.exp(output)\n probability = probability.cpu().numpy()\n target = target.cpu().numpy()\n\n probabilities.extend(probability[:,1])\n correct.extend(target)\n\n return probabilities, correct"
}
] | 11 |
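The DiabetesModel above passes training=self.measure_uncertainty into F.dropout, which keeps dropout active even after model.eval() — the Monte Carlo dropout trick for estimating predictive uncertainty. A minimal sketch of how such a model could be sampled, assuming an instantiated model and an input batch x; mc_dropout_predict is a hypothetical helper, not part of the repository:

    import torch

    def mc_dropout_predict(model, x, n_samples=20):
        # eval() freezes batch-norm statistics, but this model's forward()
        # keeps dropout on via training=measure_uncertainty, so repeated
        # passes over the same input give different logits
        model.eval()
        with torch.no_grad():
            probs = torch.stack([torch.softmax(model(x), dim=1)
                                 for _ in range(n_samples)])
        # predictive mean and per-class spread across the stochastic passes
        return probs.mean(dim=0), probs.std(dim=0)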
karan-shah-25/jetbot
|
https://github.com/karan-shah-25/jetbot
|
442157b68e996d9e04307f613162fa97a3eaad1c
|
34981426d3cba7910697efbbc6c9c6c8d1a84346
|
add67ca9cbe3ec56012f52ea018179eee2482bc1
|
refs/heads/master
| 2022-08-30T06:17:57.576674 | 2020-06-02T02:32:35 | 2020-06-02T02:32:35 | 268,431,985 | 0 | 0 |
MIT
| 2020-06-01T05:20:02 | 2020-05-30T12:29:17 | 2020-05-13T06:46:56 | null |
[
{
"alpha_fraction": 0.6567521095275879,
"alphanum_fraction": 0.682735025882721,
"avg_line_length": 42.02941131591797,
"blob_id": "fa10248e75c90c4864413e52f89be9f5c758feab",
"content_id": "30a95d68d4ba551093542f0a901b5c209d2d0e91",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2925,
"license_type": "permissive",
"max_line_length": 118,
"num_lines": 68,
"path": "/jetbot/robot.py",
"repo_name": "karan-shah-25/jetbot",
"src_encoding": "UTF-8",
"text": "import time\nimport traitlets\nfrom traitlets.config.configurable import SingletonConfigurable\nfrom Adafruit_MotorHAT import Adafruit_MotorHAT\nfrom .motor import Motor\n\n\nclass Robot(SingletonConfigurable):\n \n left_motor1 = traitlets.Instance(Motor)\n left_motor2 = traitlets.Instance(Motor)\t\n right_motor1 = traitlets.Instance(Motor)\n right_motor2 = traitlets.Instance(Motor)\n\n # config\n i2c_bus = traitlets.Integer(default_value=1).tag(config=True)\n left_motor1_channel = traitlets.Integer(default_value=1).tag(config=True)\n left_motor1_alpha = traitlets.Float(default_value=1.0).tag(config=True)\n left_motor2_channel = traitlets.Integer(default_value=3).tag(config=True)\n left_motor2_alpha = traitlets.Float(default_value=1.0).tag(config=True)\n right_motor1_channel = traitlets.Integer(default_value=2).tag(config=True)\n right_motor1_alpha = traitlets.Float(default_value=1.0).tag(config=True)\n right_motor2_channel = traitlets.Integer(default_value=4).tag(config=True)\n right_motor2_alpha = traitlets.Float(default_value=1.0).tag(config=True)\n \n def __init__(self, *args, **kwargs):\n super(Robot, self).__init__(*args, **kwargs)\n self.motor_driver = Adafruit_MotorHAT(i2c_bus=self.i2c_bus)\n self.left_motor1 = Motor(self.motor_driver, channel=self.left_motor1_channel, alpha=self.left_motor1_alpha)\n self.left_motor2 = Motor(self.motor_driver, channel=self.left_motor2_channel, alpha=self.left_motor2_alpha)\n self.right_motor1 = Motor(self.motor_driver, channel=self.right_motor1_channel, alpha=self.right_motor1_alpha)\n self.right_motor2 = Motor(self.motor_driver, channel=self.right_motor2_channel, alpha=self.left_motor2_alpha)\n \n def set_motors(self, left_speed, right_speed):\n self.left_motor1.value = left_speed\n self.left_motor2.value = left_speed\n self.right_motor1.value = right_speed\n self.right_motor2.value = right_speed\n \n def forward(self, speed=1.0, duration=None):\n self.left_motor1.value = speed\n self.left_motor2.value = speed\n self.right_motor1.value = speed\n self.right_motor2.value = speed\n\n def backward(self, speed=1.0):\n self.left_motor1.value = -speed\n self.left_motor2.value = -speed\n self.right_motor1.value = -speed\n self.right_motor2.value = -speed\n\n def left(self, speed=1.0):\n self.left_motor1.value = -speed\n self.left_motor2.value = -speed\n self.right_motor1.value = speed\n self.right_motor2.value = speed\n\n def right(self, speed=1.0):\n self.left_motor1.value = speed\n self.left_motor2.value = speed\n self.right_motor1.value = -speed\n self.right_motor2.value = -speed\n\n def stop(self):\n self.left_motor1.value = 0\n self.left_motor2.value = 0\n self.right_motor1.value = 0\n self.right_motor2.value = 0"
}
] | 1 |
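The Robot class above drives two motors per side through an Adafruit MotorHAT and inherits SingletonConfigurable, so repeated instantiation reuses one object. A brief usage sketch, assuming the package is importable as jetbot and the MotorHAT hardware is attached (both assumptions, not shown in the record):

    import time
    from jetbot import Robot  # assumed package layout

    robot = Robot()             # singleton: later calls return this same instance
    robot.set_motors(0.4, 0.4)  # both sides forward at 40% of full speed
    time.sleep(1.0)
    robot.left(speed=0.3)       # spin left: left motors reverse, right motors forward
    time.sleep(0.5)
    robot.stop()                # all four motor values back to 0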
happyberry/Projekt-obrazy
|
https://github.com/happyberry/Projekt-obrazy
|
0dc0dcb23e93f114e809703fd923b9c9a0a1f5b5
|
603a066e326e74ce2db92e4fa76a249638526590
|
5c56f40fe88edc023223a9837c4aeab11ac81375
|
refs/heads/master
| 2020-09-13T11:55:59.975270 | 2019-11-19T23:09:36 | 2019-11-19T23:09:36 | 222,770,878 | 0 | 1 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5748974084854126,
"alphanum_fraction": 0.5954172611236572,
"avg_line_length": 37.26174545288086,
"blob_id": "43a355600bb491a4aa458cf3f6e9be13ea2c9564",
"content_id": "e6b66c710021f08c031a9625bca08bf100744164",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5848,
"license_type": "no_license",
"max_line_length": 161,
"num_lines": 149,
"path": "/kostki.py",
"repo_name": "happyberry/Projekt-obrazy",
"src_encoding": "UTF-8",
"text": "from time import sleep\r\nfrom skimage import io, draw, exposure, filters, color, measure, morphology, transform\r\nfrom matplotlib import pyplot as plt\r\nfrom scipy import ndimage as ndi\r\nimport numpy as np\r\nimport functools\r\n\r\n\r\ndef getDistance(x1, x2, y1, y2):\r\n return ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 0.5\r\n\r\n\r\ndef drawCircularContours(blackWhite, tolerance):\r\n contours = measure.find_contours(blackWhite, 0.5, \"high\")\r\n blackWhite[:, :] = 0\r\n for n, contour in enumerate(contours):\r\n numberOfPointsInContour = len(contour)\r\n if 50 < numberOfPointsInContour < 500:\r\n\r\n centroidx = np.sum(contour[:, 0]) / numberOfPointsInContour\r\n centroidy = np.sum(contour[:, 1]) / numberOfPointsInContour\r\n # print(centroidx, centroidy)\r\n\r\n distancesToCentroid = []\r\n sumOfDistances = 0\r\n\r\n for point in contour:\r\n distanceFromPointToCenter = getDistance(point[0], centroidx, point[1], centroidy)\r\n distancesToCentroid.append(distanceFromPointToCenter)\r\n sumOfDistances += distanceFromPointToCenter\r\n avgDistanceToCentroid = sumOfDistances / numberOfPointsInContour\r\n\r\n deviation = 0\r\n maximalSingleDeviation = 0\r\n\r\n for i in range(numberOfPointsInContour):\r\n if abs(avgDistanceToCentroid - distancesToCentroid[i]) > maximalSingleDeviation:\r\n maximalSingleDeviation = abs(avgDistanceToCentroid - distancesToCentroid[i])\r\n deviation += abs(avgDistanceToCentroid - distancesToCentroid[i])\r\n deviation /= numberOfPointsInContour\r\n # print(deviation)\r\n\r\n if deviation < tolerance and maximalSingleDeviation < 0.4 * avgDistanceToCentroid:\r\n rr, cc = draw.polygon(contour[:, 1], contour[:, 0])\r\n blackWhite[cc, rr] = 1\r\n # plt.plot(centroidy, centroidx, \"bo\", markersize=\"1\")\r\n else:\r\n rr, cc = draw.polygon(contour[:, 1], contour[:, 0])\r\n blackWhite[cc, rr] = 0\r\n\r\n\r\ndef countResult(image):\r\n contours = measure.find_contours(image, 0.5, \"high\")\r\n sumaoczek = 0\r\n srodkioczek = []\r\n for n, contour in enumerate(contours):\r\n numberOfPointsInContour = len(contour)\r\n if 50 < numberOfPointsInContour < 500:\r\n sumaoczek += 1\r\n plt.plot(contour[:, 1], contour[:, 0], linewidth=3)\r\n \"\"\"centroidx = np.sum(contour[:, 1]) / len(contour)\r\n centroidy = np.sum(contour[:, 0]) / len(contour)\r\n srodkioczek.append([centroidx, centroidy])\r\n #print(srodkioczek[-1])\r\n\r\n odlegloscimiedzyoczkami = []\r\n for i in range(len(srodkioczek)):\r\n for j in range(i + 1, len(srodkioczek)):\r\n odlegloscimiedzyoczkami.append(getDistance(srodkioczek[i][0], srodkioczek[j][0], srodkioczek[i][1], srodkioczek[j][1]))\r\n minimalnaodleglosc = 10000\r\n for odleglosc in odlegloscimiedzyoczkami:\r\n if odleglosc < minimalnaodleglosc:\r\n minimalnaodleglosc = odleglosc\r\n przydzieloneoczka = [False] * len(srodkioczek)\r\n kostki = []\r\n for i in range(len(srodkioczek)):\r\n if not przydzieloneoczka[i]:\r\n kostki.append(1)\r\n przydzieloneoczka[i] = True\r\n for j in range(i + 1, len(srodkioczek)):\r\n if not przydzieloneoczka[j] and getDistance(srodkioczek[i][0], srodkioczek[j][0], srodkioczek[i][1], srodkioczek[j][1]) < 3 * minimalnaodleglosc:\r\n kostki[-1] = kostki[-1] + 1\r\n przydzieloneoczka[j] = True\r\n if kostki[-1] == 6:\r\n break\"\"\"\r\n print(\"Suma oczek:\", sumaoczek)\r\n #kostki = sorted(kostki)\r\n #for i in range(len(kostki)):\r\n #kostki[i] = str(kostki[i])\r\n return sumaoczek\r\n #return sumaoczek, \", \".join(kostki)\r\n\r\n\r\ndef processImage(dice):\r\n\r\n if (len(dice)) > 1000:\r\n dice = 
transform.resize(dice, (1000, int(dice.shape[1] * 1000 / dice.shape[0])))\r\n # io.imshow(dice)\r\n # plt.show()\r\n\r\n greyscale = color.rgb2gray(dice)\r\n greyscale = filters.gaussian(greyscale, 1.25)\r\n greyscaleEdges = filters.sobel(greyscale)\r\n greyscaleEdges = exposure.rescale_intensity(greyscaleEdges)\r\n # io.imshow(greyscaleEdges)\r\n # plt.show()\r\n\r\n blackWhite = greyscaleEdges > 0.14\r\n # io.imshow(blackWhite)\r\n # plt.show()\r\n blackWhite = morphology.remove_small_objects(blackWhite, 50)\r\n drawCircularContours(blackWhite, 2.5)\r\n # io.imshow(blackWhite)\r\n # plt.show()\r\n blackWhite = ndi.binary_fill_holes(blackWhite)\r\n blackWhite = morphology.remove_small_objects(blackWhite, 150)\r\n #suma, countedResult = countResult(blackWhite)\r\n suma = countResult(blackWhite)\r\n fullResult = \"Suma oczek: \" + str(suma)\r\n #kosteczki = \"Kostki: \" + countedResult\r\n io.imshow(dice)\r\n plt.text(dice.shape[1] // 2, 0.1 * len(dice), fullResult, fontsize=16,\r\n bbox={'facecolor': 'white', 'alpha': 0.7}, ha='center')\r\n #plt.text(dice.shape[1] // 2, 0.9 * len(dice), kosteczki, fontsize=16,\r\n # bbox={'facecolor': 'white', 'alpha': 0.7}, ha='center')\r\n plt.axis('off')\r\n plt.show()\r\n # plt.savefig(\"k0\" + str(i) + \".jpg\", bbox_inches='tight')\r\n plt.clf()\r\n sleep(0.1)\r\n\r\n\r\ndef main():\r\n\r\n message = \"Wpisz nazwe pliku z obrazkiem. Plik musi znajdowac sie w folderze 'kostki'. Wpisz q, jesli chcesz wyjsc\\n\"\r\n while True:\r\n userAnswer = input(message)\r\n if userAnswer == 'q':\r\n break\r\n try:\r\n dice = io.imread(\"kostki/\" + userAnswer + \".jpg\")\r\n except FileNotFoundError:\r\n print(\"Bledna nazwa, sprobuj ponownie\")\r\n continue\r\n processImage(dice)\r\n\r\n\r\nif __name__ == '__main__':\r\n main()"
},
{
"alpha_fraction": 0.8235294222831726,
"alphanum_fraction": 0.8235294222831726,
"avg_line_length": 24.5,
"blob_id": "b9a39fe916ea2fb3ea6c00f9622492be581899bf",
"content_id": "c59c09d3cb57873187b981a09c70b0d3f02b18f4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 52,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 2,
"path": "/README.md",
"repo_name": "happyberry/Projekt-obrazy",
"src_encoding": "UTF-8",
"text": "# Projekt-obrazy\nProgram liczący oczka na kostkach\n"
}
] | 2 |
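kostki.py accepts a contour as a die pip when the average deviation of point-to-centroid distances stays under a tolerance and no single deviation exceeds 40% of the mean radius. The same roundness test, restated as a standalone NumPy sketch (is_roughly_circular is a hypothetical name, not a function from the repository):

    import numpy as np

    def is_roughly_circular(contour, tolerance=2.5):
        # contour: (N, 2) array of (row, col) points, as returned by
        # skimage.measure.find_contours
        centroid = contour.mean(axis=0)
        dists = np.linalg.norm(contour - centroid, axis=1)
        deviations = np.abs(dists - dists.mean())
        return deviations.mean() < tolerance and deviations.max() < 0.4 * dists.mean()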
n1056722/Djangostock
|
https://github.com/n1056722/Djangostock
|
bd88001ee71259ebace2527d7851161118a3431d
|
e8b233bb65a53af1c21292e2b43bd7c304895312
|
2edaa506ae397bb65dc3b82b6c6c50e90ac246f9
|
refs/heads/main
| 2023-04-26T18:19:15.911698 | 2021-06-18T12:34:45 | 2021-06-18T12:34:45 | 366,678,004 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5093265771865845,
"alphanum_fraction": 0.5270313024520874,
"avg_line_length": 35.75581359863281,
"blob_id": "c3940c85c6aeff395ed8473ed355e9dd5f253dc3",
"content_id": "5edcacd22d5e380ffdd911d2533e284a1a16fa42",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3235,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 86,
"path": "/middleware/middleware.py",
"repo_name": "n1056722/Djangostock",
"src_encoding": "UTF-8",
"text": "import hmac\n\nfrom django.http import JsonResponse, HttpResponse\nfrom django.utils.deprecation import MiddlewareMixin\nfrom rest_framework import status\n\nfrom appuser.models import AppUser, AppUserLog\n\n\nclass TokenCheckMiddleware(MiddlewareMixin):\n \"\"\"\n 使用中間建判斷打來的token是哪個使用者\n \"\"\"\n\n def process_request(self, request):\n if request.path.startswith('/item/api/'):\n token = request.META.get('HTTP_X_STOCK_TOKEN')\n print(request.META)\n if token:\n try:\n app_user = AppUser.objects.filter(token=token, is_enable=True).first()\n request.app_user = app_user\n except Exception as e:\n data = {\n 'msg': 'token error',\n }\n print(str(e))\n return JsonResponse(status=status.HTTP_401_UNAUTHORIZED, data=data)\n else:\n data = {\n 'msg': 'token error',\n }\n return JsonResponse(status=status.HTTP_401_UNAUTHORIZED, data=data)\n\n\nclass SignatureCheckMiddleware(MiddlewareMixin):\n \"\"\"\n 確認傳過來的簽章是否有被竄改\n \"\"\"\n\n def process_request(self, request):\n if request.path.startswith('/item/api/'):\n request_signature = request.META.get('HTTP_X_STOCK_SIGNATURE')\n # print(request.META)\n # try:\n app_user = request.app_user\n secret_key = app_user.secret_key\n print(secret_key)\n method = request.method\n path = request.path\n print(method, path)\n get_dict = dict(request.GET) # {'uts': ['1622453552']} 取得uts裝進字典 1970\n sorted_get_dict = {k: v[0] for k, v in sorted(get_dict.items())} # {'uts': '1622454017'}\n sorted_get_dict_string = \"&\".join(f\"{k}={v}\" for k, v in sorted_get_dict.items()) # uts=1622454289\n params = sorted_get_dict_string\n payload = method + path + params\n new_signature = hmac.new(secret_key.encode(), payload.encode(), 'sha256').hexdigest()\n if new_signature != request_signature:\n print(new_signature)\n print(request_signature)\n data = {\n 'msg': 'signature'\n }\n return JsonResponse(status=status.HTTP_401_UNAUTHORIZED, data=data)\n # except Exception as e:\n # data = {\n # 'status': 404,\n # }\n # print(str(e))\n # return JsonResponse(data=data)\n\n\nclass VisitTimesMiddleware(MiddlewareMixin):\n def process_response(self, request, response):\n if request.path.startswith('/item/api/'):\n try:\n if 200 <= response.status_code <= 299:\n path = request.path\n app_user = request.app_user\n app_user_log = AppUserLog()\n app_user_log.path = path\n app_user_log.app_user = app_user\n app_user_log.save()\n except Exception as e:\n print(str(e))\n return response\n\n\n"
},
{
"alpha_fraction": 0.6328240633010864,
"alphanum_fraction": 0.6574667096138,
"avg_line_length": 29.74242401123047,
"blob_id": "573b33e2c254644f2236277f0d9715c96090e208",
"content_id": "0a9809fab530cd238cc5ee6e783156536ad673e6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2055,
"license_type": "no_license",
"max_line_length": 95,
"num_lines": 66,
"path": "/stock/locustfile.py",
"repo_name": "n1056722/Djangostock",
"src_encoding": "UTF-8",
"text": "import hmac\nimport json\nimport os\nfrom pathlib import Path\n\nimport environ\n\nfrom locust import HttpUser, TaskSet, task, between\nimport time\n\n# load env\nBASE_DIR = Path(__file__).resolve().parent.parent\nenv = environ.Env()\nenv.read_env(os.path.join(BASE_DIR, 'envs/.local'))\n\nuts = int(time.time())\napi_secret = env('API_SECRET')\n# host = 'http:127.0.0.1:8000'\napi_url = f'/item/api/list/?uts={uts}'\nx_stock_token = env('X_STOCK_TOKEN')\npayload = f'GET/item/api/list/uts={uts}'\nx_stock_signature = hmac.new(api_secret.encode(), payload.encode(), 'sha256').hexdigest()\n\n\ndef auth_header():\n return {'x-stock-token': x_stock_token, 'x-stock-signature': x_stock_signature}\n\n\n# 任務類\nclass WebsiteTasks(TaskSet):\n @task\n def on_start(self):\n self.tokenInfo = None\n headers = auth_header()\n response = self.client.request(method=\"GET\", url=api_url, headers=headers)\n if response.status_code == 200:\n self.tokenInfo = json.loads(response.content)\n\n\nclass WebsiteUser(HttpUser):\n tasks = [WebsiteTasks]\n # host = host\n wait_time = between(0.5, 10)\n\n# locust -f locustfile.py --host http://127.0.0.1:8000 --web-host 127.0.0.1 啟動蝗蟲指令\n\n\n# uts = int(time.time()) # 時間戳記\n# url = f'http://127.0.0.1:8000/item/api/list/?uts={uts}'\n# payload = f'GET/item/api/list/uts={uts}'\n# api_secret_key = '50fOE3yM3UCgfiBBZiJwZthentA'\n# x_stock_signature = hmac.new(api_secret_key.encode(), payload.encode(), 'sha256').hexdigest()\n# headers = {'x-stock-token': '5c6dd3ce9022fd807d50',\n# 'x-stock-signature': x_stock_signature}\n# print(x_stock_signature)\n# response = requests.get(url=url, headers=headers)\n\n# print('LOGIN RESULT:', response.status_code, response.content)\n\n# def dummy_test(self):\n# if self.tokenInfo is not None:\n# token = self.tokenInfo['token']\n# headers = auth_header()\n# headers['Authorization'] = 'Bearer' + token\n# response = self.client.request(method='GET', url=api_url, headers=headers)\n# print('API RESULT:', response.status_code, response.content)\n"
},
{
"alpha_fraction": 0.625,
"alphanum_fraction": 0.641566276550293,
"avg_line_length": 20.419355392456055,
"blob_id": "80089e5558e98de37ec1e0cfa95bad76e77ff537",
"content_id": "6869f1fe64dccc002aee00f409275f9cc4649ddb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 664,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 31,
"path": "/appuser/models.py",
"repo_name": "n1056722/Djangostock",
"src_encoding": "UTF-8",
"text": "from django.contrib.admin.views import main\nfrom django.db import models\nfrom django.contrib.auth.models import User\n\n\n# Create your models here.\nclass AppUser(models.Model):\n name = models.CharField(\n max_length=100\n )\n token = models.CharField(\n max_length=100\n )\n secret_key = models.CharField(\n max_length=100\n )\n is_enable = models.BooleanField(\n default=False\n )\n\n\nclass AppUserLog(models.Model):\n app_user = models.ForeignKey(\n AppUser, on_delete=models.PROTECT\n )\n create_at = models.DateTimeField(\n auto_now_add=True\n )\n path = models.CharField(\n max_length=20\n )\n"
},
{
"alpha_fraction": 0.8125,
"alphanum_fraction": 0.8125,
"avg_line_length": 20.33333396911621,
"blob_id": "eb7771ec23626bc2740fbd712efe28ce32a3d699",
"content_id": "66459da6048f6e4fa526e7a0213e4c647c5313e4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 128,
"license_type": "no_license",
"max_line_length": 34,
"num_lines": 6,
"path": "/appuser/admin.py",
"repo_name": "n1056722/Djangostock",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\n\n# Register your models here.\nfrom appuser.models import AppUser\n\nadmin.site.register(AppUser)\n"
},
{
"alpha_fraction": 0.6253870129585266,
"alphanum_fraction": 0.6651186943054199,
"avg_line_length": 28.363636016845703,
"blob_id": "49c93e75fe87ef13dfe7de93d7ed4071fea023aa",
"content_id": "8bc2c268e68dd0df88ce31ea8fb9ed829824b494",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2000,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 66,
"path": "/stock/sdk.py",
"repo_name": "n1056722/Djangostock",
"src_encoding": "UTF-8",
"text": "import hmac\nimport logging\nimport time\nimport os\nimport environ\nimport requests\nfrom pathlib import Path\nimport json\n\n# load env\nBASE_DIR = Path(__file__).resolve().parent.parent\nenv = environ.Env()\nenv.read_env(os.path.join(BASE_DIR, 'envs/.local'))\nfrom pip._vendor.pyparsing import Optional\nfrom rest_framework.response import Response\n\n# @staticmethod\n# def base_get(base_path:str='', detail_path: str='', params:Optional[dict]=None)-> Response:\n# logging.info(\"请求方式:GET, 请求url: %s , 请求参数: %s \" % (base_path + detail_path, params))\n# response = requests.get(base_path + detail_path, params=params)\n# logging.info(\"请求方式:GET, 请求url: %s , 请求参数: %s , 结果:%s\" % (base_path + detail_path, params, response))\n# return response\n\n\nuts = int(time.time()) # 時間戳記\nurl = f'http://127.0.0.1:8000/item/api/list/?uts={uts}'\npayload = f'GET/item/api/list/uts={uts}'\napi_secret_key = env('API_SECRET')\nx_stock_token = env('X_STOCK_TOKEN')\nx_stock_signature = hmac.new(api_secret_key.encode(), payload.encode(), 'sha256').hexdigest()\nheaders = {'x-stock-token': x_stock_token,\n 'x-stock-signature': x_stock_signature}\nprint(x_stock_signature)\nresponse = requests.get(url=url, headers=headers)\nprint(response.status_code)\nprint(response.json())\n\n# res = requests.get(\"https://nidss.cdc.gov.tw/nndss/DiseaseMap?id=19CoV\")\n#\n# print(res.text)\n\n\n# url = 'http://127.0.0.1:8000/item/api/7/detail'\n# headers = {\"token\": \"b528ca33d4681b39ed7d\"}\n# r = requests.get(url=url, headers=headers)\n# print(r.status_code)\n# print(r.json())\n# \"signature\": \"8427ecb4f757f58798fb7a6ab8c6fb97882d153d63246e4eb8839406492a3874\"\n\n# class Shiba:\n# pee_length = 10\n#\n# def __init__(self, height, weight):\n# self.height = height\n# self.weight = weight\n#\n# @classmethod\n# def pee(cls):\n# print(\"pee\" + \".\" * cls.pee_length)\n#\n#\n# Shiba.pee()\n# # result: pee..........\n#\n# black_shiba = Shiba(30, 40)\n# black_shiba.pee()\n"
},
{
"alpha_fraction": 0.5789081454277039,
"alphanum_fraction": 0.5882243514060974,
"avg_line_length": 29.84482765197754,
"blob_id": "a8fc5e741ebba5ed5bab6c279e05c1ba13d7f3b0",
"content_id": "831a207e86a2c7108bb2f61fa256a349d56271af",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5593,
"license_type": "no_license",
"max_line_length": 117,
"num_lines": 174,
"path": "/appuser/views.py",
"repo_name": "n1056722/Djangostock",
"src_encoding": "UTF-8",
"text": "from datetime import timedelta\nfrom django.db import connection\n\nfrom django.contrib.auth.decorators import login_required\nfrom django.db.models import Count\nfrom django.db.models.functions import ExtractHour, ExtractMinute\nfrom django.http import HttpResponseRedirect\nfrom django.shortcuts import render, redirect, get_object_or_404\nfrom django.utils import timezone\n# Create your views here.\nfrom django.urls import reverse\nimport secrets\n\nfrom django.utils.datetime_safe import datetime\n\nfrom appuser.models import AppUser, AppUserLog\n\n\n@login_required\ndef list(request):\n appusers = AppUser.objects.all()\n appuser_list = []\n for appuser in appusers:\n appuser_list.append(\n {\n 'id': appuser.id,\n 'name': appuser.name,\n 'token': appuser.token,\n 'secretkey': appuser.secret_key,\n 'is_enable': appuser.is_enable,\n }\n )\n data = {\n 'title': '股票管理後台',\n 'appuser_list': appuser_list,\n }\n return render(request, 'appuser/list.html', context=data)\n\n\n@login_required\ndef add(request):\n if request.method == 'GET':\n data = {\n 'title': '新增appuser'\n }\n return render(request, 'appuser/add.html', context=data)\n elif request.method == 'POST':\n name = request.POST.get('name', '')\n if name == '':\n return render(request, 'appuser/add.html')\n token = secrets.token_hex(10)\n secretkey = secrets.token_urlsafe(20)\n appuser = AppUser()\n appuser.name = name\n appuser.token = token\n appuser.secret_key = secretkey\n appuser.save()\n return redirect(reverse('appuser:list'))\n\n\n@login_required\ndef edit(request, pk):\n appuser = get_object_or_404(AppUser, id=pk)\n if request.method == 'GET':\n data = {\n 'title': '修改權限',\n 'pk': appuser.id,\n 'name': appuser.name,\n 'is_enable': appuser.is_enable,\n }\n return render(request, 'appuser/edit.html', context=data)\n elif request.method == 'POST':\n is_enable = request.POST.get('is_enable')\n appuser.is_enable = is_enable\n appuser.save(update_fields=['is_enable'])\n return redirect(reverse('appuser:list'))\n\n\ndef pie_chart(request): # 用圓餅圖顯示24小時內 誰打了Api打了幾次\n labels = []\n data = []\n now = timezone.now()\n start = now - timedelta(hours=23, minutes=59, seconds=59)\n qs = AppUserLog.objects.values('app_user_id').filter(create_at__gt=start).annotate(total=Count('path')).order_by(\n 'total')\n for app_user in qs:\n labels.append(app_user['app_user_id'])\n data.append(app_user['total'])\n log_data = {\n 'labels': labels,\n 'data': data,\n }\n return render(request, 'appuser/pie_chart.html', context=log_data)\n\n\ndef line_chart(request): # 用線圖顯示24小時內每小時api打的次數\n labels = []\n data = []\n now = timezone.now()\n start = now - timedelta(hours=23, minutes=59, seconds=59)\n hour_data = AppUserLog.objects.filter(create_at__gt=start)\n count_res = hour_data.annotate(hour=ExtractHour('create_at')).values('hour').order_by('hour').annotate(\n count=Count('path'))\n for i in count_res:\n labels.append(i['hour'])\n data.append(i['count'])\n \"\"\"\n 做出小時對應次數的mapping\n {\n 11: 1,\n 12: 6,\n 14: 4,\n }\n \"\"\"\n label_mapping_data = {}\n for i in range(len(labels)):\n label_mapping_data[labels[i]] = data[i]\n # 開始補資料`\n\n new_labels = []\n new_data = []\n for i in range(24):\n if i in label_mapping_data: # 如果該小時有資料則用資料\n new_labels.append(i)\n new_data.append(label_mapping_data[i])\n else: # 沒的話就補0\n new_labels.append(i)\n new_data.append(0)\n\n # 旋轉, 不然都是從0時開始, 可以根據現在幾點去做旋轉\n\n def rotate(l, n):\n return l[n:] + l[:n]\n\n now_hour = datetime.now().hour + 1\n new_labels = rotate(new_labels, now_hour)\n new_data = rotate(new_data, 
now_hour)\n\n labels_min = []\n data_min = []\n now_min = timezone.now()\n start_min = now_min - timedelta(minutes=59, seconds=59)\n min_data = AppUserLog.objects.filter(create_at__gt=start_min)\n count_min = min_data.annotate(minute=ExtractMinute('create_at')).values('minute').order_by('minute').annotate(\n count=Count('path'))\n for i in count_min:\n labels_min.append(i['minute'])\n data_min.append(i['count'])\n\n label_min_mapping_data = {}\n for i in range(len(labels_min)):\n label_min_mapping_data[labels_min[i]] = data_min[i]\n new_labels_min = []\n new_data_min = []\n for i in range(60):\n if i in label_min_mapping_data: # 如果該分鐘有資料則用資料\n new_labels_min.append(i)\n new_data_min.append(label_min_mapping_data[i])\n else: # 沒的話就補0\n new_labels_min.append(i)\n new_data_min.append(0)\n\n now_minute = datetime.now().minute + 1\n new_labels_min = rotate(new_labels_min, now_minute)\n new_data_min = rotate(new_data_min, now_minute)\n log_data = {\n 'labels': new_labels,\n 'data': new_data,\n 'labels_min': new_labels_min,\n 'data_min': new_data_min,\n }\n\n print(log_data)\n return render(request, 'appuser/line_chart.html', context=log_data, )\n"
},
{
"alpha_fraction": 0.8214285969734192,
"alphanum_fraction": 0.8214285969734192,
"avg_line_length": 13,
"blob_id": "fc60400da27332edbf78ac99ffa74ce34eb20b2e",
"content_id": "c8ae954ee94b60eca11c5935e9741dacfa671acf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 40,
"license_type": "no_license",
"max_line_length": 13,
"num_lines": 2,
"path": "/README.md",
"repo_name": "n1056722/Djangostock",
"src_encoding": "UTF-8",
"text": "# djangostock\ndjango 股票後台練習\n"
},
{
"alpha_fraction": 0.5930736064910889,
"alphanum_fraction": 0.6147186160087585,
"avg_line_length": 18.16666603088379,
"blob_id": "2b33a9bcc3d5a5060132a35760343b5828ba3e31",
"content_id": "5bdca5f4c4bb993b7f71c70eb4c95d2847a2d012",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 231,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 12,
"path": "/item/models.py",
"repo_name": "n1056722/Djangostock",
"src_encoding": "UTF-8",
"text": "from django.db import models\n\n\n# Create your models here.\nclass Item(models.Model):\n name = models.CharField(\n max_length=20,\n )\n value = models.DecimalField(\n max_digits=10,\n decimal_places=2,\n )\n\n"
},
{
"alpha_fraction": 0.6655629277229309,
"alphanum_fraction": 0.6655629277229309,
"avg_line_length": 26.454545974731445,
"blob_id": "9ea586e3895e5e76ce8cf6ca7975738b9076a400",
"content_id": "0b51ffe9fdc36e858b994938f138162d69517425",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 632,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 22,
"path": "/item/serializers.py",
"repo_name": "n1056722/Djangostock",
"src_encoding": "UTF-8",
"text": "from rest_framework import serializers\n\nfrom item.models import Item\n\n\n# 如果序列化是數據庫的表盡量用ModelSerializer\nclass ItemListSerializer(serializers.ModelSerializer):\n class Meta:\n model = Item\n fields = ('id', 'name',)\n\n def to_representation(self, item):\n return {'items_id': item.id, 'items_name': item.name}\n\n\nclass ItemRetrieveSerializer(serializers.ModelSerializer):\n class Meta:\n model = Item\n fields = ('id', 'name', 'value',)\n\n def to_representation(self, item):\n return {'items_id': item.id, 'items_name': item.name, 'items_value': str(item.value)}\n"
},
{
"alpha_fraction": 0.6267995834350586,
"alphanum_fraction": 0.628792941570282,
"avg_line_length": 30.35416603088379,
"blob_id": "6ddf39082f0ec18f7a2b3eae2dda7d271fd222c8",
"content_id": "a00da599c768461a345d884ba994acf313cedcf2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4709,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 144,
"path": "/item/views.py",
"repo_name": "n1056722/Djangostock",
"src_encoding": "UTF-8",
"text": "from datetime import timedelta\nimport re\n\nfrom django.contrib import messages\nfrom django.contrib.auth.decorators import login_required\nfrom django.http import JsonResponse\nfrom django.shortcuts import render, redirect, get_object_or_404\nfrom django.urls import reverse\n\nfrom rest_framework import generics, authentication, exceptions\nfrom rest_framework import permissions\nfrom rest_framework.decorators import api_view\nfrom rest_framework.filters import OrderingFilter, SearchFilter\nfrom rest_framework.pagination import LimitOffsetPagination\nfrom rest_framework.response import Response\n\nfrom appuser.models import AppUser, AppUserLog\nfrom item.models import Item\nfrom item.serializers import ItemListSerializer, ItemRetrieveSerializer\n\n\n# class ItemAuthentication(authentication.BaseAuthentication):\n# def authenticate(self, request):\n# token = request.META.get('HTTP_TOKEN')\n# if not token:\n# data = {\n# 'status': 403,\n# 'msg': '認證未通過',\n# }\n# return JsonResponse(data=data)\n# try:\n# token = AppUser.objects.get(token=token)\n# except token.DoesNotExist:\n# raise exceptions.AuthenticationFailed('No such user')\n\n\n# api股票列表\n\nclass ItemListView(generics.ListAPIView):\n queryset = Item.objects.all()\n serializer_class = ItemListSerializer\n\n def list(self, request, *args, **kwargs):\n queryset = self.filter_queryset(self.get_queryset())\n\n page = self.paginate_queryset(queryset)\n if page is not None:\n serializer = self.get_serializer(page, many=True)\n return self.get_paginated_response(serializer.data)\n\n serializer = self.get_serializer(queryset, many=True) #\n return Response(data={'items': serializer.data})\n # authentication_classes = [ItemAuthentication, ]\n # permission_classes = (permissions.IsAuthenticated,) # 權限\n # filter_backends = (OrderingFilter, SearchFilter) # 排序 搜尋欄功能\n # ordering_fields = ('id', 'name') # 可依照id 名字做排序\n # search_fields = ('name',) # 可依照名字欄位做關鍵字搜尋\n # ordering = ('id',) # 排序\n # pagination_class = LimitOffsetPagination # 前端一頁要顯示幾個項目+分頁\n\n\n# api股票詳情\n\nclass ItemRetrieveView(generics.RetrieveAPIView):\n queryset = Item.objects.all()\n serializer_class = ItemRetrieveSerializer\n # permission_classes = (permissions.IsAuthenticated,)\n # authentication_classes = [ItemAuthentication, ]\n\n\n# 後台股票列表\n# @login_required\ndef list(request):\n items = Item.objects.all()\n item_list = []\n for item in items:\n item_list.append(\n {\n 'id': item.id,\n 'name': item.name,\n 'value': item.value,\n }\n )\n data = {\n 'title': '股票管理後台',\n 'item_list': item_list,\n }\n return render(request, 'item/list.html', context=data)\n\n\n@login_required # 新增股票\ndef add(request):\n if request.method == 'GET':\n data = {\n 'title': '新增股票'\n }\n return render(request, 'item/add.html', context=data)\n elif request.method == 'POST':\n name = request.POST.get('name', '')\n value = request.POST.get('value', '')\n test_str = re.search(r'W', name or value)\n if name or value == '':\n # messages.add_message(request, messages.INFO, '不能為空')\n return render(request, 'item/add.html')\n elif test_str:\n return render(request, 'item/add.html')\n items = Item()\n items.name = name\n items.value = value\n items.save()\n return redirect(reverse('item:list'))\n\n\n@login_required # 股票編輯\ndef edit(request, pk):\n item = get_object_or_404(Item, id=pk)\n if request.method == 'GET':\n data = {\n 'title': '修改',\n 'pk': item.id,\n 'name': item.name,\n 'value': item.value,\n }\n return render(request, 'item/edit.html', context=data)\n elif request.method == 'POST':\n name = 
request.POST.get('name')\n value = request.POST.get('value')\n item.name = name\n item.value = value\n item.save(update_fields=['name'])\n item.save(update_fields=['value'])\n return redirect(reverse('item:list'))\n\n\n@login_required # 刪除\ndef delete(request, pk):\n Item.objects.get(pk=pk).delete()\n messages.add_message(request, messages.INFO, '已刪除股票')\n return redirect(reverse('item:list'))\n\n\n@login_required # 首頁\ndef home(request):\n return render(request, 'main/home.html')\n"
},
{
"alpha_fraction": 0.6081081032752991,
"alphanum_fraction": 0.6325364112854004,
"avg_line_length": 17.33333396911621,
"blob_id": "e035d58b4522864f695c4ba59e922647770ca627",
"content_id": "23b167aafc9b9945426aecc01f92bc5a61775472",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "YAML",
"length_bytes": 2040,
"license_type": "no_license",
"max_line_length": 226,
"num_lines": 105,
"path": "/docker-compose.yml",
"repo_name": "n1056722/Djangostock",
"src_encoding": "UTF-8",
"text": "version: \"3\"\n\nservices:\n db:\n image: postgres:11\n environment:\n - POSTGRES_DB=${POSTGRES_DB}\n - POSTGRES_USER=${POSTGRES_USER}\n - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}\n ports:\n - 5432:5432\n volumes:\n - stockdata:/var/lib/postgresql/data\n\n\n# redis:\n# image: redis:5\n# restart: always\n# volumes:\n# - ./redis/data:/data\n# ports:\n# - 6379:6379\n\n# nginx:\n# image: nginx:latest\n# ports:\n# - 80:80\n# volumes:\n# - ./nginx:/etc/nginx/conf.d\n# depends_on:\n# - web\n#\n# web:\n# build: ../../.\n# image: mydjangotemplate\n# command: bash -c \"python manage.py collectstatic --no-input && python manage.py migrate && gunicorn --workers=2 --timeout 100 core.wsgi -b 0.0.0.0:8000 --reload --error-logfile '/mydjangotemplate/logs/gunicorn-error.log'\"\n# volumes:\n# - django-static:/mydjangotemplate/staticfiles\n# ports:\n# - 8000:8000\n# depends_on:\n# - redis\n#\n# celery-worker:\n# image: mydjangotemplate\n# command: celery -A core worker --concurrency=4 -l info -n worker1@%h\n# depends_on:\n# - web\n# - redis\n#\n# celery-beat:\n# image: mydjangotemplate\n# command: celery -A core beat -l info\n# depends_on:\n# - web\n# - redis\n\nvolumes:\n stockdata:\n\n#列出Container\n#docker ps\n#docker ps -f \"name=log\"\n#\n#列出Container狀態\n#docker stats\n#\n#列出Network\n#docker network ls\n#\n#列出Volume\n#docker volume ls\n#\n#Build\n#docker build . -f DockerfileBase -t samplebase # 指定檔案, 並給予tag名稱\n#docker-compose build\n#\n#Up\n#docker-compose up -d\n#\n#Down\n#docker-compose down\n#\n#Down且移除volume\n#docker-compose down -v\n#\n#刪除所有none的image\n#docker rmi $(docker images -f \"dangling=true\" -q)\n#\n#看容器狀態\n#docker stats\n#\n#進入container\n#docker exec -exit xxx /bin/bash\n#\n#建立網路\n#docker network create xxx\n#\n#移除所有container\n#d rm $(d container ls -a -q)\n#\n#顯示各個container最後n筆log\n#docker-compose logs --tail=\"2\"\n#\n## 將容器的狀態輸出到stats.txt"
},
{
"alpha_fraction": 0.688622772693634,
"alphanum_fraction": 0.7844311594963074,
"avg_line_length": 26.83333396911621,
"blob_id": "338b8c6e7d4956c33c79555b0efd8b64cb050235",
"content_id": "79b20767d6493e3051ae0be4e82b832c565d46ec",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 167,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 6,
"path": "/.env.example",
"repo_name": "n1056722/Djangostock",
"src_encoding": "UTF-8",
"text": "DEBUG=False\nSECRET_KEY=VkYp2s5v8y/B?E(H+MbQeThWmZq4t6w9z$C&F)J@NcRfUjXn2r5u8x!A%D*G-KaP\nDATABASE_URL_DEFAULT=psql://server:[email protected]:5432/default\nALLOWED_HOSTS=127.0.0.1,localhost\nAPI_SECRET_KEY=\nX_STOCK_TOKEN=\n"
},
{
"alpha_fraction": 0.49740931391716003,
"alphanum_fraction": 0.5854922533035278,
"avg_line_length": 20.44444465637207,
"blob_id": "34abc48f8125ff0383c990dad26c8f959b47f02a",
"content_id": "46ac5647ed5e81d524672c2f6ece3415453d9a0a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 386,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 18,
"path": "/appuser/migrations/0004_auto_20210611_1810.py",
"repo_name": "n1056722/Djangostock",
"src_encoding": "UTF-8",
"text": "# Generated by Django 2.2.9 on 2021-06-11 10:10\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('appuser', '0003_auto_20210611_1700'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='appuser',\n name='name',\n field=models.CharField(max_length=100),\n ),\n ]\n"
}
] | 13 |
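SignatureCheckMiddleware above recomputes an HMAC-SHA256 over METHOD + path + the sorted query string (joined as k=v pairs with '&', with no separator after the path) using the caller's secret_key, and compares it to the x-stock-signature header. A client-side sketch of the same signing scheme; signed_get is a hypothetical helper, while the header names and payload layout follow the middleware and sdk.py above:

    import hmac
    import time

    import requests

    def signed_get(base_url, path, token, secret, params=None):
        params = dict(params or {}, uts=int(time.time()))
        # the middleware sorts params by key and joins them as k=v with '&'
        qs = "&".join(f"{k}={v}" for k, v in sorted(params.items()))
        payload = "GET" + path + qs  # e.g. GET/item/api/list/uts=1622454289
        signature = hmac.new(secret.encode(), payload.encode(), "sha256").hexdigest()
        headers = {"x-stock-token": token, "x-stock-signature": signature}
        return requests.get(base_url + path, params=params, headers=headers)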
sawant98d/hellodjango
|
https://github.com/sawant98d/hellodjango
|
2a03775aacdb08c2a69ba54a42a9eeee6f88d7ee
|
e91b45680dca6a195e1197b3e02866a9a6939112
|
8bc7b3aab5fc5449b6753288806f63f0c1f1c9c2
|
refs/heads/master
| 2023-04-25T00:44:07.853146 | 2021-05-23T13:12:15 | 2021-05-23T13:12:15 | 367,614,544 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5906822085380554,
"alphanum_fraction": 0.5973377823829651,
"avg_line_length": 25.173913955688477,
"blob_id": "d1ec3924cc21edab5d985fa1af542f90efbfc7d1",
"content_id": "c7ce28aa720be90f41897e4205cbc7e23937f1e3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 601,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 23,
"path": "/exam/views.py",
"repo_name": "sawant98d/hellodjango",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\nfrom django.http import HttpResponse\n\n# Create your views here.\n\ndef showResult(request):\n s = \"<h1>This is show RESULT page</h1>\"\n return HttpResponse(s)\n\ndef showTest(request):\n #s = \"<h1>This is show TEST page</h1>\"\n #res = render(request, 'exam/test.htm')\n que = \"Where is Mumbai?\"\n a = \"Andra Pradesh\"\n b = \"Gujrat\"\n c = \"Maharashtra\"\n d = \"Rajastan\"\n e = \"KARNATAKA\"\n level = \"EASY\"\n data = {'que':que, 'a':a, 'b':b, 'c':c, 'd':d, \n 'e':e, 'level':level}\n res = render(request, 'exam/test.htm', context=data)\n return res"
},
{
"alpha_fraction": 0.6520737409591675,
"alphanum_fraction": 0.6520737409591675,
"avg_line_length": 28,
"blob_id": "1fad467ff67b1e9b8f66de9a55105c98105930c4",
"content_id": "730a169c1f11d10c4aa5f604965ea15727d0f42b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 434,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 15,
"path": "/BRMApp/urls.py",
"repo_name": "sawant98d/hellodjango",
"src_encoding": "UTF-8",
"text": "from BRMApp import views\nfrom django.conf.urls import url\n\nurlpatterns = [\n url('view-books', views.viewBooks),\n url('edit-book', views.editBook),\n url('delete-book', views.deleteBook),\n url('search-book', views.searchBook),\n url('new-book', views.newBook),\n url('add', views.add),\n url('search', views.search),\n url('edit',views.edit),\n url('login', views.userLogin),\n url('logout', views.userLogout),\n]"
},
{
"alpha_fraction": 0.6363636255264282,
"alphanum_fraction": 0.6608391404151917,
"avg_line_length": 28.620689392089844,
"blob_id": "a48e6d640dd500104548a6f8db76d7a1ca89545e",
"content_id": "54f239f80e2dcaccd00fa45255dd3ee52302778b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 858,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 29,
"path": "/testapp/views.py",
"repo_name": "sawant98d/hellodjango",
"src_encoding": "UTF-8",
"text": "from testapp.models import Employee\nfrom django.shortcuts import render\nfrom django.http import HttpResponse\n# Create your views here.\n\ndef greeting(request):\n s = \"<h1>Hello and welcome to the first view of testapp</h1>\"\n return HttpResponse(s)\n\ndef showContact(request):\n s = \"<h1>Contact Page</h1>\"\n s += \"<p>website: abc.com</p>\"\n s += \"<p>Contact: 987654321</p>\"\n s += \"<p>Email: [email protected]</p>\"\n return HttpResponse(s)\n\ndef about(request):\n #s = \"<h1>This is an about page</h1>\"\n #l = [10,20,30]\n data = {'msg':'this is message from views.py lol for quote filter test purpose'}\n res = render(request, 'testapp/about.htm', data)\n return res\n\n\ndef employee_info_view(request):\n employees = Employee.objects.all()\n data = {'employees':employees}\n res = render(request, 'testapp/employees.htm',data)\n return res"
}
] | 3 |
ddrmaster1000/Arduino
|
https://github.com/ddrmaster1000/Arduino
|
98ac5b5f12d3b7923f09a802fcdcbbb456282b5e
|
91d9c32e1bf799a04a03f7fed2262c1f904ce71e
|
44e220a37f459c6c71e7fdf266bed5da424c3ae5
|
refs/heads/master
| 2021-05-05T10:34:40.805145 | 2019-10-20T16:15:40 | 2019-10-20T16:15:40 | 118,038,455 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5649122595787048,
"alphanum_fraction": 0.5929824709892273,
"avg_line_length": 21.10344886779785,
"blob_id": "86a25847b0aaadddcc3c7ff7530464512bfdadb5",
"content_id": "05a18e1fb476c1e4e3dd487fa8f9ee93da141041",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2565,
"license_type": "permissive",
"max_line_length": 108,
"num_lines": 116,
"path": "/Micron_Tim_Steve_Desk_LEDS_1_2/Micron_Tim_Steve_Desk_LEDS_1_2.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include <SPI.h>\n#include \"RF24.h\"\n\n\n\nRF24 radio(7,8);\nconst uint64_t pipe = 0xE8E8F0F0E1LL; //channel to receive\nbyte addresses[][6] = {\"1Node\",\"2Node\"};\n\ntypedef struct {\n int ID;\n byte Butt;\n}MsgData;\n\n\n//UNcomment if your name is Steve\nMsgData MY_LED_DATA = {420, 0};\nMsgData HIS_LED_DATA = {69, 0};\nint My_Name = 420;\nint His_Name = 69;\n\n//UNcomment if your name is Tim\n//MsgData MY_LED_DATA = {69, 0};\n//MsgData HIS_LED_DATA = {420, 0};\n//int My_Name = 69;\n//int His_Name = 420;\n\n\n\nint inPin = 3; // choose the pin for the button\nint ledPin = 6;\nint val=0;\nbool tOff = 0;\nbool rOff = 0;\n\nvoid setup(void){\n Serial.begin(115200);\n radio.begin();\n radio.setAutoAck(false);\n radio.openReadingPipe(1,pipe);\n radio.startListening();\n pinMode(ledPin, OUTPUT); // declare LED as output\n pinMode(inPin, INPUT); // declare pushbutton as input\n}\n\n\n\nvoid loop(void){\n\n val = digitalRead(inPin);\n receive();\n //Serial.println();\n\n//Recieved Data, Turn on LED\n while(HIS_LED_DATA.ID == His_Name && HIS_LED_DATA.Butt == 1){ //Turn on if is an expected signal\n rOff = 1;\n Serial.println(\"Received one!\");\n digitalWrite(ledPin, HIGH);\n receive();\n }\n \n //Turn off LED after a long receive\n if(rOff == 1){\n digitalWrite(ledPin, LOW);\n }\n\n\n//If Button Press, \"ON\" transmit\n while(val == HIGH){\n tOff = 1;\n MY_LED_DATA.Butt = 1;\n transmit(MY_LED_DATA);\n val = digitalRead(inPin);\n \n }\n//after an \"ON\" Transmit, executre \"OFF\" Transmit\n if(tOff == 1){\n tOff = 0;\n MY_LED_DATA.Butt = 0;\n transmit(MY_LED_DATA);\n } \n}\n\n\n\n\n \nvoid receive(){ //Recieve Data from another node\n radio.openWritingPipe(addresses[1]);\n radio.openReadingPipe(1,addresses[0]);\n radio.startListening(); \n if(radio.available()){\n while(radio.available()){ \n radio.read(&HIS_LED_DATA, sizeof(MsgData)); //byte value\n delay(5);\n }\n }\n return;\n}\n\nvoid transmit(MsgData Transmit_Msg){ //Transmit Data to Another Node\n radio.openWritingPipe(addresses[0]);\n radio.openReadingPipe(1,addresses[1]);\n radio.stopListening();\n //unsigned long msg = value;\n for(byte i=0; i<15; i++){ \n radio.write(&Transmit_Msg, sizeof(MsgData));\n delay(5);\n }\n Serial.println(\"Transmitted Data\");\n Serial.print(\"ID: \");\n Serial.println(Transmit_Msg.ID);\n Serial.print(\"Button: \");\n Serial.println(Transmit_Msg.Butt);\n Serial.println();\n}\n\n"
},
{
"alpha_fraction": 0.6155462265014648,
"alphanum_fraction": 0.6239495873451233,
"avg_line_length": 15.413793563842773,
"blob_id": "d636fbb95ab2126934fa77d9dfe9f5ede19ce79d",
"content_id": "c0411dfccda9590035e31260ad4ebe5f72b2fdc6",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 476,
"license_type": "permissive",
"max_line_length": 46,
"num_lines": 29,
"path": "/MIDI-Arduino-2-master/PIR.cpp",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "/*\n * Created by Keegan Dahm on 4/8/15.\n */\n\n#include \"PIR.h\"\n\nPIR::PIR(unsigned int pin) {\n this->pin = pin;\n\n pinMode(pin, INPUT);\n}\n\nbool PIR::update() {\n last_check_result = check_result;\n check_result = digitalRead(pin);\n return check_result;\n}\n\nbool PIR::check() {\n return check_result;\n}\n\nbool PIR::check_positive_edge() {\n return check_result && !last_check_result;\n}\n\nbool PIR::check_negative_edge() {\n return !check_result && last_check_result;\n}\n"
},
{
"alpha_fraction": 0.6155507564544678,
"alphanum_fraction": 0.6436285376548767,
"avg_line_length": 14.965517044067383,
"blob_id": "9e20fcfaf8d464fe825996a07f549d0a3efc7845",
"content_id": "7e31fba3d94dbe21facb80e7a2241da6edc04fe2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 463,
"license_type": "permissive",
"max_line_length": 44,
"num_lines": 29,
"path": "/MIDI-Arduino-2-master/SerialStream.h",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "/*\n * Created by Keegan Dahm on 4/9/15.\n */\n\n#ifndef MIDI_ARDUINO_2_SERIALSTREAM_H\n#define MIDI_ARDUINO_2_SERIALSTREAM_H\n\n\n#include \"Queue.h\"\n#include <Arduino.h>\n#include <stdint.h>\n\nclass SerialStream {\nprivate:\n Queue<uint8_t> *q;\n\npublic:\n SerialStream(unsigned int length = 128);\n ~SerialStream();\n\n bool buffer();\n unsigned int available();\n uint8_t read();\n uint8_t peek();\n void flush();\n};\n\n\n#endif /* MIDI_ARDUINO_2_SERIALSTREAM_H */\n"
},
{
"alpha_fraction": 0.45449066162109375,
"alphanum_fraction": 0.4653405547142029,
"avg_line_length": 16.103092193603516,
"blob_id": "a0ad8f8a7d318b1b6575d484e044002abee03934",
"content_id": "11622d9e70de3904f0f36839faad565aa4352e64",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1659,
"license_type": "permissive",
"max_line_length": 54,
"num_lines": 97,
"path": "/MIDI-Arduino-2-master/Queue.h",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "/*\n * Created by Keegan Dahm on 4/9/15.\n */\n\n#ifndef MIDI_ARDUINO_2_QUEUE_H\n#define MIDI_ARDUINO_2_QUEUE_H\n\n\n#include <stdlib.h>\n\ntemplate <typename T> class Queue {\nprivate:\n T *data;\n unsigned int length;\n unsigned int front, back;\n\npublic:\n Queue(unsigned int length) {\n length;\n data = (T *) malloc((length + 1) * sizeof(T));\n\n this->length = length;\n front = 0;\n back = 0;\n }\n\n ~Queue() {\n free((void *) data);\n }\n\n bool enqueue(T data) {\n if (full())\n return false;\n\n this->data[back++] = data;\n\n if (back > length)\n back = 0;\n\n return true;\n }\n\n T peek() {\n if (empty())\n return 0xFF;\n\n return this->data[front];\n }\n\n T dequeue() {\n if (empty())\n return 0xFF;\n\n T data = this->data[front++];\n if (front > length)\n front = 0;\n\n return data;\n }\n\n void clear() {\n front = back;\n }\n\n bool full() {\n if (front > back)\n return front == back + 1;\n else if (front < back)\n return front == 0 && back == length;\n else\n return false;\n }\n\n bool empty() {\n return front == back;\n }\n\n unsigned int space_occupied() {\n if (front > back)\n return length - (front - back) + 1;\n else if (front == back)\n return 0;\n else\n return back - front;\n }\n\n unsigned int space_free() {\n return length - space_occupied();\n }\n\n unsigned int space() {\n return length;\n }\n};\n\n\n#endif /* MIDI_ARDUINO_2_QUEUE_H */\n"
},
{
"alpha_fraction": 0.4337517321109772,
"alphanum_fraction": 0.482566237449646,
"avg_line_length": 15.382857322692871,
"blob_id": "dc3489b8c5c6a2678fefc925ad05154c1f8eea3c",
"content_id": "a5fd1bbaf88b3d8dd73c4a4c82966c56490185ea",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2868,
"license_type": "permissive",
"max_line_length": 98,
"num_lines": 175,
"path": "/Myo_Armband/Myo_Armband.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include \"LPD8806.h\"\n\nint nLEDs = 48;\n\n// Chose 2 pins for output; can be any valid output pins:\nint dataPin = 6;\nint clockPin = 5;\nLPD8806 strip = LPD8806(nLEDs, dataPin, clockPin);\nint val;\nint arr[3];\nint x, y, state;\nint serialln = 0;\nuint32_t color[3] = {0, 0, 0};\nbyte r, g, b = 10;\nint xarr[20] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};\n\nvoid setup() {\n // put your setup code here, to run once:\n Serial.begin(9600);\n strip.begin();\n strip.show();\n}\n\nvoid loop() {\n // put your main code here, to run repeatedly:\n if (Serial.available())\n { // If data is available to read,\n // // read it and store it in val\n\n\n readIncomingBytes();\n if (state == 1 ) {\n twistBright();\n }\n else if ( state == 2) {\n colorChange();\n }\n else if (state == 3) {\n therapyy();\n }\n\n }\n}\n\n\nvoid readIncomingBytes() {\n while (Serial.available() > 0)\n {\n int incomingByte = Serial.read();\n if (incomingByte == (int)'<')\n {\n serialln = 0;\n\n }\n else if (incomingByte == (int)'>')\n {\n x = arr[0];\n y = arr[1];\n state = arr[2];\n return;\n }\n else\n {\n arr[serialln] = incomingByte;\n serialln++;\n }\n }\n}\n\n\nvoid twistBright() {\n x = x / 5;\n\n float floaty = (y / 5.0);\n if (x > nLEDs) {\n x = nLEDs;\n }\n for ( int i = 0; i < x; i++) {\n strip.setPixelColor(i, strip.Color(floaty * r, (floaty)*g, (floaty)*b)); // Set new pixel 'on'\n }\n for (int j = x; j < nLEDs; j++) {\n strip.setPixelColor(j, strip.Color(0, 0, 0)); // Set new pixel 'on'\n }\n strip.show();\n\n}\n\nvoid colorChange() {\n for ( int i = 0; i < nLEDs; i++) {\n strip.setPixelColor(i, Wheel(x)); // Set new pixel 'on'\n }\n strip.show();\n\n\n}\n\n\nuint32_t Wheel(uint16_t WheelPos)\n{\n\n if (WheelPos > 0 && WheelPos < (190 / 3))\n {\n r = 0;\n g = 0;\n b = 10;\n }\n else if (WheelPos < (2 * 190 / 3))\n {\n r = 0;\n g = 10;\n b = 0;\n }\n else\n {\n r = 10;\n g = 0;\n b = 0;\n }\n return (strip.Color(r*5, g*5, b*5));\n}\n\nvoid therapyx()\n{\n if (x > 38 && x < 180)\n {\n r = 0;\n g = 10;\n b = 0;\n }\n else\n {\n r = 10;\n g = 0;\n b = 0;\n }\n x = x / 5;\n if (x > nLEDs) {\n x = nLEDs;\n }\n for ( int i = 0; i < x; i++) {\n strip.setPixelColor(i, strip.Color(5*r, 5*g, 5*b)); // Set new pixel 'on'\n }\n for (int j = x; j < nLEDs; j++) {\n strip.setPixelColor(j, strip.Color(0, 0, 0)); // Set new pixel 'on'\n }\n strip.show();\n}\n\n\nvoid therapyy()\n{\n if (y > 12 && y < 40)\n {\n r = 0;\n g = 10;\n b = 0;\n }\n else\n {\n r = 10;\n g = 0;\n b = 0;\n }\n\n if (y < 0) {\n y = 0;\n }\n for ( int i = 0; i < y; i++) {\n strip.setPixelColor(i, strip.Color(5*r, 5*g, 5*b)); // Set new pixel 'on'\n }\n for (int j = y; j < nLEDs; j++) {\n strip.setPixelColor(j, strip.Color(0, 0, 0)); // Set new pixel 'on'\n }\n strip.show();\n}\n\n"
},
{
"alpha_fraction": 0.563119649887085,
"alphanum_fraction": 0.5862524509429932,
"avg_line_length": 20.295774459838867,
"blob_id": "940584eddc9664b0683ca735be42567b8aa717a9",
"content_id": "3e8728a7b47402ed0fc327e0af360216fb3bb4b5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1513,
"license_type": "permissive",
"max_line_length": 108,
"num_lines": 71,
"path": "/Micron_Tim_Steve_Desk_Middle_Man/Micron_Tim_Steve_Desk_Middle_Man.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include <SPI.h>\n#include \"RF24.h\"\n\n\n\nRF24 radio(7,8);\nconst uint64_t pipe = 0xE8E8F0F0E1LL; //channel to receive\nbyte addresses[][6] = {\"1Node\",\"2Node\"};\n\ntypedef struct {\n int ID;\n byte Butt;\n}MsgData;\n\nMsgData DataIn;\nMsgData DataOut;\n\nvoid setup(void){\n Serial.begin(115200);\n radio.begin();\n radio.setAutoAck(false);\n radio.openReadingPipe(1,pipe);\n radio.startListening();\n}\n\n\n\nvoid loop(void){\n \nreceive();\n\nif((DataIn.Butt != DataOut.Butt) || (DataIn.ID != DataOut.ID)){\n transmit(DataIn);\n DataOut = DataIn;\n}\n\n}\n\n\n\n\n \nvoid receive(){ //Recieve Data from another node\n radio.openWritingPipe(addresses[1]);\n radio.openReadingPipe(1,addresses[0]);\n radio.startListening(); \n if(radio.available()){\n while(radio.available()){ \n radio.read(&DataIn, sizeof(MsgData)); //byte value\n delay(5);\n }\n }\n return;\n}\n\nvoid transmit(MsgData Transmit_Msg){ //Transmit Data to Another Node\n radio.openWritingPipe(addresses[0]);\n radio.openReadingPipe(1,addresses[1]);\n radio.stopListening();\n //unsigned long msg = value;\n for(byte i=0; i<15; i++){ \n radio.write(&Transmit_Msg, sizeof(MsgData));\n delay(5);\n }\n Serial.println(\"Transmitted Data\");\n Serial.print(\"ID: \");\n Serial.println(Transmit_Msg.ID);\n Serial.print(\"Button: \");\n Serial.println(Transmit_Msg.Butt);\n Serial.println();\n}\n\n"
},
{
"alpha_fraction": 0.533577561378479,
"alphanum_fraction": 0.5445665717124939,
"avg_line_length": 16.0625,
"blob_id": "8052d7e3664ddc3ce9a8114d4cc1c9d9dfbad64d",
"content_id": "6597039aa1d65444c59067a7358a26c31373d849",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 819,
"license_type": "permissive",
"max_line_length": 71,
"num_lines": 48,
"path": "/MIDI-Arduino-2-master/AutoArray.h",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "/*\n * Created by Keegan Dahm on 4/9/15.\n */\n\n#ifndef MIDI_ARDUINO_2_AUTOARRAY_H\n#define MIDI_ARDUINO_2_AUTOARRAY_H\n\n\n#include <stdlib.h>\n\ntemplate <typename T> class AutoArray {\nprivate:\n T *data;\n unsigned int allocated;\n\npublic:\n AutoArray() {\n allocated = 0;\n data = (T *) malloc(allocated * sizeof(T));\n }\n\n ~AutoArray() {\n free((void *) data);\n }\n\n bool add(T addition) {\n T *new_data = (T *) realloc(data, (allocated + 1) * sizeof(T));\n\n if (!new_data)\n return false;\n\n new_data[allocated++] = addition;\n data = new_data;\n\n return true;\n }\n\n inline unsigned int length() {\n return allocated;\n }\n\n inline T &operator[](unsigned int n) {\n return data[n];\n }\n};\n\n\n#endif /* MIDI_ARDUINO_2_AUTOARRAY_H */\n"
},
{
"alpha_fraction": 0.6934097409248352,
"alphanum_fraction": 0.722779393196106,
"avg_line_length": 36.72972869873047,
"blob_id": "67f3e5a0acbcfb80669c1542a9ce9c5bd39c6073",
"content_id": "0759c446c88eb618d7ca8ad9761057b652a7b98a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1396,
"license_type": "permissive",
"max_line_length": 123,
"num_lines": 37,
"path": "/BandTransmitter2016Final/BandTransmitter2016Final.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include <SPI.h>\n#include <nRF24L01.h>\n#include <RF24.h>\n\nRF24 radio(8, 9);\n\nconst byte rxAddr[6] = \"00001\"; //array size being sent\nchar text[6]; //'text' array is name of array being sent. Must match rxAddr\nString stringOne = \"\"; //define a blank string for later use\nbyte index = 0; //variable of size byte that will be used \nvolatile int counter = 129; //multiple of numpattern - 1 to initilize to blank\nconst int numPattern = 13; //total number of patterns you have to be sent\n\nvoid setup()\n{\n Serial.begin(9600); \n attachInterrupt(1, changePattern, RISING); //interrupt. xx(interuppt pin, function being called, interuppt on rising edge\n radio.begin(); //start NRF radio\n //radio.setRetries(15, 15); //if no data sent/recieved, retry 15 times before stopping\n radio.openWritingPipe(rxAddr); //start writing to byte rxAddr\n \n radio.stopListening(); //stop NRF from listening for anything\n}\n\nvoid changePattern(){ //change pattern function\n counter++; //increase pattern by 1\n counter %= numPattern; \n}\n\nvoid loop()\n{\n stringOne = counter; //set string to counter\n stringOne.toCharArray(text,6); //set string one into 'text' array of size 6\n Serial.println(text); //print out on serial what is being sent through NRF\n radio.write(&text, sizeof(text)); //write to be sent. text array pointer, send actual text array \n delay(125); //need some delay for NRF to actually transmit\n}\n"
},
{
"alpha_fraction": 0.5736607313156128,
"alphanum_fraction": 0.6116071343421936,
"avg_line_length": 27.90322494506836,
"blob_id": "84d94d3eac286b66725e6ebed9dfbea847942b1a",
"content_id": "58a10493f34cd4c4e9493a4a99932453757d60c9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 896,
"license_type": "permissive",
"max_line_length": 67,
"num_lines": 31,
"path": "/RFID_test_1.0/test/test.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "int ledPin4 = 4; // LED connected to digital pin 13\nint ledPin5 = 5;\nvoid setup()\n{\n Serial.begin(9600);\n pinMode(ledPin4, OUTPUT);\n pinMode(ledPin5, OUTPUT);// sets the digital pin as output\n}\n\nvoid loop()\n{\n digitalWrite(ledPin4, HIGH); //Push Out\n digitalWrite(ledPin5, HIGH);// sets the LED on\nSerial.println(\"A\");\n delay(100); // waits for a second\n digitalWrite(ledPin4, LOW); //Holds\n digitalWrite(ledPin5, HIGH);// sets the LED on\nSerial.println(\"B\");\n\n\n \n delay(2500); // waits for a second\n digitalWrite(ledPin4, LOW); //Pulls in\n digitalWrite(ledPin5, LOW); // sets the LED off\n Serial.println(\"C\");\n delay(100); // waits for a second\n digitalWrite(ledPin4, LOW); //Holds\n digitalWrite(ledPin5, HIGH);// sets the LED on\n delay(2500); // waits for a second\nSerial.println(\"D\");\n}\n"
},
{
"alpha_fraction": 0.6066452264785767,
"alphanum_fraction": 0.6141479015350342,
"avg_line_length": 19.282608032226562,
"blob_id": "21391b2983deee823367ea354fc9d8d8aef53536",
"content_id": "b9a6d8a97aabd2d9e7732eb04559474b3dca4e76",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 933,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 46,
"path": "/MIDI-Arduino-2-master/LED.cpp",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "/*\n * Created by Keegan Dahm on 4/8/15.\n */\n\n#include \"LED.h\"\n\nstatic void set_pins(unsigned int red, unsigned int green, unsigned int blue) {\n pinMode(red, OUTPUT);\n digitalWrite(red, LOW);\n pinMode(green, OUTPUT);\n digitalWrite(blue, LOW);\n pinMode(blue, OUTPUT);\n digitalWrite(blue, LOW);\n}\n\nLED::LED() {\n red = green = blue = 0;\n}\n\nLED::LED(unsigned int red) {\n this->red = red;\n this->green = red + 1;\n this->blue = red + 2;\n\n set_pins(red, green, blue);\n}\n\nLED::LED(unsigned int red, unsigned int green, unsigned int blue) {\n this->red = red;\n this->green = green;\n this->blue = blue;\n\n set_pins(red, green, blue);\n}\n\nvoid LED::set(bool red, bool green, bool blue) {\n digitalWrite(this->red, red);\n digitalWrite(this->green, green);\n digitalWrite(this->blue, blue);\n}\n\nvoid LED::reset() {\n digitalWrite(red, LOW);\n digitalWrite(green, LOW);\n digitalWrite(blue, LOW);\n}\n"
},
{
"alpha_fraction": 0.47675591707229614,
"alphanum_fraction": 0.5280609130859375,
"avg_line_length": 31.819852828979492,
"blob_id": "9a8512ff42f22966538c68c15350905b1e85d19b",
"content_id": "db8621ff1f0539eac7309618408094db45132a4a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 8927,
"license_type": "permissive",
"max_line_length": 187,
"num_lines": 272,
"path": "/Longboard_HackISU_S17/Longboard_HackISU_S17.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include \"LPD8806.h\"\n\nint nLEDs = 18;\n\nint analogPin = 3;\nfloat val_hall = 0;\nbyte north = '0';\n\n//int r_dataPin = 3;\n//int r_clockPin = 4;\n//int l_dataPin = 5;\n//int l_clockPin = 6;\n\n\nint r_dataPin = 2;\nint r_clockPin = 3;\nint l_dataPin = 4;\nint l_clockPin = 5;\n\nint k = 0;\nint i = 0;\nlong time_1;\nlong time_2;\nlong time_final = 70.179;\nfloat velocity = 0;\nlong led_count = 0;\nbool flag = 0;\n\nLPD8806 r_strip = LPD8806(nLEDs, r_dataPin, r_clockPin);\nLPD8806 l_strip = LPD8806(nLEDs, l_dataPin, l_clockPin);\n\n\nvoid setup() {\n attachInterrupt(0, interrupt, RISING);\n r_strip.begin();\n r_strip.show();\n l_strip.begin();\n l_strip.show();\n Serial.begin(9600);\n}\n\nvoid interrupt(){\n \n \n}\n\nvoid loop() {\n val_hall = analogRead(analogPin) * (5.0 / 1024.0);\n Serial.print(velocity);\n\n if (val_hall >= .3 && north == '0') {\n north = '1';\n k++;\n time_1 = millis();\n led_count++;\n } else if (val_hall < .3 && north == '1') {\n north = '0';\n k++;\n time_2 = millis();\n time_final = abs(time_1 - time_2);\n\n velocity = 70.179 / time_final;\n led_count++;\n\n }\n Serial.println(velocity);\n \n if (k >= nLEDs) { //restart the count of LEDs\n k = 0;\n }\n\n if (velocity <= 7) { // testing values: 3, 5. Riding values 5, 10\n // next_pixel();\n rainbow_next_pixel();\n //zigzag();\n\n }\n else if(velocity > 7.5 && velocity < 15){\n light_speed(velocity);\n }\n else{\n rainbow_speed(velocity);\n }\n}\n\n\nvoid leap_frog(){\n r_strip.setPixelColor(k-1, 0); //set pixel color to the next pixel.\n l_strip.setPixelColor(k-1, 0);\n \n if(k%2){\n l_strip.setPixelColor(k, 0,255,0); \n l_strip.setPixelColor(k+1, 255,255,0); \n r_strip.setPixelColor(k, 255,40,0);\n }\n\n if(k%2 == 0){\n l_strip.setPixelColor(k+1, 0,255,0); \n l_strip.setPixelColor(k, 255,255,0);\n r_strip.setPixelColor(k+1, 0,255,0); //set pixel color to the next pixel.\n }\n if(k == 0){\n r_strip.setPixelColor(nLEDs-1, 0); //set pixel color to the next pixel.\n l_strip.setPixelColor(nLEDs-1, 0);\n }\n r_strip.show();\n l_strip.show();\n}\n\n\nvoid zigzag(){\n if(k%2){\n l_strip.setPixelColor(k+1, 0,255,0); //set pixel color to the next pixel.\n r_strip.setPixelColor(k, 255,40,0);\n }\n\n if(k%2 == 0){\n r_strip.setPixelColor(k+1, 0,255,0); //set pixel color to the next pixel.\n l_strip.setPixelColor(k, 255,40,0);\n }\n r_strip.setPixelColor(k-1, 0); //set pixel color to the next pixel.\n l_strip.setPixelColor(k-1, 0);\n if(k == 0){\n r_strip.setPixelColor(nLEDs-1, 0); //set pixel color to the next pixel.\n l_strip.setPixelColor(nLEDs-1, 0);\n }\n r_strip.show();\n l_strip.show();\n}\n\nvoid rainbow_next_pixel(){\n r_strip.setPixelColor(k, Wheel(((k * 384 / l_strip.numPixels()) + (led_count%384)%2) % 384, l_strip)); //set pixel color to the next pixel.\n l_strip.setPixelColor(k, Wheel(((k * 384 / r_strip.numPixels()) + (led_count%384)%2) % 384, r_strip));\n r_strip.setPixelColor(k-1, 0); //set pixel color to the next pixel.\n l_strip.setPixelColor(k-1, 0);\n if(k == 0){\n r_strip.setPixelColor(nLEDs-1, 0); //set pixel color to the next pixel.\n l_strip.setPixelColor(nLEDs-1, 0);\n\n// //the following makes a led jump to the front of the led strip\n// if(flag == 0){\n// for(i = nLEDs; i>0; i--){\n// r_strip.setPixelColor(i, Wheel(((i * 384 / l_strip.numPixels()) + (led_count%384)%2) % 384, l_strip)); //set pixel color to the next pixel.\n// l_strip.setPixelColor(i, Wheel(((i * 384 / r_strip.numPixels()) + (led_count%384)%2) % 384, r_strip));\n// delay(3);\n// r_strip.setPixelColor(i+1, 0); //set pixel 
color to the next pixel.\n// l_strip.setPixelColor(i+1, 0);\n// r_strip.show();\n// l_strip.show();\n// }\n// for(i=0;i<nLEDs;i++){\n// r_strip.setPixelColor(i, 0); //set pixel color to the next pixel.\n// l_strip.setPixelColor(i, 0);\n// }\n// flag = 1;\n// }\n// }\n// else(flag = 0);\n\n }\n r_strip.show();\n l_strip.show();\n\n}\n\n\nvoid next_pixel() {\n for (i = 0; i < r_strip.numPixels(); i++) {\n r_strip.setPixelColor(i, 0); //clear strips to start\n }\n for (i = 0; i < l_strip.numPixels(); i++) {\n l_strip.setPixelColor(i, 0);\n }\n r_strip.setPixelColor(k, 240); //set pixel color to the next pixel.\n l_strip.setPixelColor(k, 240);\n l_strip.show();\n r_strip.show();\n}\n\nvoid light_speed(float velocity) {\n \n for (int i = 0; i < l_strip.numPixels() && i < r_strip.numPixels(); ++i) {\n if (velocity -4 > i) {\n l_strip.setPixelColor(i, 250, i * 2, 0);\n r_strip.setPixelColor(i, 250, i * 2, 0);\n } else {\n l_strip.setPixelColor(i, 0);\n r_strip.setPixelColor(i, 0);\n }\n }\n\n r_strip.show();\n l_strip.show();\n}\n\nvoid rainbow_speed(float velocity) {\n for (i = 0; i < r_strip.numPixels(); i++) {\n r_strip.setPixelColor(i, 0); //clear strips to start\n }\n for (i = 0; i < l_strip.numPixels(); i++) {\n l_strip.setPixelColor(i, 0);\n }\n if(i%10==0){\n r_strip.setPixelColor(k, Wheel(((k * 384 / l_strip.numPixels()) + (led_count%384)) % 384, l_strip)); //set pixel color to the next pixel.\n l_strip.setPixelColor(k, Wheel(((k * 384 / r_strip.numPixels()) + (led_count%384)) % 384, r_strip));\n r_strip.setPixelColor(k+1, Wheel(((k+1 * 384 / l_strip.numPixels()) + (led_count%384)) % 384, l_strip)); //set pixel color to the next pixel.\n l_strip.setPixelColor(k+1, Wheel(((k+1 * 384 / r_strip.numPixels()) + (led_count%384)) % 384, r_strip));\n r_strip.setPixelColor(k+2, Wheel(((k+2 * 384 / l_strip.numPixels()) + (led_count%384)) % 384, l_strip)); //set pixel color to the next pixel.\n l_strip.setPixelColor(k+2, Wheel(((k+2 * 384 / r_strip.numPixels()) + (led_count%384)) % 384, r_strip));\n r_strip.setPixelColor(k+3, Wheel(((k+3 * 384 / l_strip.numPixels()) + (led_count%384)) % 384, l_strip)); //set pixel color to the next pixel.\n l_strip.setPixelColor(k+3, Wheel(((k+3 * 384 / r_strip.numPixels()) + (led_count%384)) % 384, r_strip));\n r_strip.setPixelColor(k+4, Wheel(((k+4 * 384 / l_strip.numPixels()) + (led_count%384)) % 384, l_strip)); //set pixel color to the next pixel.\n l_strip.setPixelColor(k+4, Wheel(((k+4 * 384 / r_strip.numPixels()) + (led_count%384)) % 384, r_strip));\n l_strip.show();\n r_strip.show();\n }\n}\nvoid rainbow(uint8_t wait) {\n int i, j;\n\n for (j = 0; j < 384; j++) { // 3 cycles of all 384 colors in the wheel\n for (i = 0; i < l_strip.numPixels() && i < r_strip.numPixels(); i++) {\n l_strip.setPixelColor(i, Wheel((i + j) % 384, l_strip));\n r_strip.setPixelColor(i, Wheel((i + j) % 384, r_strip));\n }\n l_strip.show(); // write all the pixels out\n r_strip.show(); // write all the pixels out\n delay(wait);\n }\n}\n\n// Slightly different, this one makes the rainbow wheel equally distributed\n// along the chain\nvoid rainbowCycle(uint8_t wait) {\n uint16_t i, j;\n\n for (j = 0; j < 384 * 5; j++) { // 5 cycles of all 384 colors in the wheel\n for (i = 0; i < l_strip.numPixels() && i < r_strip.numPixels(); i++) {\n // tricky math! 
we use each pixel as a fraction of the full 384-color wheel\n // (thats the i / strip.numPixels() part)\n // Then add in j which makes the colors go around per pixel\n // the % 384 is to make the wheel cycle around\n l_strip.setPixelColor(i, Wheel(((i * 384 / l_strip.numPixels()) + j) % 384, l_strip));\n r_strip.setPixelColor(i, Wheel(((i * 384 / r_strip.numPixels()) + j) % 384, r_strip));\n }\n l_strip.show(); // write all the pixels out\n r_strip.show(); // write all the pixels out\n delay(wait);\n }\n}\n\nuint32_t Wheel(uint16_t WheelPos, LPD8806 strip) {\n byte r, g, b;\n switch (WheelPos / 128) {\n case 0:\n r = 127 - WheelPos % 128; //Red down\n g = WheelPos % 128; // Green up\n b = 0; //blue off\n break;\n case 1:\n g = 127 - WheelPos % 128; //green down\n b = WheelPos % 128; //blue up\n r = 0; //red off\n break;\n case 2:\n b = 127 - WheelPos % 128; //blue down\n r = WheelPos % 128; //red up\n g = 0; //green off\n break;\n }\n return (strip.Color(r, g, b));\n}\n"
},
{
"alpha_fraction": 0.582619309425354,
"alphanum_fraction": 0.6193390488624573,
"avg_line_length": 20.5,
"blob_id": "b52868c4028734b96311a93c955ad2430fd78a55",
"content_id": "8350042b38c00d05503c6e391127db96f5222570",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1634,
"license_type": "permissive",
"max_line_length": 65,
"num_lines": 76,
"path": "/Processing_to_Arduino_Brain_Wave/Processing_to_Arduino_Brain_Wave.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "/*\n Fade\n\n This example shows how to fade an LED on pin 9\n using the analogWrite() function.\n\n The analogWrite() function uses PWM, so if\n you want to change the pin you're using, be\n sure to use another PWM capable pin. On most\n Arduino, the PWM pins are identified with \n a \"~\" sign, like ~3, ~5, ~6, ~9, ~10 and ~11.\n\n This example code is in the public domain.\n */\nsigned int val = 0;\n\nint led = 9; // the PWM pin the LED is attached to\nint brightness = 0; // how bright the LED is\nint fadeAmount = 5; // how many points to fade the LED by\nint unfilt_num = 0;\nint output = 0;\n\n// the setup routine runs once when you press reset:\nvoid setup() {\n // declare pin 9 to be an output:\n Serial.begin(9600);\n pinMode(led, OUTPUT);\n}\n\n// the loop routine runs over and over again forever:\nvoid loop() {\n // set the brightness of pin 9:\n\n if (Serial.available()) \n { // If data is available to read,\n val = Serial.read()*10; // read it and store it in val\n\n\n \n output = 250 * (val / 1800.0);\n if(val < 0){\n output = 0;\n }\n analogWrite(led, output);\n \n\n delay(10);\n\n\n /*\n if(val<600)\n val=20;\n else if(val<1200)\n val=100;\n else \n val=255;\n analogWrite(led, val);\n delay(10);\n */\n \n }\n /*\n analogWrite(led, brightness);\n\n // change the brightness for next time through the loop:\n brightness = brightness + fadeAmount;\n\n // reverse the direction of the fading at the ends of the fade:\n if (brightness == 0 || brightness == 255) {\n fadeAmount = -fadeAmount ;\n }\n // wait for 30 milliseconds to see the dimming effect\n delay(30);\n*/\n \n}\n"
},
{
"alpha_fraction": 0.5688105225563049,
"alphanum_fraction": 0.5982019305229187,
"avg_line_length": 25.640552520751953,
"blob_id": "737f77bf4e9334972cbcce815337b10db763327c",
"content_id": "7d25e1c06779720b1acf0e9b5f796f241a5f3513",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 5784,
"license_type": "permissive",
"max_line_length": 148,
"num_lines": 217,
"path": "/CatStairsLightUpCapacitance/CatStairsLightUpCapacitance.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include <CapacitiveSensor.h>\n#include \"LPD8806.h\"\n#include \"SPI.h\" // Comment out this line if using Trinket or Gemma\n\n\n// Capacitance Setup\nCapacitiveSensor cs_4_2 = CapacitiveSensor(4,2); // 10M resistor between pins 4 & 2, pin 2 is sensor pin, add a wire and or foil if desired\nlong cap_value = cs_4_2.capacitiveSensor(80);\nint cap_threshold = 1300;\nint jump_count_total = 0;\n\n\n// LED Strip Setup\n// Number of RGB LEDs in strand:\nint nLEDs = 18;\n// Chose 2 pins for output; can be any valid output pins:\nint dataPin = 8;\nint clockPin = 10;\nLPD8806 strip = LPD8806(nLEDs, dataPin, clockPin);\n\n// Function Check Array\n//byte return_data[2];\n//data[0] = 'f'; //Is there a Passing capacitance value\n//data[1] = 'f'; //time elapsed during test\n\nvoid setup() \n{\n\n cs_4_2.set_CS_AutocaL_Millis(0xFFFFFFFF); // turn off autocalibrate on channel 1 - just as an example\n// Serial.begin(115200);\n\n // Show LED Strip\n // Start up the LED strip\n strip.begin();\n // Update the strip, to start they are all 'off'\n strip.show();\n}\n\n\n\n\nvoid loop() \n{\n bool cap_activates = check_sensor();\n if( cap_activates )\n {\n colorSpreadCenter(strip.Color(20, 0, 20), 100, 0); // Red\n cap_activates = false;\n }\n\n}\n\nbool check_sensor()\n{\n //Check the sensor's value\n cap_value = cs_4_2.capacitiveSensor(80);\n \n // Check one value to quickly check sensor\n// if(cap_value > cap_threshold)\n// {\n // Sensor is tripped\n // Collect an average of x readings to make sure they are real. \n byte loop_num = 1;\n int running_value = 0;\n for(int i=0; i<loop_num; ++i)\n {\n running_value = cs_4_2.capacitiveSensor(80);\n }\n\n // Is the value still above the threshold?\n if((running_value / loop_num) > cap_threshold)\n {\n //FOUND A REAL Positive\n return true;\n }\n// }\n\n else\n {\n return false;\n }\n}\n\n\n\n\n\n// Chase one dot down the full strip.\nvoid colorSpreadCenter(uint32_t c, uint8_t wait, uint8_t starting_led) {\n ++jump_count_total;\n int i;\n int half_pixels = (strip.numPixels())/2;\n int total_time = 0;\n bool found_cap = false;\n\n // Rainbow it up every N jumps\n if(jump_count_total % 50 == 0)\n {\n rainbowCycle(0); // make it go through the cycle fairly fast\n\n found_cap = check_sensor();\n while(found_cap)\n {\n found_cap = check_sensor();\n rainbowCycle(0); // make it go through the cycle fairly fast\n }\n // Turn all the pixels off at the end of Rainbow mode\n for(i=0; i<=strip.numPixels(); i++){ strip.setPixelColor(i, 0);}\n }\n\n // NORMAL Increasing Operation\n // Then display one pixel at a time:\n for(i=starting_led; i<=half_pixels; i++) {\n strip.setPixelColor(half_pixels + i, c); // Set new pixel 'on'\n strip.setPixelColor(half_pixels - i, c); // Set new pixel 'on'\n strip.show(); // Refresh LED states\n\n // Double check the sensor while LEDs are Moving\n found_cap = check_sensor();\n if(!found_cap)\n {\n // Reverse Strip and Restart the Loop\n reverseTurnOffStrip(c, wait, i);\n return;\n }\n // delay(25);\n }\n \n //Keep the led light on until the sensor is tripped 'off'\n found_cap = check_sensor();\n while(found_cap)\n {\n// rainbowCycle(0); // make it go through the cycle fairly fast\n found_cap = check_sensor();\n }\n\n // We finally left the while loop because there was a negative number. 
\n // Turn off the strip\n reverseTurnOffStrip(c, wait, i);\n\n}\n\n\n\nvoid reverseTurnOffStrip(uint32_t c, uint8_t wait, uint8_t currentLed)\n{\n bool found_cap = false;\n \n // Turning all pixels off:\n int half_pixels = (strip.numPixels())/2;\n for(int i=currentLed; i>=0; i--) {\n strip.setPixelColor(half_pixels + i, 0); // Set new pixel 'on'\n strip.setPixelColor(half_pixels - i, 0); // Set new pixel 'on'\n//\n// for(int i=currentLed; i>=-1; i=i-2) {\n// strip.setPixelColor(half_pixels + i, 0); // Set new pixel 'on'\n// strip.setPixelColor(half_pixels - i, 0); // Set new pixel 'on'\n// strip.setPixelColor(half_pixels + i+1, 0); // Set new pixel 'on'\n// strip.setPixelColor(half_pixels - i-1, 0); // Set new pixel 'on'\n \n strip.show(); // Refresh LED states\n // Double check the sensor while LEDs are Moving\n found_cap = check_sensor();\n if(found_cap)\n {\n // Reverse Strip and Restart the Loop\n colorSpreadCenter(c, wait, i);\n return;\n }\n \n delay(70);\n }\n}\n\n\n// Slightly different, this one makes the rainbow wheel equally distributed \n// along the chain\nvoid rainbowCycle(uint8_t wait) {\n uint16_t i, j;\n \n for (j=0; j < 384 * 5; j++) { // 5 cycles of all 384 colors in the wheel\n for (i=0; i < strip.numPixels(); i++) {\n // tricky math! we use each pixel as a fraction of the full 384-color wheel\n // (thats the i / strip.numPixels() part)\n // Then add in j which makes the colors go around per pixel\n // the % 384 is to make the wheel cycle around\n strip.setPixelColor(i, Wheel( ((i * 384 / strip.numPixels()) + j) % 384) );\n } \n strip.show(); // write all the pixels out\n delay(wait);\n }\n for(i=0; i<strip.numPixels(); i++) strip.setPixelColor(i, 0);\n}\n\nuint32_t Wheel(uint16_t WheelPos)\n{\n byte r, g, b;\n switch(WheelPos / 128)\n {\n case 0:\n r = 127 - WheelPos % 128; //Red down\n g = WheelPos % 128; // Green up\n b = 0; //blue off\n break; \n case 1:\n g = 127 - WheelPos % 128; //green down\n b = WheelPos % 128; //blue up\n r = 0; //red off\n break; \n case 2:\n b = 127 - WheelPos % 128; //blue down \n r = WheelPos % 128; //red up\n g = 0; //green off\n break; \n }\n return(strip.Color(r,g,b));\n}\n\n \n"
},
{
"alpha_fraction": 0.612500011920929,
"alphanum_fraction": 0.6299999952316284,
"avg_line_length": 14.384614944458008,
"blob_id": "214bdbb5cbd927ec06bfb5cd3b47144fb995a07f",
"content_id": "e01aa66b3c403857903d29eb8b0835852b7d9e5c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 400,
"license_type": "permissive",
"max_line_length": 57,
"num_lines": 26,
"path": "/MIDI-Arduino-2-master/PIR.h",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "/*\n * Created by Keegan Dahm on 4/8/15.\n */\n\n#ifndef MIDI_ARDUINO_2_PIR_H\n#define MIDI_ARDUINO_2_PIR_H\n\n\n#include <Arduino.h>\n\nclass PIR {\nprivate:\n unsigned int pin;\n bool check_result = false, last_check_result = false;\n\npublic:\n PIR(unsigned int pin);\n\n bool update();\n bool check();\n bool check_positive_edge();\n bool check_negative_edge();\n};\n\n\n#endif /* MIDI_ARDUINO_2_PIR_H */\n"
},
{
"alpha_fraction": 0.5116541385650635,
"alphanum_fraction": 0.5635338425636292,
"avg_line_length": 24.00943374633789,
"blob_id": "7a0aa463e4f2b9ea26115c0bf23a776f78af7f34",
"content_id": "b25c04955de669e84bb411100a94ee318deb4a23",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2660,
"license_type": "permissive",
"max_line_length": 131,
"num_lines": 106,
"path": "/move_all_lights_1/move_all_lights_1.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include \"LPD8806.h\"\n#include \"SPI.h\" // Comment out this line if using Trinket or Gemma\n#ifdef __AVR_ATtiny85__\n #include <avr/power.h>\n#endif\n\n\n\n\nint nLED = 124;\n\nint dataPin = 2;\nint clockPin = 3;\n\nLPD8806 strip = LPD8806(nLED, dataPin, clockPin);\n//byte red = random(1,256); //randomizes the initial color for red\n// byte green = random(1,256); //green, etc.\n// byte blue = random(1,256);\n\nvoid setup(){\n strip.begin(); //start running the strip\n strip.show(); //show strip\n}\n\nvoid loop(){\n int i;\n int k;\n// red+=random(1,25);red%=127;\n// green+=random(1,25);green%=127;\n// blue=127-red;\n \n byte rand = 1;\n byte red = random(1,90); //randomizes the initial color for red\n byte green = random(1,25); //green, etc.\n byte blue = 127-red;\n //wipe();\n \n for(i=0;i<strip.numPixels()/2;i+=2){ \n strip.setPixelColor(124/2-i-1,red,green,blue);\n strip.setPixelColor(124/2+i,red,green,blue);\n strip.show();\n \n }\n// red = random(1,90); //randomizes the initial color for red\n// green = 127-red; //green, etc.\n// blue = random(1,127);\n for(i=0;i<strip.numPixels()/2;i+=2){ \n strip.setPixelColor(i,127-red,green,127-blue);\n strip.setPixelColor(123-i,127-red,green,127-blue);\n strip.show();\n \n }\n \n// for(i=0; i<strip.numPixels(); i++){ //for loop turns on the LED's one by one\n// \n// byte randomnum = random(1,4); //chooses between the switches 1-3\n// /* if(rand == randomnum){ //if rand == randonum change randomnum by 1 so we don't go into the same switch case 2x.\n// randomnum++;\n// }\n// */\n// switch (randomnum){\n// case 1:\n// red = red + random(30,150);\n// red = red % 255;\n// break;\n// \n// case 2:\n// green = green + random(30,150);\n// green = green % 255; \n// break;\n// \n// case 3:\n// blue = blue + random(30,150);\n// blue = blue % 255; \n// break; \n// } \n// \n// //byte rand = randomnum;\n// \n// strip.setPixelColor(i,red,green,blue); // Sets the color of the pixels;\n// strip.show();\n// delay(1); //how long to wait to go onto the next led\n// \n// for(int j=0; j<strip.numPixels(); j++){ // for loop turns off all the LED's except the one turned on\n// strip.setPixelColor(j,0); //turns off LED\n// strip.show();\n// }\n// \n// \n// if(i==strip.numPixels()-1){\n// for(k=0; k<strip.numPixels(); k++){ // (turns off all of the LED's so pattern can continue on. once it reaches the end LED\n// strip.setPixelColor(k,0);\n// strip.show();\n// }\n// \n// }\n//\n// }\n \n}\n\nvoid wipe(){\n for(int i=0;i<nLED ;i++){\n strip.setPixelColor(i,0);\n }\n}\n\n \n \n\n\n"
},
{
"alpha_fraction": 0.5895833373069763,
"alphanum_fraction": 0.6156250238418579,
"avg_line_length": 20.81818199157715,
"blob_id": "6039681fba443cdd63be9401cc96c1ceb0522dbd",
"content_id": "ec3939741995eb747543a8154fe96ce1445a838d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1920,
"license_type": "permissive",
"max_line_length": 70,
"num_lines": 88,
"path": "/short_range_reciever/short_range_reciever.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include <SPI.h>\n#include \"RF24.h\"\n\nRF24 radio(7,8);\nconst uint64_t pipe = 0xE8E8F0F0E1LL; //channel to recieve\nbyte addresses[][6] = {\"1Node\",\"2Node\"};\n//unsigned long msg;\n\ntypedef struct {\n byte Priority;\n byte ID;\n float sensor1;\n}MsgData;\n\n//Initializing the Data in Structs.\n//These can be altered Later by using Struct_name.Struct_access \nMsgData Recieved_Data = {0, 0};\nMsgData My_Data = {0, 0};\n\nvoid setup(void){\n Serial.begin(9600);\n radio.begin();\n radio.setAutoAck(false);\n radio.openReadingPipe(1,pipe);\n radio.startListening();\n}\n\nvoid loop(void){\n recieve();\n Serial.println(Recieved_Data.sensor1);\n \n}\n\n\n\nvoid recieve(){\n radio.openWritingPipe(addresses[1]);\n radio.openReadingPipe(1,addresses[0]);\n radio.startListening(); \n if(radio.available()){\n while(radio.available()){ \n radio.read(&Recieved_Data, sizeof(MsgData)); //byte value\n delay(5);\n }\n }\n return;\n}\n\nvoid transmit(MsgData Transmit_Msg){\n radio.openWritingPipe(addresses[0]);\n radio.openReadingPipe(1,addresses[1]);\n radio.stopListening();\n //unsigned long msg = value;\n for(byte i=0; i<15; i++){ \n radio.write(&Transmit_Msg, sizeof(MsgData));\n delay(5);\n }\n}\n/*\n\nunsigned long recieve(){\n radio.openWritingPipe(addresses[1]);\n radio.openReadingPipe(1,addresses[0]);\n radio.startListening(); \n\n if(radio.available()){\n while(radio.available()){ \n \n radio.read(&msg, sizeof(unsigned long)); //byte value\n //Serial.println(msg);\n delay(5);\n }\n }\n return msg;\n}\n\nvoid transmit(unsigned long value){\n radio.openWritingPipe(addresses[0]);\n radio.openReadingPipe(1,addresses[1]);\n radio.stopListening();\n unsigned long msg = value;\n for(byte i=0; i<15; i++){ \n\n radio.write(&msg, sizeof(unsigned long));\n delay(5);\n }\n}\n*/\n"
},
{
"alpha_fraction": 0.6080760359764099,
"alphanum_fraction": 0.6247031092643738,
"avg_line_length": 15.84000015258789,
"blob_id": "fc3bb9818f658419a561f3bbb4364fc8b3ffe383",
"content_id": "1b37e2ea7c2e73186f30a1fa6abea8e542cb9158",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 421,
"license_type": "permissive",
"max_line_length": 67,
"num_lines": 25,
"path": "/MIDI-Arduino-2-master/LED.h",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "/*\n * Created by Keegan Dahm on 4/8/15.\n */\n\n#ifndef MIDI_ARDUINO_2_LED_H\n#define MIDI_ARDUINO_2_LED_H\n\n#include <Arduino.h>\n\n\nclass LED {\nprivate:\n unsigned int red, green, blue;\n \npublic:\n LED();\n LED(unsigned int red);\n LED(unsigned int red, unsigned int green, unsigned int blue);\n\n void set(bool red = true, bool green = true, bool blue = true);\n void reset();\n};\n\n\n#endif /* MIDI_ARDUINO_2_LED_H */\n"
},
{
"alpha_fraction": 0.5083752274513245,
"alphanum_fraction": 0.5632328391075134,
"avg_line_length": 25.21977996826172,
"blob_id": "8eafd88286274c8cf1d3593e7cf1b22858616c37",
"content_id": "6583b4725976d107e1a136776f299a2febafeb35",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2388,
"license_type": "permissive",
"max_line_length": 148,
"num_lines": 91,
"path": "/test/test.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include <CapacitiveSensor.h>\n#include \"LPD8806.h\"\n\n/*\n * CapitiveSense Library Demo Sketch\n * Paul Badger 2008\n * Uses a high value resistor e.g. 10M between send pin and receive pin\n * Resistor effects sensitivity, experiment with values, 50K - 50M. Larger resistor values yield larger sensor values.\n * Receive pin is the sensor pin - try different amounts of foil/metal on this pin\n */\nint nLEDs = 12;\nint dataPin = 5;\nint clockPin = 6;\nLPD8806 strip = LPD8806(nLEDs, dataPin, clockPin);\nint low= 15;\nint med =30;\nint high =200;\n\nCapacitiveSensor cs_4_2 = CapacitiveSensor(4,2); // 10M resistor between pins 4 & 2, pin 2 is sensor pin, add a wire and or foil if desired\nCapacitiveSensor cs_4_6 = CapacitiveSensor(4,6); // 10M resistor between pins 4 & 6, pin 6 is sensor pin, add a wire and or foil\nCapacitiveSensor cs_4_8 = CapacitiveSensor(4,8); // 10M resistor between pins 4 & 8, pin 8 is sensor pin, add a wire and or foil\n\nvoid setup() \n{\n cs_4_2.set_CS_AutocaL_Millis(0xFFFFFFFF); // turn off autocalibrate on channel 1 - just as an example\n Serial.begin(9600);\n strip.begin();\n strip.show(); \n}\n\nvoid loop() \n{\n String state;\n long cap = cs_4_2.capacitiveSensor(10);\n \n if(cap < low){\n state = \"low\";\n for(int i=0;i<nLEDs;i++){\n strip.setPixelColor(i,strip.Color(0,0,155));\n }\n strip.show();\n }\n if(cap > low+1 && cap < med){\n state = \"med\";\n for(int i=0;i<nLEDs;i++){\n strip.setPixelColor(i,strip.Color(0,155,0));\n }\n strip.show();\n }\n if(cap > high){\n state = \"high\";\n for(int i=0;i<nLEDs;i++){\n strip.setPixelColor(i,strip.Color(255,0,0));\n }\n strip.show();\n \n }\n\n\n Serial.println(state);\n\n\n \n}\n\n\n\n\nuint32_t Wheel(uint16_t WheelPos)\n{\n byte r, g, b;\n switch(WheelPos / 128)\n {\n case 0:\n r = 127 - WheelPos % 128; //Red down\n g = WheelPos % 128; // Green up\n b = 0; //blue off\n break; \n case 1:\n g = 127 - WheelPos % 128; //green down\n b = WheelPos % 128; //blue up\n r = 0; //red off\n break; \n case 2:\n b = 127 - WheelPos % 128; //blue down \n r = WheelPos % 128; //red up\n g = 0; //green off\n break; \n }\n return(strip.Color(r,g,b));\n}\n\n\n"
},
{
"alpha_fraction": 0.5664950013160706,
"alphanum_fraction": 0.6108754873275757,
"avg_line_length": 27.700000762939453,
"blob_id": "b78add574695296868c7f438d3be162c566198de",
"content_id": "cb44c04b56dcf70eaf5fa8034218f95fcb9753ff",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 6602,
"license_type": "permissive",
"max_line_length": 139,
"num_lines": 230,
"path": "/RFID_test_1.0/RFID_test_1.0.ino/RFID_test_1.0.ino.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include <Wire.h>\n#include <SPI.h>\n#include <Adafruit_PN532.h>\n#include <Wire.h>\n#include \"rgb_lcd.h\"\n\n\n#include <SparkFun_ADXL345.h> // SparkFun ADXL345 Library\n\n/*********** COMMUNICATION SELECTION ***********/\n/* Comment Out The One You Are Not Using */\nADXL345 adxl = ADXL345(10); // USE FOR SPI COMMUNICATION, ADXL345(CS_PIN);\n//ADXL345 adxl = ADXL345(); // USE FOR I2C COMMUNICATION\n\nint xglobal, yglobal, zglobal;\n\nString connectConfirm;\nint count = 0;\nbool state_on_off = 1;\nbool spiked = 0;\n\nint ledPin4 = 4; // LED connected to digital pin 13\nint ledPin5 = 5;\n\nrgb_lcd lcd;\n// If using the breakout with SPI, define the pins for SPI communication.\n#define PN532_SCK (2)\n#define PN532_MOSI (3)\n#define PN532_SS (4)\n#define PN532_MISO (5)\n\n// If using the breakout or shield with I2C, define just the pins connected\n// to the IRQ and reset lines. Use the values below (2, 3) for the shield!\n#define PN532_IRQ (2)\n#define PN532_RESET (3) // Not connected by default on the NFC Shield\n\n// Uncomment just _one_ line below depending on how your breakout or shield\n// is connected to the Arduino:\n\n// Use this line for a breakout with a software SPI connection (recommended):\n//Adafruit_PN532 nfc(PN532_SCK, PN532_MISO, PN532_MOSI, PN532_SS);\n\n// Use this line for a breakout with a hardware SPI connection. Note that\n// the PN532 SCK, MOSI, and MISO pins need to be connected to the Arduino's\n// hardware SPI SCK, MOSI, and MISO pins. On an Arduino Uno these are\n// SCK = 13, MOSI = 11, MISO = 12. The SS line can be any digital IO pin.\n//Adafruit_PN532 nfc(PN532_SS);\n\n// Or use this line for a breakout or shield with an I2C connection:\nAdafruit_PN532 nfc(PN532_IRQ, PN532_RESET);\n\n#if defined(ARDUINO_ARCH_SAMD)\n// for Zero, output on USB Serial console, remove line below if using programming port to program the Zero!\n// also change #define in Adafruit_PN532.cpp library file\n #define Serial SerialUSB\n#endif\n\nvoid setup(void) {\n\n \n \n // Serial.begin(9600);\n // set up the LCD's number of columns and rows:\n lcd.begin(16, 2);\n // Print a message to the LCD.\n lcd.print(\"\");\n lcd.clear();\n Drink_Locked_LCD();\n #ifndef ESP8266\n while (!Serial); // for Leonardo/Micro/Zero\n #endif\n Serial.begin(9600);\n pinMode(ledPin4,OUTPUT);\n pinMode(ledPin5,OUTPUT);\n nfc.begin();\n\n uint32_t versiondata = nfc.getFirmwareVersion();\n if (! versiondata) {\n // Serial.print(\"Didn't find PN53x board\");\n while (1); // halt\n }\n // Got ok data, print it out!\n // Serial.print(\"Found chip PN5\"); Serial.println((versiondata>>24) & 0xFF, HEX); \n// Serial.print(\"Firmware ver. 
\"); Serial.print((versiondata>>16) & 0xFF, DEC); \n// Serial.print('.'); Serial.println((versiondata>>8) & 0xFF, DEC);\n \n // configure board to read RFID tags\n nfc.SAMConfig();\n \n // Serial.println(\"Waiting for an ISO14443A Card ...\");\n adxl.powerOn(); // Power on the ADXL345\n\n adxl.setRangeSetting(2); // Give the range settings\n // Accepted values are 2g, 4g, 8g or 16g\n // Higher Values = Wider Measurement Range\n // Lower Values = Greater Sensitivity\n\n adxl.setSpiBit(0); // Configure the device to be in 4 wire SPI mode when set to '0' or 3 wire SPI mode when set to 1\n // Default: Set to 1\n // SPI pins on the ATMega328: 11, 12 and 13 as reference in SPI Library \n \n adxl.setActivityXYZ(1, 1, 1); // Set to activate movement detection in the axes \"adxl.setActivityXYZ(X, Y, Z);\" (1 == ON, 0 == OFF)\n adxl.setActivityThreshold(70); // 62.5mg per increment // Set activity // Inactivity thresholds (0-255)\n\n\n \n}\n\n\nvoid loop(void) {\n\n int x,y,z;\n int initialX, initialY, initialZ;\n int actX, actY, actZ; \n\n\n \n static uint8_t success;\n uint8_t uid[] = { 0, 0, 0, 0, 0, 0, 0 }; // Buffer to store the returned UID\n uint8_t uidLength; // Length of the UID (4 or 7 bytes depending on ISO14443A card type)\n bool good;\n//Serial.println(\"A\");\n\nsuccess = nfc.readPassiveTargetID(PN532_MIFARE_ISO14443A, uid, &uidLength,20);\n\n while(!success){\n digitalWrite(ledPin4, HIGH); //Push Out\n digitalWrite(ledPin5, HIGH);// sets the LED on\n\n delay(100); // waits for a second\n digitalWrite(ledPin4, LOW); //Holds\n digitalWrite(ledPin5, HIGH);// sets the LED on\n\n delay(2000);\n //Serial.println(\"B\");\n if(spiked == 0){\n Drink_Locked_LCD();\n }\n adxl.readAccel(&x, &y, &z); // Read the accelerometer values and store them in variables declared above x,y,z\n \n while(Serial.available())\n {\n\n }\n if(state_on_off == 0){\n if(abs(xglobal - x) > 40){\n Serial.println(\"Activity X\");\n //Serial.println(x); //debug code\n Drink_Tampered_LCD();\n spiked = 1;\n }\n \n if(abs(yglobal - y) > 40){\n Serial.println(\"Activity Y\");\n //Serial.println(x); //debug code\n Drink_Tampered_LCD();\n spiked = 1;\n }\n \n if(abs(zglobal - z) > 40){\n Serial.println(\"Activity Z\");\n //Serial.println(x); //debug code\n Drink_Tampered_LCD();\n spiked = 1;\n }\n }\n\n xglobal = x;\n yglobal = y;\n zglobal = z;\n\n state_on_off = 0;\n success = nfc.readPassiveTargetID(PN532_MIFARE_ISO14443A, uid, &uidLength,20);\n\n\n\n \n }\n //Serial.println(\"C\");\n\n while(success){\n digitalWrite(ledPin4, LOW); //Pulls in\n digitalWrite(ledPin5, LOW); // sets the LED off\n \n delay(100); // waits for a second\n digitalWrite(ledPin4, LOW); //Holds\n digitalWrite(ledPin5, HIGH);// sets the LED on\n success = nfc.readPassiveTargetID(PN532_MIFARE_ISO14443A, uid, &uidLength,20);\n Drink_Good_LCD();\n // Display some basic information about the card\n // Serial.println(\"Found an ISO14443A card\");\n // Serial.print(\" UID Length: \");Serial.print(success, DEC);Serial.println(\" bytes\");\n // Serial.print(\" UID Value: \");\n //nfc.PrintHex(uid, uidLength);\n // Serial.println(\"\");\n delay(50);\n state_on_off = 1;\n spiked = 0;\n }\n \n \n}\n\nvoid Drink_Good_LCD()\n{\n lcd.setRGB(0, 200, 0);\n lcd.clear();\n lcd.print(\" Cheers!\");\n delay(2000);\n}\n\nvoid Drink_Tampered_LCD()\n{\n\n lcd.setRGB(254, 0, 0);\n lcd.clear();\n lcd.print(\" Mug Tampered\");\n delay(2000);\n\n}\n\nvoid Drink_Locked_LCD()\n{\n\n lcd.setRGB(0, 0, 254);\n lcd.clear();\n lcd.print(\" Mug Locked\");\n delay(2000);\n\n}\n\n"
},
{
"alpha_fraction": 0.5814931392669678,
"alphanum_fraction": 0.6403785347938538,
"avg_line_length": 22.774999618530273,
"blob_id": "d948a8b1288c482c142fb5f2286e3ef5f1e1314b",
"content_id": "b8c5c3a5736ebf9ec3e8fb9a517187e59c64f220",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 951,
"license_type": "permissive",
"max_line_length": 96,
"num_lines": 40,
"path": "/Ferro_Music/Ferro_Music.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include <SoftwareSerial.h>\n\nstatic long timeUntilTurnOff = 5000; // max value: 2147483647;\n\nlong millisAtLastRead = 0;\nlong timeSinceLastRead = 0;\nlong recieve = 0;\nint out_pin = 9;\n\nvoid setup() {\n // put your setup code here, to run once:\n pinMode(out_pin, OUTPUT);\n analogWrite(out_pin, 0);\n Serial.begin(600);//115200);\n}\n\nvoid loop() {\n if (Serial.available() > 0) {\n recieve = (log(Serial.read()) / log(1.05)) + 155; // becomes log[base 1.05](Serial.read())\n if (recieve >= 255) {\n recieve = 255;\n }\n \n analogWrite(out_pin, recieve);\n millisAtLastRead = millis();\n } else if (timeSinceLastRead < timeUntilTurnOff) {\n timeSinceLastRead = millis() - millisAtLastRead;\n }\n\n if (timeSinceLastRead >= timeUntilTurnOff) {\n analogWrite(out_pin, 0);\n }\n}\n\n// exp(0.075 * Serial.read()) seems to produce best results\n// 1.75 * Serial.read() // 4 spectrumGroups\n\n//void loop() {\n// analogWrite(out_pin, 255);\n//}\n"
},
{
"alpha_fraction": 0.4570123553276062,
"alphanum_fraction": 0.5244492292404175,
"avg_line_length": 15.616071701049805,
"blob_id": "4e6ae757a94159bc92874055dc11bf27978a0e32",
"content_id": "0c75ed69e76d744615d3c35df0dc942e280c9085",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3722,
"license_type": "permissive",
"max_line_length": 69,
"num_lines": 224,
"path": "/Servo_Arm_Digital_Women_Spring_2018/Servo_Arm_Digital_Women_Spring_2018.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include <Servo.h> \n// Declare the Servo pin \nint pin0 = 3; //rotation of the bottom\nint pin1 = 5; //bottom servo\nint pin2 = 6; //middle\nint pin3 = 9; //top\nint pin4 = 10; //claw\n// Create a servo object \nServo Servo0; //rotation at the bottom\nServo Servo1; //bottom servo\nServo Servo2; //middle\nServo Servo3; //top\nServo Servo4; //claw\n\nint Max0 = 179;\nint Min0 = 0;\nint Max1 = 90;\nint Min1 = 40;\nint Max2 = 45;\nint Min2 = 0;\nint Max3 = 90;\nint Min3 = 15;\nint Max4 = 180;\nint Min4 = 0;\n\nint pos0 = 90;\nint pos1 = 90;\nint pos2 = 0;\nint pos3 = 90;\nint pos4 = 0;\n\n//reading information\nint arr[3];\nint x; //reading rotation\nint y; //reading height\nint state; //reading open or close\nint serialln = 0;\n\nint xLast;\nint yLast;\nint stateLast;\n\ndouble yAverage;\n\nint TOL = 1; //tolorance\nint TOLX = 5; //tolorance\n\nint movingarr[4];\n\nvoid setup() { \n\n Serial.begin(9600);\n \n // We need to attach the servo to the used pin number \n Servo0.attach(pin0); // 0 - 180 \n Servo1.attach(pin1);// 40 - 90 //low is down\n Servo2.attach(pin2);// 0 - 45 //high is down\n Servo3.attach(pin3);// 15 -90 //low is down\n Servo4.attach(pin4); //0-180 //low is open\n\n //set in starting up position \n Servo0.write(pos0);\n Servo1.write(pos1); \n Servo2.write(pos2);\n Servo3.write(pos3);\n Servo4.write(pos4);\n\n delay(1000);\n}\n\n\nvoid loop(){ \n\n if(Serial.available())\n {\n readIncomingBytes();\n }\n\n switch(state){\n case 0: //REST\n if( (y > (yLast + TOL)) || (y < (yLast - TOL)) ){\n MoveArm();\n }\n break;\n \n case 1: //Fingers Spread, Moves Y axis\n pos4 = 0;\n Servo4.write(pos4);\n break;\n\n \n case 2: //Closed Fist, Moves Y axis\n pos4 = 180;\n Servo4.write(pos4);\n break;\n \n case 3: //Wave out moves X axis\n if(x >=75)\n {\n pos0 +=5;\n if(pos0 >= 180)\n {\n pos0 = 180;\n }\n }\n if(x <= 3)\n {\n pos0 -=5;\n if(pos0 <= 0)\n {\n pos0 = 0;\n }\n }\n Servo0.write(pos0);\n\n break;\n }\n \n \n delay(40);\n xLast = x;\n yLast = y;\n stateLast = state;\n \n\n //MoveDOWN();\n \n}\n\n\nvoid MoveArm()\n{\n \n double moveAmount1 = Max1+Min1-((y * (Max1 - Min1)/180.0) + Min1);\n double moveAmount2 = ((y * (Max2 - Min2)/180.0) + Min2);\n double moveAmount3 = Max3+Min3-((y * (Max3 - Min3)/180.0) + Min3);\n // Servo0.write(moveAmount0); \n Servo1.write(moveAmount1);\n Servo2.write(moveAmount2);\n Servo3.write(moveAmount3);\n\n//double moveAmount1 = y*(Max1-Min1)/180;\n\n\n\n\n\n}\n\nvoid MoveDOWN(){\n\n if(pos1 > Min1){\n pos1 = pos1 - 2;\n Servo1.write(pos1); \n }\n if(pos2 < Max2){\n pos2 = pos2 + 2;\n Servo2.write(pos2);\n }\n if(pos3 > Min3){\n pos3 = pos3 - 3;\n Servo3.write(pos3);\n }\n\n delay(70);\n}\n\nvoid MoveUP(){\n\n if(pos1 < Max1){\n pos1 = pos1 + 2;\n Servo1.write(pos1); \n }\n if(pos2 > Min2){\n pos2 = pos2 - 2;\n Servo2.write(pos2);\n }\n if(pos3 < Max3){\n pos3 = pos3 + 3;\n Servo3.write(pos3);\n }\n\n delay(70);\n}\n\nvoid readIncomingBytes() {\n while (Serial.available() > 0)\n {\n int incomingByte = Serial.read();\n if (incomingByte == (int)'<')\n {\n serialln = 0;\n\n }\n else if (incomingByte == (int)'>')\n {\n x = arr[0];\n y = arr[1];\n state = arr[2];\n return;\n }\n else\n {\n arr[serialln] = incomingByte;\n serialln++;\n }\n }\n /*\n //moving average\n int total = 0;\n int arrNew[4]; \n for(int i = 0; i< 4-1; i++)\n {\n arrNew[i+1] = movingarr[i];\n }\n arrNew[0] = y; \n for(int i = 0 ; i< 4-1; i++)\n {\n total = total + arrNew[i]; \n }\n y = total/4;\n */\n \n}\n"
},
{
"alpha_fraction": 0.48064085841178894,
"alphanum_fraction": 0.5313751697540283,
"avg_line_length": 17.112903594970703,
"blob_id": "dd692dff57a96cf5489e451bd042ca85bf8cbe4c",
"content_id": "0d57cc8953d9cf2f1ae06d2a606074c7b261577c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2247,
"license_type": "permissive",
"max_line_length": 65,
"num_lines": 124,
"path": "/Processing_to_Arduino_Think/Processing_to_Arduino_Think.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "\nsigned int val = 0;\n\nint led = 9; // the PWM pin the LED is attached to\nbyte red_p = 3;\nbyte green_p = 5;\nbyte blue_p = 6;\n\nunsigned int red = 0;\nunsigned int green = 0;\nunsigned int blue = 0;\n\nint unfilt_num = 0;\nint output = 0;\nbyte once = 0;\n\nint switch_val = 2;\n\nint mavg[20]={0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0};\n\n// the setup routine runs once when you press reset:\nvoid setup() {\n // declare pin 9 to be an output:\n Serial.begin(9600);\n pinMode(led, OUTPUT);\n}\n\n// the loop routine runs over and over again forever:\nvoid loop() {\n // set the brightness of pin 9:\n int var=45; //changes with person\n if (Serial.available()) \n { // If data is available to read,\n // // read it and store it in val\n\n switch (switch_val){\n case 0:\n for(int i=0; i<19; i++)\n {\n mavg[i]=mavg[i+1];\n }\n mavg[19]=Serial.read();\n \n for(int j=0; j<20; j++)\n {\n val+=mavg[j];\n }\n val/=20;\n\n if(val < (var/2)){\n red = 255*((var/2)-val+5)/(var/2);\n green = 255*(val-5)/(var/2);\n blue = 0;\n }\n else{\n red = 0;\n blue = 255* (val-(var/2))/(var/2);\n green = 255* (var-val)/(var/2);\n }\n\n\n analogWrite(blue_p, blue);\n analogWrite(green_p, green);\n analogWrite(red_p, red);\n delay(3);\n break;\n\n case 1:\n //Steve's Values\n val = Serial.read(); \n if(val>= 35){\n val=255;\n analogWrite(led, val);\n //delay(500);\n }\n else{\n val=0;\n analogWrite(led, val);\n }\n delay(3);\n break;\n\n case 2:\n //Steve's Values\n val = Serial.read(); \n if(once == 0){\n if(val>= 48){\n once = 1;\n val=255;\n analogWrite(led, val);\n delay(800);\n analogWrite(led, 0);\n }\n else{\n val=0;\n analogWrite(led, val);\n }\n delay(3);\n }\n if (val < 17){\n once = 0;\n }\n\n break;\n \n }\n\n \n \n }\n /*\n analogWrite(led, brightness);\n\n // change the brightness for next time through the loop:\n brightness = brightness + fadeAmount;\n\n // reverse the direction of the fading at the ends of the fade:\n if (brightness == 0 || brightness == 255) {\n fadeAmount = -fadeAmount ;\n }\n // wait for 30 milliseconds to see the dimming effect\n delay(30);\n*/\n \n}\n"
},
{
"alpha_fraction": 0.560562014579773,
"alphanum_fraction": 0.5838178396224976,
"avg_line_length": 20.0510196685791,
"blob_id": "260bf11239fab022b91adb6674d3e11e6ad98700",
"content_id": "c75f8be6416ca9d546a1fc05f469acfd8ea6f005",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2064,
"license_type": "permissive",
"max_line_length": 108,
"num_lines": 98,
"path": "/Micron_Tim_Steve_Desk_LEDS_1_1/Micron_Tim_Steve_Desk_LEDS.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include <SPI.h>\n#include \"RF24.h\"\n\n\n\nRF24 radio(7,8);\nconst uint64_t pipe = 0xE8E8F0F0E1LL; //channel to receive\nbyte addresses[][6] = {\"1Node\",\"2Node\"};\n\n\n\n\n\ntypedef struct {\n char ID[10];\n byte Butt;\n}MsgData;\n\n\n//UNcomment if your name is Steve\nMsgData MY_LED_DATA = {\"Steve\", 0};\nMsgData HIS_LED_DATA = {\"Tim\", 0};\n\n\n//UNcomment if your name is Tim\n//MsgData MY_LED_DATA = {\"Tim\", 0};\n//MsgData HIS_LED_DATA = {\"Steve\", 0};\n\n//UNcomment if your name is Steve\nchar My_Name[] = \"Steve\"; \nchar His_Name[] = \"Tim\";\n\n//UNcomment if your name is Tim\n//My_Name[] = \"Tim\";\n//His_Name[] = \"Steve\";\n\nint ledPin = 13; // choose the pin for the LED\n\n\nvoid setup(void){\n attachInterrupt(0, interrupt, RISING);\n Serial.begin(9600);\n radio.begin();\n radio.setAutoAck(false);\n radio.openReadingPipe(1,pipe);\n radio.startListening();\n}\n\n\nvoid interrupt(){\n MY_LED_DATA.Butt = 1;\n transmit(MY_LED_DATA);\n Serial.println(\"Transmitted!!!!!!!!!!!!!!!!!!\");\n \n}\n\nvoid loop(void){\n\n receive();\n MY_LED_DATA.Butt = 0;\n\n if(HIS_LED_DATA.ID == His_Name && HIS_LED_DATA.Butt == 1){ //Turn on if is an expected signal\n Serial.println(\"Recieved one!\");\n digitalWrite(ledPin, HIGH);\n delay(100);\n digitalWrite(ledPin, LOW);\n HIS_LED_DATA.Butt = 0;\n }\n \n}\n\n\n\n\n \nvoid receive(){ //Recieve Data from another node\n radio.openWritingPipe(addresses[1]);\n radio.openReadingPipe(1,addresses[0]);\n radio.startListening(); \n if(radio.available()){\n while(radio.available()){ \n radio.read(&HIS_LED_DATA, sizeof(MsgData)); //byte value\n delay(5);\n }\n }\n return;\n}\n\nvoid transmit(MsgData Transmit_Msg){ //Transmit Data to Another Node\n radio.openWritingPipe(addresses[0]);\n radio.openReadingPipe(1,addresses[1]);\n radio.stopListening();\n //unsigned long msg = value;\n for(byte i=0; i<5; i++){ \n radio.write(&Transmit_Msg, sizeof(MsgData));\n delay(5);\n }\n}\n\n"
},
{
"alpha_fraction": 0.6069730520248413,
"alphanum_fraction": 0.6196513175964355,
"avg_line_length": 14.774999618530273,
"blob_id": "c4c756af4da4f133dbda0bc530090f140b1e034f",
"content_id": "0707a34795ab1a0ecedd0d959f6421ec702faded",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 631,
"license_type": "permissive",
"max_line_length": 49,
"num_lines": 40,
"path": "/MIDI-Arduino-2-master/SerialStream.cpp",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "/*\n * Created by Keegan Dahm on 4/9/15.\n */\n\n#define MIN_SIZE 8\n\n#include \"SerialStream.h\"\n\n#include <stdint.h>\n\nSerialStream::SerialStream(unsigned int length) {\n q = new Queue<uint8_t>(length);\n}\n\nSerialStream::~SerialStream() {\n delete q;\n}\n\nbool SerialStream::buffer() {\n while (Serial.available())\n if (!q->enqueue(Serial.read()))\n return false;\n return true;\n}\n\nunsigned int SerialStream::available() {\n return q->space_occupied();\n}\n\nuint8_t SerialStream::read() {\n return q->dequeue();\n}\n\nuint8_t SerialStream::peek() {\n return q->peek();\n}\n\nvoid SerialStream::flush() {\n q->clear();\n}\n"
},
{
"alpha_fraction": 0.6493212580680847,
"alphanum_fraction": 0.6681749820709229,
"avg_line_length": 21.658119201660156,
"blob_id": "b4068afc5a935e92254df2c736e8c74fc56e8f50",
"content_id": "8b6e92dc52304b90967e63310d0743cfc5a23891",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2652,
"license_type": "permissive",
"max_line_length": 166,
"num_lines": 117,
"path": "/EE_333_Project_3/EE_333_Project_3.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include \"DHT.h\"\n#include \"LiquidCrystal.h\"\n\nLiquidCrystal lcd(0,1,7,8,9,10);\n\n#define DHTPIN 2\n#define DHTTYPE DHT22\n#define outputA 3\n#define outputB 4\n\nDHT dht(DHTPIN, DHTTYPE);\n\nint counter = 0;\nint aState;\nint aLastState;\nfloat f_temp;\nbyte actuator_state;\n\nint desired_temp = 69;//The desired Temperature defined by the rotary encoder. \nint temp_range=2; //temperature can be 2 below or above desired value before the thermostat changes.\n\n\nvoid setup() {\n // put your setup code here, to run once:\n lcd.begin(16,2);\n pinMode(outputA,INPUT);\n pinMode(outputB,INPUT);\n pinMode(5,OUTPUT);\n pinMode(6,OUTPUT);\n Serial.begin(9600);\n dht.begin();\n aLastState = digitalRead(outputA);\n digitalWrite(5,LOW);\n digitalWrite(6,LOW);\n \n}\n\nvoid loop() {\n delay(10); //pause for a moment before each run-through.\n Check_Rotary_Encoder();//Checks position of encoder\n Check_Temp(); //checks the temp and adjusts the actuator accordingly\n LCD_resfresh(); //Refreshes LCD screen with the newest info\n \n\n \n\n}\n\n//Checks temperature and moves actuator\nvoid Check_Temp(){\n f_temp=dht.readTemperature(true);\n if (f_temp+temp_range > desired_temp){\n AC_cool();\n }\n else if (f_temp-temp_range < desired_temp){\n AC_heat();\n }\n else{\n //do nothing if we are in the correct Temperature. \n }\n \n}\n\n//Turns on the AC\nvoid AC_cool(){\n if(actuator_state == 1){ //if statement prevents from toggling the actuator constantly. if in the state, don't change it unless the actuator is in the wrong state. \n digitalWrite(5,HIGH);\n digitalWrite(6,LOW);\n delay(50);\n digitalWrite(5,LOW);\n digitalWrite(6,LOW);\n }\n actuator_state = 0;\n}\n\n//Turns off the AC (heating because room heats up on its own)\nvoid AC_heat(){\n if(actuator_state == 0){\n digitalWrite(5,LOW);\n digitalWrite(6,HIGH);\n delay(50);\n digitalWrite(5,LOW);\n digitalWrite(6,LOW);\n }\n actuator_state = 1;\n}\n\nvoid LCD_resfresh(){\n lcd.clear();\n lcd.print(\"Temp: \");\n lcd.print(f_temp);\n lcd.print(\"F\");\n\n lcd.setCursor(0,1);\n lcd.print(\"Desired: \");\n lcd.print(desired_temp);\n lcd.print(\"F\");\n \n}\n\n//Calculates the position of the rotary encoder\nvoid Check_Rotary_Encoder(){\n aState=digitalRead(outputA); //Read current state at A\n if (aState != aLastState){\n if (digitalRead(outputB) != aState){\n counter++;\n }\n else{\n counter--;\n }\n }\n desired_temp = desired_temp + counter; //Calculate the temp with the offset so we don't have to scroll the wheel 69 times before it starts working. \n Serial.print(\"Temperature: \");\n Serial.println(desired_temp);\n aLastState = aState; //update the state\n \n}\n\n"
},
{
"alpha_fraction": 0.5460897088050842,
"alphanum_fraction": 0.5787694454193115,
"avg_line_length": 19.632558822631836,
"blob_id": "5960243a503d29984adfc111492c495356f7a9d3",
"content_id": "f82e3838999b80ced0f0710f4a16f7a74523f200",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4437,
"license_type": "permissive",
"max_line_length": 105,
"num_lines": 215,
"path": "/longboardHackISU/longboardHackISU.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include \"LPD8806.h\"\n#include \"SPI.h\" \n\nint dataPin = 2;\nint clockPin = 3;\n\nint nLEDs = 66;\nint bluetooth = 0;\n#define leftb 0\n#define leftt 29\n#define backb 30\n#define backt 34\n#define rightb 66\n#define rightt 35\n\nint velocity = 0;\nshort color=120;\nshort red, green, blue, led;\n \nLPD8806 strip = LPD8806(nLEDs, dataPin, clockPin);\n\n\nvoid setup() {\n strip.begin();\n strip.show();\n // Serial.begin(9600);\n\n}\n\n\n\nvoid loop() {\n\ncolorChase(strip.Color(127, 0, 0), 50, leftb, leftt, rightb, rightt); // Red Light Rail\ncolorChase(strip.Color(127, 127, 0), 50, leftb, leftt, rightb, rightt); // Yellow\n\nswitch(bluetooth)\n case 0:\n for(int i=0; i<strip.numPixels(); i++){\n strip.setPixelColor(i, 0); // Set new pixel 'on'\n strip.show(); // Refresh LED states\n } break;\n \n\n \n case 2: \n leftturn(leftb, leftt, 100);\n break;\n \n case 3:\n rightturn(rightb, rightt, 100);\n break;\n \n case 4: \n brake(backb, backt, 100); \n break;\n \n case 5:\n caution(100);\n break;\n \n case 6:\n off();\n break;\n \n case 7:\n singleled(led, red, green, blue);\n break;\n\n case 8:\n setall(red, green, blue);\n break;\n \n \n \n \n \n \n// colorspeed(velocity, leftb, leftt, rightb, rightt, backb, backt, color);\n \n\n \n \n \n \n \n \n}\n\n\n\n\n\n/*\nvoid colorspeed(short v,short bottoml,short topl,short bottomr,short topr,short bottomb,short bottomt,c){\n int count = 0;\n float tdistance;\n float instd = v*(.03/9600);\n distance = instd + distance;\n if(distance % .03){\n count = count +1;\n strip.setPixelColor(count-1, 0); // Erase pixel, but don't refresh!\n strip.setPixelColor(66-count+1, 0); // Erase pixel, but don't refresh!\n strip.setPixelColor(count, c); // Set new pixel 'on'\n strip.setPixelColor(66-count,c);\n strip.show(); // Refresh LED states\n \n }\n\n}\n*/\n\n\nvoid colorChase(uint32_t c, uint8_t wait, short bottoml, short topl, short bottomr, short topr) {\n int i;\n\n // Start by turning all pixels off:\n for(i=0; i<strip.numPixels(); i++) strip.setPixelColor(i, 0);\n\n // Then display one pixel at a time:\n for(i=bottoml; i<=topl; i++) {\n strip.setPixelColor(i, c); // Set new pixel 'on'\n strip.setPixelColor(66-i,c);\n strip.show(); // Refresh LED states\n strip.setPixelColor(i, 0); // Erase pixel, but don't refresh!\n strip.setPixelColor(66-i, 0); // Erase pixel, but don't refresh!\n delay(wait);\n }\n}\n\n\n\nvoid leftturn(short bottom,short top, short wait){\n int i;\n for(i=bottom; i<=top; i++) {\n strip.setPixelColor(i, 127,0,0); // Set new pixel 'on'\n strip.show(); // Refresh LED states\n }\ndelay(wait+200);\n for(i=bottom; i<=top; i++) {\n strip.setPixelColor(i, 0); // Set new pixel 'on'\n strip.show(); // Refresh LED states\n }\ndelay(wait);\n}\n \n \nvoid rightturn(short bottom,short top, short wait){\n int i;\n for(i=bottom; i>=top; i--) {\n strip.setPixelColor(i, 127,0,0); // Set new pixel 'on'\n strip.show(); // Refresh LED states\n }\ndelay(wait+200);\n for(i=bottom; i>=top; i--) {\n strip.setPixelColor(i, 0); // Set new pixel 'on'\n strip.show(); // Refresh LED states\n }\ndelay(wait);\n}\n\n\nvoid brake(short bottom,short top, short wait){\n int i;\n for(i=bottom; i<=top; i++) {\n strip.setPixelColor(i, 127,0,0); // Set new pixel 'on'\n strip.show(); // Refresh LED states\n }\ndelay(wait+200);\n for(i=bottom; i<=top; i++) {\n strip.setPixelColor(i, 0); // Set new pixel 'on'\n strip.show(); // Refresh LED states\n }\ndelay(wait);\n}\n\n\nvoid caution(short wait){\n int i;\n for(i=0; i<=strip.numPixels(); i++) {\n if(i%2){\n 
strip.setPixelColor(i, 127,0,0); // Set new pixel 'on'\n // Refresh LED states\n }\n }\n strip.show(); \ndelay(wait+200);\n for(i=0; i<=strip.numPixels(); i++) {\n strip.setPixelColor(i, 0); // Set new pixel 'on'\n // Refresh LED states\n }\n strip.show(); \ndelay(wait);\n}\n\nvoid off(){\n int i;\n for(i=0; i<strip.numPixels(); i++) {\n strip.setPixelColor(i, 0); // Set new pixel 'on'\n }\n strip.show();\n}\n\nvoid singleled(short num, short r, short g, short b){\n strip.setPixelColor(num,r,g,b);\n \n}\n\nvoid setall(short r, short g, short b){\n int i;\n for(i=0; i<strip.numPixels(); i++) {\n strip.setPixelColor(i,r,g,b); // Set new pixel 'on'\n }\n strip.show();\n}\n\n"
},
{
"alpha_fraction": 0.6174863576889038,
"alphanum_fraction": 0.6502732038497925,
"avg_line_length": 17.931034088134766,
"blob_id": "4fb950c04e81dc2afe2cd061ec7efe23ccaf48fc",
"content_id": "6776bc5584fc864779d842516a698194c3a5e287",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 549,
"license_type": "permissive",
"max_line_length": 101,
"num_lines": 29,
"path": "/Longboard2.0/Longboard2.0/Bluetooth/Bluetooth.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include \"LPD8806.h\"\n#include \"SPI.h\"\n#include \"SoftwareSerial.h\"\n\nSoftwareSerial mySerial(0, 1); // RX, TX\nint nLEDs = 20;\n\n//int dataPinRight = 4;\n//int clockPinRight = 5;\n\nvoid setup() {\n // put your setup code here, to run once: // Open serial communications and wait for port to open:\n Serial.begin(9600);\n while(!Serial){;}\n mySerial.begin(9600);\n mySerial.println(\"JUST DO IT\");\n}\n\nvoid loop() {\n\nif(mySerial.available()) {\n Serial.write(mySerial.read());\n}\n/*if (Serial.available()){\n mySerial.write(Serial.read());\n}\n*/\n\n}\n"
},
{
"alpha_fraction": 0.5122218132019043,
"alphanum_fraction": 0.5315577983856201,
"avg_line_length": 18.04166603088379,
"blob_id": "df2aae257f82ffb72e7c535c25f806c232f68a6e",
"content_id": "8951ed2fe50b6865cf5d3f98f16e54957a6a3928",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2741,
"license_type": "permissive",
"max_line_length": 93,
"num_lines": 144,
"path": "/MIDI-Arduino-2-master/MIDI-Arduino-2.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "/*\n * Created by Keegan Dahm on 4/8/15.\n */\n\n#include \"LED.h\"\n#include \"PIR.h\"\n#include \"SerialStream.h\"\n#include \"AutoArray.h\"\n\n#include <stdlib.h>\n\nAutoArray<LED *> leds;\nAutoArray<PIR *> pirs;\nSerialStream stream(128);\n\n\nvoid add_LED() {\n unsigned int red = stream.read();\n unsigned int green = stream.read();\n unsigned int blue = stream.read();\n\n if (!leds.add(new LED(red, green, blue))) {\n Serial.write(0xFF);\n return;\n }\n\n leds[leds.length() - 1]->set();\n\n Serial.write(leds.length() - 1);\n}\n\nvoid add_PIR() {\n unsigned int pin = stream.read();\n\n if (!pirs.add(new PIR(pin))) {\n Serial.write(0xFF);\n return;\n }\n\n Serial.write(pirs.length() - 1);\n}\n\nvoid set_LED() {\n unsigned int led = stream.read();\n unsigned int values = stream.read();\n\n leds[led]->set((values >> 2) & 1, (values >> 1) & 1, values & 1);\n}\n\nvoid reset_LED() {\n unsigned int led = stream.read();\n\n leds[led]->reset();\n}\n\nvoid update_PIRs() {\n for (int i = 0; i < pirs.length(); i++)\n pirs[i]->update();\n}\n\nvoid check_PIR() {\n unsigned int pir = stream.read();\n\n bool value = pirs[pir]->check();\n\n if (value)\n Serial.write(1);\n else\n Serial.write(0);\n}\n\nvoid check_PIR_edge() {\n unsigned int pir = stream.read();\n unsigned int edge = stream.read();\n\n bool value = edge ? pirs[pir]->check_positive_edge() : pirs[pir]->check_negative_edge();\n\n if (value)\n Serial.write(1);\n else\n Serial.write(0);\n}\n\nvoid get_num_LEDs() {\n Serial.write(leds.length());\n}\n\nvoid get_num_PIRs() {\n Serial.write(pirs.length());\n}\n\ninline void reset() {\n asm(\"jmp 0\");\n}\n\nstruct {\n void (*handler)();\n unsigned int args;\n} commands [] = {\n { add_LED, 3 }, // 0\n { add_PIR, 1 }, // 1\n { set_LED, 2 }, // 2\n { reset_LED, 1 }, // 3\n { update_PIRs, 0 }, // 4\n { check_PIR, 1 }, // 5\n { check_PIR_edge, 2 }, // 6\n { get_num_LEDs, 0 },//7\n { get_num_PIRs, 0 },//8\n { 0, 0 }\n};\nunsigned int num_commands;\n\nvoid reply() {\n unsigned int bytes = stream.available();\n\n while (stream.peek() < num_commands && commands[stream.peek()].args < stream.available())\n commands[stream.read()].handler();\n\n if (stream.available()) {\n if(stream.peek() == 0xFF) {\n stream.flush();\n Serial.write(0xFF);\n }\n else if (stream.peek() == 0xFE)\n reset();\n }\n}\n\n\nvoid setup() {\n Serial.begin(19200);\n\n for(num_commands = 0; commands[num_commands].handler; num_commands++);\n}\n\nvoid loop() {\n if (Serial.available())\n stream.buffer();\n\n if (stream.available())\n reply();\n\n delay(1);\n}"
},
{
"alpha_fraction": 0.5665286779403687,
"alphanum_fraction": 0.6026020050048828,
"avg_line_length": 20.576923370361328,
"blob_id": "0e07e1a77cfe2fc369e38b0a24dee55c4e318e51",
"content_id": "5948b830433f68d6b421da91951d9424d6aa3528",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1693,
"license_type": "permissive",
"max_line_length": 145,
"num_lines": 78,
"path": "/ledtiestartup/ledtiestartup.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include \"LPD8806.h\"\n\n\nin nLEDs = 8;\nint dataPin = 2;\nint clockPin = 3;\n\nLPD8806 strip = LPD8806(nLEDs, dataPin, clockPin);\n\nvoid setup() {\n \n strip.begin();\n strip.show(); \n \n}\n\ndouble red = 0;\ndouble green = 0;\ndouble blue = 0;\nint count = 0;\n\n\nvoid loop() {\n \n //CallRedEqualizer\n for(i=0;i=10;i++;){\n if((red + i)== green){\n \tred = random(0,254);\n \tCallRedEqualizer();\n }\n //CallGreenEqualizer\n for(i=0;i=10;i++;){\n if((green + i)== blue){\n \tgreen = random(0,254);\n \tCallRedEqualizer();\n }\n \n //CallBlueEqualizer\n for(i=0;i=10;i++;){\n if((blue + i)== red){\n \tblue = random(0,254);\n \tCallBlueEqualizer();\n }\n }\n RestartSequence();\n }\n\n\n}\n\n}\n\n\n\n\nvoid RestartSequence() {/* once all of the lights are on, it restarts picking random lights to turn on\nHave to decide if you want to have it turn on the lights randomly and sometimes the same one 2x or just once all of them are on to restart it. */\n RandomLightarray(0,24) = 0; // creates an array of 25.\n Pick = random(0,numlights); //pick one number random from max number of lights\n count = 0;\n \n if(RandomLightAray(Pick) == 1){ Pick = random(0,numlights)}\n //Call RandomlightArray again. \n //if get same light, try again\n \n count=0;\n for(i=0;i=numlights;i++;){//count how many lights you have turned on\n \tif(randomlightarray(i) ==1){\n \t\tcount++;\n }\n if(count == (numlights – 1)){ //if your count is 1 less than your total lights on\n Setlights(r,g,b,i) = <0,0,0>; //turn them all off \t\n for(i=0;i=numlights;i++){randomlightarray(i) = 0;}//restart the process\n }\n }\n Setlights(r,g,b,Pick);// turn on an individual light\n \n }\n \n \n\n\n"
},
{
"alpha_fraction": 0.5814360976219177,
"alphanum_fraction": 0.6374781131744385,
"avg_line_length": 20.148147583007812,
"blob_id": "35a4c6536165ae9181a0e64fd461829c4111c00c",
"content_id": "8c78c72afb087f6f812cfcff0226aa6ea240e12a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 571,
"license_type": "permissive",
"max_line_length": 99,
"num_lines": 27,
"path": "/libraries/LiquidCrystal_I2C2004V2/examples/HelloWorld/HelloWorld.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "//SmrazaKeen\n//Compatible with the Arduino IDE 1.6.6-1.6.9\n//Library version:1.2\n#include <Wire.h> \n#include <LiquidCrystal_I2C.h>\n\nLiquidCrystal_I2C lcd(0x3F,20,4); // set the LCD address to 0x3F for a 20 chars and 4 line display\n\nvoid setup()\n{\n lcd.init(); // initialize the lcd \n lcd.backlight();\n lcd.setCursor(1,0);\n lcd.print(\"Welcome to Smraza!\"); \n lcd.setCursor(0,1);\n lcd.print(\"Arduino LCM IIC 2004\");\n lcd.setCursor(0,2);\n lcd.print(\"www.amazon.com/shops\");\n lcd.setCursor(0,3);\n lcd.print(\"/smraza\");\n}\n\n\n\nvoid loop()\n{\n}\n"
},
{
"alpha_fraction": 0.5259770154953003,
"alphanum_fraction": 0.568735659122467,
"avg_line_length": 22.47252655029297,
"blob_id": "b1093ed1b58f4f0b1e8d254330f5b12cd10db619",
"content_id": "18a1df25b5a2d2dca973c85ac34fb70f108bcd0d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2175,
"license_type": "permissive",
"max_line_length": 240,
"num_lines": 91,
"path": "/move_all_lights_1/random_LED_color_speed/random_LED_color_speed.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include \"LPD8806.h\"\n#include \"SPI.h\" // Comment out this line if using Trinket or Gemma\n#ifdef __AVR_ATtiny85__\n #include <avr/power.h>\n#endif\n\n\n\n\nint nLED = 124;\n\nint dataPin = 2;\nint clockPin = 3;\n\nLPD8806 strip = LPD8806(nLED, dataPin, clockPin);\n\nvoid setup(){\n strip.begin(); //start running the strip\n strip.show(); //show strip\n}\n\nvoid loop(){\n int i;\n int k;\n byte red = random(1,256); \n byte green = random(1,256); \n byte blue = random(1,256);\n byte rand = 1;\n\n \n for(i=0; i<strip.numPixels(); i++){ //for loop turns on the LED's one by one\n \n byte randomnum = random(1,4); //randomizes the colors of the LED's\n switch (randomnum){\n case 1:\n red = red + random(30,150);\n red = red % 255;\n break;\n \n case 2:\n green = green + random(30,150);\n green = green % 255; \n break;\n \n case 3:\n blue = blue + random(30,150);\n blue = blue % 255; \n break; \n } \n /*\n //Create an Array that stores a random number, checks to see if that number has been chosen, if so it will create another number, check itself and if it is good, it will give the value to make an led change color. \n for(i = 0; i<99; i++){\n int arraycheck[100];//numbers 0-99\n arraycheck[i] = 100;\n \n \n }\n for(int makenum = 0; makenum < 99; makenum++){\n \n arraycheck[makenum] = random(0,strip.numPixels());\n for(int check = 0; check < 99; check++){\n for(int doublecheck = 1; doublecheck < 99; doublecheck++){\n if(arraycheck[doublecheck] == arraycheck[check]){\n makenum--; \n \n }\n \n }\n \n }\n */\n \n }\n \n \n \n strip.setPixelColor(random(0,strip.numPixels()),red,green,blue); // Sets the color of the pixels; choose random(0,strip.numPixels()) OR i depending on how you want the leds to light up. random(0,strip.numPixels()) = random, i= 1 by one;\n strip.show();\n delay(10); //how long to wait to go onto the next led\n \n\n /*\n if(i==strip.numPixels()-1){\n for(k=0; k<strip.numPixels(); k++){ // (turns off all of the LED's \n strip.setPixelColor(k,0);\n } \n }\n\n }\n */\n}\n\n\n\n\n \n \n \n \n \n \n \n\n\n"
},
{
"alpha_fraction": 0.583428680896759,
"alphanum_fraction": 0.6227969527244568,
"avg_line_length": 24.109195709228516,
"blob_id": "2a6af8961c8943a024f8282c3a0f6667c962f5c6",
"content_id": "d3055fee5af9b0bf19315c653cb3d91c83473aab",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4369,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 174,
"path": "/Longboard2.0/Longboard2.0/Longboard2.0.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include \"LPD8806.h\"\n#include \"SPI.h\"\n#include \"SoftwareSerial.h\"\n\n// Simple test for 160 (5 meters) of LPD8806-based RGB LED strip\n// Not compatible with Trinket/Gemma due to limited RAM\n\n/*****************************************************************************/\n\n// Number of RGB LEDs in strand:\nint nLEDs = 20;\n\n// Chose 2 pins for output; can be any valid output pins:\nint dataPinLeft = 2;\nint clockPinLeft = 3;\n\nint dataPinRight = 4;\nint clockPinRight = 5;\n\n// First parameter is the number of LEDs in the strand. The LED strips\n// are 32 LEDs per meter but you can extend or cut the strip. Next two\n// parameters are SPI data and clock pins:\nLPD8806 stripLeft = LPD8806(nLEDs, dataPinLeft, clockPinLeft);\nLPD8806 stripRight = LPD8806(nLEDs, dataPinRight, clockPinRight);\n\n// You can optionally use hardware SPI for faster writes, just leave out\n// the data and clock pin parameters. But this does limit use to very\n// specific pins on the Arduino. For \"classic\" Arduinos (Uno, Duemilanove,\n// etc.), data = pin 11, clock = pin 13. For Arduino Mega, data = pin 51,\n// clock = pin 52. For 32u4 Breakout Board+ and Teensy, data = pin B2,\n// clock = pin B1. For Leonardo, this can ONLY be done on the ICSP pins.\n//LPD8806 strip = LPD8806(nLEDs);\n\nvoid setup() {\n // Start up the LED strip\n stripLeft.begin();\n stripRight.begin();\n\n // Update the strip, to start they are all 'off'\n stripLeft.show();\n stripRight.show();\n}\n\nvoid loop() {\n// colorChase(strip.Color(127, 0, 0), 100); // Red\n //colorChase(strip.Color( 0,127, 0), 100); // Green\n // colorChase(strip.Color( 0, 0,127), 100); // Blue\n // colorChase(strip.Color(127,127,127), 100); // White\n\n\n\nleftturn(100);\n//rightturn(100);\n//brake(100);\n//off();\n//singleled(num,r,g,b);\n\n}\n\nvoid setall(short r, short g, short b){\n int i;\n for(i=0; i<nLEDs; i++) {\n stripLeft.setPixelColor(i,r,g,b); // Set new pixel 'on'\n stripRight.setPixelColor(i,r,g,b); // Set new pixel 'on'\n }\n stripLeft.show();\n stripRight.show();\n}\n\n\nvoid singleled(short num, short r, short g, short b){\n stripLeft.setPixelColor(num,r,g,b);\n stripRight.setPixelColor(num,r,g,b);\n}\n\n\n\nvoid off(){\n int i;\n for(i=0; i<nLEDs; i++) {\n stripRight.setPixelColor(i, 0); // Set new pixel 'off'\n stripLeft.setPixelColor(i, 0); // Set new pixel 'off'\n }\n stripLeft.show();\n stripRight.show();\n}\n\nvoid brake(short wait){\n int i;\n for(i=0; i<=nLEDs; i++) {\n stripLeft.setPixelColor(i, 127,0,0); // Set new pixel 'on'\n stripRight.setPixelColor(i, 127,0,0); // Set new pixel 'on'\n stripLeft.show(); // Refresh LED states\n stripRight.show(); // Refresh LED states\n }\ndelay(wait+200);\n for(i=0; i<=nLEDs; i++) {\n stripLeft.setPixelColor(i, 0); // Set new pixel 'on'\n stripRight.setPixelColor(i, 0); // Set new pixel 'on'\n stripLeft.show(); // Refresh LED states\n stripRight.show(); // Refresh LED states\n }\ndelay(wait);\n}\n\nvoid leftturn(short wait){\n byte i;\n for(i=0; i<=nLEDs; i++) {\n stripLeft.setPixelColor(i, 127,0,0); // Set new pixel 'on'\n stripLeft.show(); // Refresh LED states\n }\ndelay(100+200);\n for(i=0; i<=nLEDs; i++) {\n stripLeft.setPixelColor(i, 0); // Set new pixel 'on'\n stripLeft.show(); // Refresh LED states\n }\ndelay(100);\n}\n\nvoid rightturn(short wait){\n byte i;\n for(i=0; i<=nLEDs; i++) {\n stripRight.setPixelColor(i, 127,0,0); // Set new pixel 'on'\n stripRight.show(); // Refresh LED states\n }\ndelay(100+200);\n for(i=0; i<=nLEDs; i++) {\n stripRight.setPixelColor(i, 0); // 
Set new pixel 'on'\n stripRight.show(); // Refresh LED states\n }\ndelay(100);\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n/*\n// Chase one dot down the full strip. Good for testing purposes.\nvoid colorChase(uint32_t c, uint8_t wait) {\n int i;\n \n // Start by turning all pixels off:\n for(i=0; i<strip.numPixels(); i++) strip.setPixelColor(i, 0);\n strip2.setPixelColor(i, 0);\n\n // Then display one pixel at a time:\n for(i=0; i<strip.numPixels(); i++) {\n strip.setPixelColor(i, c); // Set new pixel 'on'\n strip2.setPixelColor(i, c);\n strip.show(); \n strip2.show(); // Refresh LED states\n strip.setPixelColor(i, 0);\n strip2.setPixelColor(i, 0);// Erase pixel, but don't refresh!\n delay(wait);\n }\n\n strip.show();\n strip2.show();// Refresh to turn off last pixel\n}\n\n*/\n"
},
{
"alpha_fraction": 0.46444520354270935,
"alphanum_fraction": 0.5362418293952942,
"avg_line_length": 17.761289596557617,
"blob_id": "5c4ad802e39a925ba98ba35aeab54883d132bc62",
"content_id": "f624f93ad06446032411e88299d3949c024d3ed3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2911,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 155,
"path": "/led_tie/led_tie.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include \"LPD8806.h\"\n\n\nint nLEDs = 12;\nint dataPin = 2;\nint clockPin = 3;\n\nLPD8806 strip = LPD8806(nLEDs, dataPin, clockPin);\n\n\n\nvoid setup() {\n // put your setup code here, to run once:\n strip.begin();\n strip.show(); \n}\n\nint led_array[] = {0,0,0,0,0,0};\nint prev_led[] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0};\nint j, i;\nint count = 0;\nint random_num= random(0,12);\n\nint red = (0,255,0);\nint blue = (255,0,0);\nint green = (0,0,255);\nint three_colors[] = {red,blue,green};\n\nvoid loop() {\n // put your main code here, to run repeatedly:\n\n //rainbow(10);\n//rainbowCycle(10);\n\n\nrandom_leds();\n//up_one();\n\n}\n\n\n\nvoid up_one() {\nint counter_1 = 0;\nint i = 0;\nint random_384 = random(50,384);\n\nfor(i=12;i>0;i--){\n strip.setPixelColor(i, Wheel(random_384));\n strip.show(); \n delay(300);\n counter_1++;\n}\nif( counter_1 == 11){\nfor(i=0;i<12;i++){\n strip.setPixelColor(i, 0);\n strip.show();\n}\n counter_1 = 0;\n}\n \n}\n\n\n\nvoid random_leds(){\n \nrandom_num = random(0,20);\nint random_384 = random(50,384);\n\n\nif(1 == prev_led[random_num]){\n random_leds();\n\n}\n\nelse{\n prev_led[random_num] = 1;\n strip.setPixelColor(random_num, Wheel(random_384));\n strip.show(); \n delay(300);\n\nfor (i = 0; i < 12; i++){\n if (prev_led[i] == 1) {\n count++;\n }\n }\n if (count == 12){\n delay(275);\n for (i= 0; i < 12; i++){\n prev_led[i] = 0;\n strip.setPixelColor(i, 0);\nstrip.show(); \ndelay(75);\n }\n }\n else{\ncount = 0;\n }\n}\n}\n\n\n\nvoid rainbow(uint8_t wait) {\n int i, j;\n \n for (j=0; j < 384; j++) { // 3 cycles of all 384 colors in the wheel\n for (i=0; i < strip.numPixels(); i++) {\n strip.setPixelColor(i, Wheel( (i + j) % 384));\n } \n strip.show(); // write all the pixels out\n delay(wait);\n }\n}\n\nvoid rainbowCycle(uint8_t wait) {\n uint16_t i, j;\n \n for (j=0; j < 384 * 5; j++) { // 5 cycles of all 384 colors in the wheel\n for (i=0; i < strip.numPixels(); i++) {\n // tricky math! we use each pixel as a fraction of the full 384-color wheel\n // (thats the i / strip.numPixels() part)\n // Then add in j which makes the colors go around per pixel\n // the % 384 is to make the wheel cycle around\n strip.setPixelColor(i, Wheel( ((i * 384 / strip.numPixels()) + j) % 384) );\n } \n strip.show(); // write all the pixels out\n delay(wait);\n }\n}\n\nuint32_t Wheel(uint16_t WheelPos)\n{\n byte r, g, b;\n switch(WheelPos / 128)\n {\n case 0:\n r = 127 - WheelPos % 128; //Red down\n g = WheelPos % 128; // Green up\n b = 0; //blue off\n break; \n case 1:\n g = 127 - WheelPos % 128; //green down\n b = WheelPos % 128; //blue up\n r = 0; //red off\n break; \n case 2:\n b = 127 - WheelPos % 128; //blue down \n r = WheelPos % 128; //red up\n g = 0; //green off\n break; \n }\n return(strip.Color(r,g,b));\n}\n\n\n\n"
},
{
"alpha_fraction": 0.5772851705551147,
"alphanum_fraction": 0.5927725434303284,
"avg_line_length": 26.906780242919922,
"blob_id": "b98cb8eceb0a9ecd0a840ecfbfe6370c457c3836",
"content_id": "344ae25b5747b10e6268f5fcc46f5adad1695c34",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3293,
"license_type": "permissive",
"max_line_length": 128,
"num_lines": 118,
"path": "/Leaf_Node_GPS_Gieger/Leaf_Node_GPS_Gieger.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include <SPI.h>\n#include \"RF24.h\"\n\nconst byte NodeID = 1;\nfloat NodeData = 420;\n\nconst int Max_Nodes = 20;\nbyte Received_ID_Tags[Max_Nodes]; //write max number of Nodes. \n\nRF24 radio(7,8);\nconst uint64_t pipe = 0xE8E8F0F0E1LL; //channel to receive\nbyte addresses[][6] = {\"1Node\",\"2Node\"};\n\n\nbyte TransAMOUNT = 15;\n\n\n\n//Define Node Information\ntypedef struct {\n byte ID; //Node ID number\n byte path [Max_Nodes]; //Up to 256 Node names but a MAXIMUM PATH of 30\n byte Place_In_Path; //Where in the array are we\n byte cmd; //go to sleep, other odd commands\n bool return_flag;//Return to home node, go from ++ to --\n float sensor1;\n}MsgData;\n\n MsgData My_Data;\n MsgData Received_Data;\n\n\n\nvoid setup() {\n Serial.begin(9600);\n radio.begin();\n radio.setAutoAck(false);\n radio.openReadingPipe(1,pipe);\n radio.startListening();\n My_Data.ID = NodeID;\n My_Data.sensor1 = NodeData;\n}\n\nvoid loop() { \n receive(); //wait until we get something\n \n //If next number is -1 (end of path) AND we are the next in line\n if(0 == Received_Data.path[Received_Data.Place_In_Path + 1] && Received_Data.path[Received_Data.Place_In_Path] == My_Data.ID){\n Received_Data.return_flag = 1; //Return to the home node\n //put data into the send back variable\n Received_Data.sensor1 = My_Data.sensor1;\n \n Received_Data.Place_In_Path --; //go back a step, in the path\n Serial.println(\"*******END NODE***** Passed Back Data\");\n transmit(Received_Data);\n }\n \n //Not End Node so Pass Along Data\n else if(Received_Data.path[Received_Data.Place_In_Path] == My_Data.ID){\n //check flag\n if(Received_Data.return_flag == 1){ //if going back to home node\n Received_Data.Place_In_Path --; //de-increment\n Serial.println(\"Passed Backwards Data\");\n\n }\n else{\n Received_Data.Place_In_Path ++; //otherwise, increment (follow up along path)\n Serial.println(\"Passed Fowards Data\");\n }\n transmit(Received_Data);\n }\n else{\n //do nothing otherwise\n }\n \n\n}\n\n\nvoid receive(){ //Recieve Data from another node\n radio.openReadingPipe(1,addresses[0]);\n radio.startListening(); \n if(radio.available()){ \n while(radio.available()){ \n radio.read(&Received_Data, sizeof(MsgData)); //byte value\n Serial.println(\"\\nRecieved Data\");\n \n Serial.print(\"ID: \");\n Serial.println(Received_Data.ID);\n\n Serial.print(\"Place_In_Path: \");\n Serial.println(Received_Data.Place_In_Path);\n\n Serial.print(\"Path: \");\n for (int i=0;i<Max_Nodes;i++){\n Serial.print(Received_Data.path[i]);\n Serial.print(\", \");\n }\n Serial.println(\"\");\n\n Serial.print(\"Return_Flag: \");\n Serial.println(Received_Data.return_flag);\n \n delay(5);\n }\n }\n return;\n}\n\nvoid transmit(MsgData Transmit_Msg){ //Transmit Data to Another Node\n radio.openWritingPipe(addresses[0]);\n radio.stopListening();\n for(byte i=0; i<TransAMOUNT; i++){\n Serial.println(\"Transmitted Data\"); \n radio.write(&Transmit_Msg, sizeof(MsgData));\n delay(5);\n }\n}\n"
},
{
"alpha_fraction": 0.5799999833106995,
"alphanum_fraction": 0.6142857074737549,
"avg_line_length": 19.58823585510254,
"blob_id": "1a3bb5ce1c485fdef0d056db8aab3b098de3bbd4",
"content_id": "3204222010830490ce4c336a8cc2cd50bceda4dc",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 350,
"license_type": "permissive",
"max_line_length": 63,
"num_lines": 17,
"path": "/TEST_1/TEST_1.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include <SoftwareSerial.h>\n #define RFID_START 0x00 // RFID Reader Start and Stop bytes\n\nvoid setup() {\n // put your setup code here, to run once:\n Serial.begin(9600);\n}\n\nvoid loop() {\n // put your main code here, to run repeatedly:\n\nSerial.println(\"TEST_1\");\n for(int i=0x00; i<0xFF; i++){\n Serial.println(i, HEX);\n }\n\n}\n"
},
{
"alpha_fraction": 0.4459307789802551,
"alphanum_fraction": 0.49857598543167114,
"avg_line_length": 19.57904052734375,
"blob_id": "8f24f1b04994bad52ec5e6e66cdad1bd59d3798a",
"content_id": "18a30607d32b9e7a5989c0b8463887334c4ec2a8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 11587,
"license_type": "permissive",
"max_line_length": 93,
"num_lines": 563,
"path": "/BandReciever2016Final/BandReciever2016Final.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include <SPI.h>\n#include <nRF24L01.h>\n#include <RF24.h>\n#include \"LPD8806.h\"\n#include \"SPI.h\" // Comment out this line if using Trinket or Gemma\n#ifdef __AVR_ATtiny85__\n #include <avr/power.h>\n#endif\n\n\nint drum = 2; //Change this variable depending on the drum start at 0\nint offset =20 * drum;\nint nLEDs = 180;\n\n\nRF24 radio(8, 9);\n\nconst byte rxAddr[6] = \"00001\";\n\nint dataPin = 7;\nint clockPin = 6;\nLPD8806 strip = LPD8806(nLEDs, dataPin, clockPin);\nvolatile int pattern = 0;\nint num = 0;\nvolatile int quit = 0;\nint flag = 0;\nint parity = 0;\n\n\nvoid setup(){\n \n attachInterrupt(0, interrupt, FALLING);\n strip.begin();\n strip.show();\n Serial.begin(9600);\n radio.begin();\n radio.openReadingPipe(0, rxAddr);\n radio.startListening();\n randomSeed(analogRead(0));\n}\n\nvoid interrupt(){\n char text[6] = {0};\n radio.read(&text, sizeof(text));\n String str(text);\n num = str.toInt();\n Serial.println(num);\n\n if(num != pattern){\n pattern = num;\n quit = 1;\n parity = 0;\n }\n \n}\n\nint led_array[] = {0,0,0,0,0,0};\nint prev_led[] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0};\nint j, i;\nint count = 0;\nint random_num= random(0,12);\n\nint red = (0,255,0);\nint blue = (255,0,0);\nint green = (0,0,255);\nint three_colors[] = {red,blue,green};\n\nvoid loop(){\n switch(pattern){\n case 0:\n colorWipe(strip.Color(127, 0, 0), 30);\n break;\n case 1:\n rainbowCycle(0); \n break;\n case 2:\n parity = 0;\n colorChase(strip.Color(127, 127, 0), 40);\n break;\n case 3:\n cycloneScroll();\n break;\n case 4:\n flashRandom();\n break;\n case 5:\n colorWipeRandom(30);\n break;\n case 6:\n theaterChase(strip.Color(127, 50, 50), 50);\n break;\n case 7:\n colorChaseThree(strip.Color(50, 70, 80), 35);\n break;\n case 8:\n colorWipe(strip.Color(100, 50, 0), 30);\n break;\n case 9:\n theaterChaseRainbow(50);\n break;\n case 10:\n colorWipe(strip.Color(random(128), random(128), random(128)), 30);\n break;\n case 11:\n random_leds();\n break;\n case 12:\n colorChaseThreeOneDrum(strip.Color(127, 0, 127), 45);\n break;\n default:\n stripClear;\n break;\n }\n}\n\nvoid random_leds(){\n quit = 0;\nrandom_num = random(0,20);\nint random_384 = random(50,384);\n\nif(quit == 0){\nif(1 == prev_led[random_num]){\n random_leds();\n\n}\n else{\n prev_led[random_num] = 1;\n strip.setPixelColor(random_num, Wheel(random_384));\n strip.show(); \n delay(100);\n \n for (i = 0; i < 20; i++){\n if (prev_led[i] == 1) {\n count++;\n }\n }\n if (count == 20){\n delay(100);\n for (i= 0; i < 20; i++){\n prev_led[i] = 0;\n strip.setPixelColor(i, 0);\n strip.show(); \n delay(25);\n }\n }\n else{\n count = 0;\n }\n }\n }\n else if(quit == 1){\n stripClear();\n return;\n }\n}\n/*\nvoid pixel_switch(){\n quit = 0;\n if(quit == 0){\n if(drum >= 0 && <= 3){\n for (i=0; i < (strip.numPixels() / 2) + 20; i++) {\n strip.setPixelColor(i - offset, 100, 150, 50);\n strip.show();\n delay(50);\n \n }\n else if(drum >= 5 && <= 8){\n for (i=strip.numPixels; i > (strip.numPixels()/2) + 20 ; i--) {\n strip.setPixelColor(i + offset, 200, 100, 200);\n strip.show();\n delay(50);\n }\n else if(drum == 4){\n for( i = offset\n strip.setPixelColor(i, 127, 80, 50);\n strip.show();\n \n }\n \n }\n }\n }\n else if(quit == 1){\n stripClear();\n return;\n }\n \n\n\n\n \n\n}\n*/\n\n\n\n/*void cycloneScroll(){\n quit = 3;\n parity++; \n parity = parity % 2;\n\n if(drum < 3){\n do{stripClear();}while(0);\n for (int i = 0; i < strip.numPixels(); i++) {\n if(quit == 0){\n if(parity == 0){strip.setPixelColor(i - offset, 127, 0, 0);}\n 
else {strip.setPixelColor(i - offset, 100, 50, 0);}\n strip.show();\n delay(10);\n }\n else{\n stripClear();\n return;\n }\n }\n }\n else if(drum > 5){\n do{stripClear();}while(0);\n for (int i=0; i < strip.numPixels(); i++) {\n if(quit == 0){\n if(parity == 0){strip.setPixelColor(i - offset + 120, 127, 0, 0);}\n else {strip.setPixelColor(i - offset + 120, 100, 50, 0);}\n strip.show();\n delay(10);\n }\n else{\n stripClear();\n return;\n }\n }\n }\n else if (drum > 2 && drum < 6){\n do{stripClear();}while(0);\n for (int i=0; i < strip.numPixels(); i++){\n if(quit == 0){\n if(parity == 0){strip.setPixelColor(i - offset + 60, 100, 50, 0);}\n else {strip.setPixelColor(i - offset + 60, 127, 0, 0);}\n strip.show();\n delay(10);\n }\n else{\n stripClear();\n return;\n }\n }\n } \n}*/\n\n\nvoid cycloneScroll(){\n quit = 0;\n parity++; \n parity = parity % 2;\n\n if(drum < 3){\n do{stripClear();}while(0);\n for (int i = 0; i < strip.numPixels(); i++) {\n if(quit == 0){\n if(parity == 0){strip.setPixelColor(i - offset, 127, 0, 0);}\n else {strip.setPixelColor(i - offset, 100, 50, 0);}\n strip.show();\n delay(10);\n }\n else{\n stripClear();\n return;\n }\n }\n }\n else if (drum == 3 || drum == 4 || drum == 5){\n do{stripClear();}while(0);\n for (int i=0; i < strip.numPixels(); i++) {\n if(quit == 0){\n if(parity == 0){strip.setPixelColor(i - offset + 60, 100, 50, 0);}\n else {strip.setPixelColor(i - offset + 60, 127, 0, 0);}\n strip.show();\n delay(10);\n }\n else{\n stripClear();\n return;\n }\n }\n }\n else if(drum > 5){\n for (int i=0; i < strip.numPixels(); i++) {\n if(quit == 0){\n if(parity == 0){strip.setPixelColor(i - offset + 120, 127, 0, 0);}\n else {strip.setPixelColor(i - offset + 120, 100, 50, 0);}\n strip.show();\n delay(10);\n }\n else{\n stripClear();\n return;\n }\n }\n } \n}\n\n\n\n// Fill the dots progressively along the strip.\nvoid colorWipe(uint32_t c, uint8_t wait) {\n int i;\n quit = 0;\n\n do{stripClear();} while(0);\n for (i=0; i < strip.numPixels() + 20; i++) {\n if(quit == 0){\n strip.setPixelColor(i-offset, c);\n strip.show();\n delay(wait);\n }\n else if(quit == 1){\n stripClear();\n return;\n }\n }\n}\n\n\nvoid colorWipeRandom(uint8_t wait) {\n int i;\n quit = 0;\n\n do{stripClear();} while(0);\n for (i=0; i < strip.numPixels() + 20; i++) {\n if(quit == 0){\n strip.setPixelColor(i-offset, strip.Color(random(128), random(128), random(128)));\n strip.show();\n delay(wait);\n }\n else if(quit == 1){\n stripClear();\n return;\n }\n }\n}\n\n\nvoid colorChase(uint32_t c, uint8_t wait) {\n int i;\n quit = 0;\n \n for(i=0; i<strip.numPixels(); i++) {\n if(quit == 0){\n strip.setPixelColor(i-offset, c); // Set new pixel 'on'\n strip.show(); // Refresh LED states\n strip.setPixelColor(i-offset, 0); // Erase pixel, but don't refresh!\n delay(wait);\n }\n else{\n stripClear();\n return;\n } \n }\n\n strip.show(); // Refresh to turn off last pixel\n}\n\n\nvoid colorChaseThree(uint32_t c, uint8_t wait) {\n int i;\n quit = 0;\n \n for(i=0; i < strip.numPixels(); i++) {\n if(quit == 0){\n strip.setPixelColor(i - offset, c); // Set new pixel 'on'\n strip.setPixelColor(i - offset - 1, c);\n strip.setPixelColor(i - offset - 2, c);\n strip.setPixelColor(i - offset - 3, 0); // Erase pixel, but don't refresh!\n strip.show(); // Refresh LED states\n delay(wait);\n }\n else{\n stripClear();\n return;\n } \n }\n\n strip.show(); // Refresh to turn off last pixel\n}\n\n\nvoid colorChaseThreeOneDrum(uint32_t c, uint8_t wait) {\n int i;\n quit = 0;\n \n for(i=0; i < 23; i++) {\n if(quit == 0){\n 
strip.setPixelColor(i, c); // Set new pixel 'on'\n strip.setPixelColor(i - 1, c);\n strip.setPixelColor(i - 2, c);\n strip.setPixelColor(i - 3, 0); // Erase pixel, but don't refresh!\n strip.show(); // Refresh LED states\n delay(wait);\n }\n else{\n stripClear();\n return;\n } \n }\n\n strip.show(); // Refresh to turn off last pixel\n}\n\n\n//Flash random colors on the strip\nvoid flashRandom(){\n int i;\n int r = 0;\n int g = 0;\n int b = 0;\n quit = 0;\n \n stripClear();\n\n// delay(random(80,120));\n \n switch(random(0,4)){\n case 0: // Red\n r = 127;\n g = 0;\n b = 0;\n break;\n case 1: // Green\n r = 0;\n g = 127;\n b = 0;\n break;\n case 2: // Blue\n r = 0;\n g = 0;\n b = 127;\n break;\n case 3: //White\n r = 127;\n g = 127;\n b = 127;\n break;\n }\n \n // set pixel colors\n for (i=0; i < strip.numPixels(); i++) {\n strip.setPixelColor(i, r, g, b); \n }\n \n strip.show(); \n delay(random(100, 170));\n\n if(quit){\n stripClear();\n return;\n }\n}\n\n\nvoid theaterChase(uint32_t c, uint8_t wait) {\n quit = 0;\n for (int j=0; j<10; j++) { //do 10 cycles of chasing\n for (int q=0; q < 3; q++) {\n if(quit == 0){\n for (int i=0; i < strip.numPixels(); i=i+3) {\n strip.setPixelColor(i+q, c); //turn every third pixel on\n }\n strip.show();\n \n delay(wait);\n \n for (int i=0; i < strip.numPixels(); i += 3) {\n strip.setPixelColor(i+q, 0); //turn every third pixel off\n }\n }\n else{\n stripClear();\n return;\n }\n }\n }\n}\n\n\nvoid theaterChaseRainbow(uint8_t wait) {\n quit = 0;\n for (int j=0; j < 384; j+=5) { // cycle all 384 colors in the wheel\n for (int q=0; q < 3; q++) {\n if(quit == 0){\n for (int i=0; i < strip.numPixels(); i=i+3) {\n strip.setPixelColor(i+q, Wheel( (i+j) % 384)); //turn every third pixel on\n }\n strip.show();\n \n delay(wait);\n \n for (int i=0; i < strip.numPixels(); i=i+3) {\n strip.setPixelColor(i+q, 0); //turn every third pixel off\n }\n }\n else{\n stripClear();\n return;\n }\n }\n }\n}\n\n\nuint32_t Wheel(uint16_t WheelPos)\n{\n byte r, g, b;\n switch(WheelPos / 128)\n {\n case 0:\n r = 127 - WheelPos % 128; //Red down\n g = WheelPos % 128; // Green up\n b = 0; //blue off\n break; \n case 1:\n g = 127 - WheelPos % 128; //green down\n b = WheelPos % 128; //blue up\n r = 0; //red off\n break; \n case 2:\n b = 127 - WheelPos % 128; //blue down \n r = WheelPos % 128; //red up\n g = 0; //green off\n break; \n }\n return(strip.Color(r,g,b));\n}\n\nvoid rainbowCycle(uint8_t wait) {\n uint16_t i, j;\n quit = 0;\n for (j=0; j < 384 * 5; j++) { // 5 cycles of all 384 colors in the wheel\n if(quit == 0){\n for (i=0; i < strip.numPixels(); i++) {\n // tricky math! we use each pixel as a fraction of the full 384-color wheel\n // (thats the i / strip.numPixels() part)\n // Then add in j which makes the colors go around per pixel\n // the % 384 is to make the wheel cycle around\n strip.setPixelColor(i, Wheel( ((i * 384 / strip.numPixels()) + (j + offset)) % 384) );\n } \n \n strip.show(); // write all the pixels out\n delay(wait);\n }\n else{\n stripClear();\n return;\n }\n }\n}\n\n\nvoid stripClear(){\n for(int i = 0; i < strip.numPixels(); i++){\n strip.setPixelColor(i, strip.Color(0,0,0));\n }\n strip.show();\n}\n\n"
},
{
"alpha_fraction": 0.46672943234443665,
"alphanum_fraction": 0.5188323855400085,
"avg_line_length": 20.102649688720703,
"blob_id": "77f96268af15d130315ae7c2410a6ede89aeac00",
"content_id": "11921d07d37a67ae1d4b992c6ca20e76e27e47a9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6372,
"license_type": "permissive",
"max_line_length": 90,
"num_lines": 302,
"path": "/MIDI-Arduino-2-master/Interface.py",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "from serial import Serial\nfrom time import time, sleep\nfrom rtmidi import MidiIn, MidiOut\nfrom _thread import start_new_thread\n\n\nmode = 0\npir_mode_color = [False, False, True]\n\n\nopen_ports = []\nLEDs = []\nPIRs = []\n\nm_out = MidiOut().open_virtual_port(\"Python PIR Output\")\nmelody_in = MidiIn().open_virtual_port(\"Python Melody Input\")\ndrum_in = MidiIn().open_virtual_port(\"Python Drum Input\")\n\nlast_drum = 0\nDRUM_TIMEOUT = 0.1\nPIR_TIMEOUT = 0.7\n\n\nstair_modes = [\n [48, 50, 52, 53, 55, 57, 59, 60],\n [\n [48, 52, 55],\n [50, 53, 57],\n [52, 55, 59],\n [53, 57, 60],\n [55, 59, 62],\n [57, 60, 64],\n [59, 62, 65],\n [60, 64, 67],\n [62, 65, 69],\n [64, 67, 71],\n [65, 69, 72],\n [67, 71, 74],\n [69, 72, 76],\n [71, 74, 77],\n [72, 76, 79]\n ],\n [48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78]\n]\n\n\nmelody_data = [-1] * 255\n\n\nlight_data = []\nfor N in range(0, 21):\n light_data += [[False, False, False, False]]\n\n\ndef get_port(collection, n):\n return collection[n][0]\n\n\ndef get_id(collection, n):\n return collection[n][1]\n\n\ndef open_port(port):\n global open_ports\n\n open_ports += [Serial(port, 19200)]\n return open_ports[-1]\n\n\ndef call(port, data, rets=0):\n global open_ports\n\n if isinstance(port, (int, float)):\n port = open_ports[int(port)]\n\n port.flushInput()\n port.write(data)\n\n timeout = time() + 0.016 + 0.0005 * len(data)\n while time() < timeout and port.inWaiting() < rets:\n pass\n\n if port.inWaiting() < rets:\n return call(port, data, rets)\n\n ret = []\n while port.inWaiting():\n ret += port.read()\n\n return ret\n\n\ndef add_led(port, red, green, blue):\n global LEDs\n\n led = call(open_ports[port], [0, red, green, blue], 1)[0]\n\n LEDs += [(open_ports[port], led)]\n\n\ndef count_leds(port=-1):\n if port == -1:\n return len(LEDs)\n return call(port, [7], 1)\n\n\ndef count_pirs(port=-1):\n if port == -1:\n return len(PIRs)\n return call(port, [8], 1)\n\n\ndef set_led(led, red=1, green=1, blue=1):\n call(get_port(LEDs, led), [2, get_id(LEDs, led), (red << 2) | (green << 1) | blue])\n\n\ndef reset_led(led):\n call(get_port(LEDs, led), [3, get_id(LEDs, led)])\n\n\ndef add_pir(port, pin):\n global PIRs\n\n pir = call(open_ports[port], [1, pin], 1)[0]\n\n PIRs += [(open_ports[port], pir)]\n\n\ndef update_pirs():\n for n in open_ports:\n call(n, [4])\n\n\ndef check_pir(pir):\n global PIRs\n\n return call(get_port(PIRs, pir), [5, get_id(PIRs, pir)], 1)[0]\n\n\ndef check_pir_edge(pir, edge=-1):\n global PIRs\n\n if isinstance(edge, int) and edge == -1:\n pir_id = get_id(PIRs, pir)\n return call(get_port(PIRs, pir), [6, pir_id, 0, 6, pir_id, 1], 2)\n else:\n return call(get_port(PIRs, pir), [6, get_id(PIRs, pir), edge], 1)[0]\n\n\ndef midi_write_on(notes, velocity=127):\n midi_write_off(notes)\n if not hasattr(notes, \"__iter__\"):\n notes = [notes]\n\n for n in notes:\n m_out.send_message([144, n, velocity])\n\n\ndef midi_write_off(notes):\n if not hasattr(notes, \"__iter__\"):\n notes = [notes]\n\n for n in notes:\n m_out.send_message([128, n, 0])\n\n\ndef melody_callback(data, useless):\n global melody_data\n data = data[0]\n\n if data[0] == 144:\n melody_data[data[1]] = time()\n elif data[0] == 128:\n melody_data[data[1]] = -1\n\n\ndef drum_callback(data, useless):\n global last_drum\n data = data[0]\n\n if data[0] == 144:\n last_drum = time()\n\n\ndef update_led(n, colors):\n data = light_data[n]\n if data[0] != colors[0] or data[1] != colors[1] or data[2] != colors[2]:\n data[0] = colors[0]\n data[1] = 
colors[1]\n data[2] = colors[2]\n data[3] = True\n\n\ndef get_color(n):\n if n == -1:\n return [False, False, False]\n if n < 57:\n return [False, False, True]\n elif n < 63:\n return [False, True, True]\n elif n < 69:\n return [False, True, True]\n elif n < 75:\n return [True, True, False]\n else:\n return [True, False, False]\n\n\ndef midi_input_runner():\n global mode\n\n temp_melody_data = []\n for n in range(0, 12):\n temp_melody_data += [-1]\n\n while True:\n if mode is not \"midi\":\n sleep(0.1)\n continue\n\n for i in range(0, 12):\n temp_melody_data[i] = -1\n for n in range(i, len(melody_data), 12):\n if melody_data[n] > melody_data[temp_melody_data[i]]:\n temp_melody_data[i] = n\n\n for i in range(0, 12):\n to_set = get_color(temp_melody_data[i])\n\n if time() < last_drum + DRUM_TIMEOUT:\n if i > 0 and temp_melody_data[i - 1] != -1:\n to_set = [True, False, False]\n elif i < 11 and temp_melody_data[i + 1] != -1:\n to_set = [True, False, False]\n\n update_led(i + 4, to_set)\n\n sleep(0.01)\n\n\ndef pir_input_runner():\n global mode\n while True:\n if mode is not \"pir\":\n sleep(0.1)\n continue\n\n sleep(0.01)\n\n\ndef light_runner():\n global light_data, mode\n while True:\n if mode is not \"midi\":\n sleep(0.1)\n continue\n\n for n in range(0, len(light_data)):\n data = light_data[n]\n if data[3]:\n red = data[0]\n green = data[1]\n blue = data[2]\n\n string = str()\n if n < 10:\n string += \"0\"\n string += str(n) + \": \" + str(int(red)) + str(int(green)) + str(int(blue))\n print(string)\n\n set_led(n, red, green, blue)\n data[3] = False\n\n sleep(0.01)\n\n\ndef set_mode(m=0):\n global mode\n mode = m\n\n\ndef test():\n open_port(\"/dev/tty.usbmodem1d1141\")\n open_port(\"/dev/tty.usbserial-LI5AE6AA\")\n\n while count_leds() < 4:\n add_led(0, 0, 0, 0)\n add_led(1, 7, 7, 7)\n add_led(0, 7, 7, 7)\n add_led(0, 8, 8, 8)\n while count_leds() < 21:\n add_led(0, 0, 0, 0)\n\n for n in range(0, 21):\n reset_led(n)\n\n\nmelody_in.set_callback(melody_callback)\ndrum_in.set_callback(drum_callback)\nstart_new_thread(midi_input_runner, ())\nstart_new_thread(pir_input_runner, ())\nstart_new_thread(light_runner, ())"
},
{
"alpha_fraction": 0.5295610427856445,
"alphanum_fraction": 0.5516366362571716,
"avg_line_length": 20.41847801208496,
"blob_id": "accbc055fb6101942fcfada579cb59c735614806",
"content_id": "850ab010df486f624446979c685061e9d99569ed",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3941,
"license_type": "permissive",
"max_line_length": 80,
"num_lines": 184,
"path": "/MIDI/MIDI.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "#include \"Tlc5940.h\"\n#include <MIDI.h>\n\n\nMIDI_CREATE_DEFAULT_INSTANCE();\n\nint inte=0, clean=0, contador=1;\nbyte velocidad=0;\nbyte max_velocidad=127;\n\nint vel=0;\n\n\nint ledon[12];\nint led=1;\nint nota=0;\nint nota_tocada=0;\nint nota_soltada=0;\nint note=0;\nbool esta=false;\n\nvolatile int cambio_modo=1;\n\n\n// -----------------------------------------------------------------------------\n\n// This function will be automatically called when a NoteOn is received.\n// It must be a void-returning function with the correct parameters,\n// see documentation here:\n// http://arduinomidilib.fortyseveneffects.com/a00022.html\n\nvoid handleNoteOn(byte channel, byte pitch, byte velocity)\n{\n nota =(int) pitch;\n velocidad=velocity;\n inte=1;\n if(cambio_modo==1){\n nota_tocada =(int) pitch;\n ledon[led]=nota_tocada; \n }\n}\n\n\nvoid handleNoteOff(byte channel, byte pitch, byte velocity)\n{\n clean=1;\n nota =(int) pitch;\n if(cambio_modo==1){\n nota_soltada=nota;\n for(int i=1; i< 12 || !esta;i++){\n if(ledon[i]==nota_soltada){\n ledon[i]=-1;\n Tlc.set(i,0);\n Tlc.update();\n esta=true; \n }else esta=false;\n }\n }\n}\n\n// -----------------------------------------------------------------------------\n\nvoid setup()\n{\n Serial.begin(31250);\n pinMode(2, INPUT);\n \n attachInterrupt( 0, change_mode, CHANGE);\n // Connect the handleNoteOn function to the library,\n // so it is called upon reception of a NoteOn.\n MIDI.setHandleNoteOn(handleNoteOn); // Put only the name of the function\n\n // Do the same for NoteOffs\n MIDI.setHandleNoteOff(handleNoteOff);\n\n // Initiate MIDI communications, listen to all channels\n MIDI.begin(MIDI_CHANNEL_OMNI);\n /* Call Tlc.init() to setup the tlc.\n You can optionally pass an initial PWM value (0 - 4095) for all channels.*/\n Tlc.init();\n \n for (int i=0; i<12;i++){\n ledon[i]=-1;\n }\n}\n\n/* This loop will create a Knight Rider-like effect if you have LEDs plugged\n into all the TLC outputs. NUM_TLCS is defined in \"tlc_config.h\" in the\n library folder. After editing tlc_config.h for your setup, delete the\n Tlc5940.o file to save the changes. */\n\nvoid loop()\n{\n // Call MIDI.read the fastest you can for real-time performance.\n MIDI.read();\n\n // There is no need to check if there are messages incoming\n // if they are bound to a Callback function.\n // The attached method will be called automatically\n // when the corresponding message has been received.\n switch(cambio_modo){\n case 1://ejecutamos modo 1\n if (inte){\n enciende_led(led);\n if (led==12){\n led=1;\n }else\n led ++;\n \n inte=0;\n }\n if (clean){\n clean=0;\n Tlc.update(); \n }\n break;\n \n case 2: //ejecutamos modo 2\n \n if (inte){\n //Tlc.clear();\n calcula_contador(nota);\n enciende_led(contador);\n inte=0;\n }\n if (clean){\n calcula_contador(nota);\n Tlc.set(contador,0);\n Tlc.update();\n clean=0;\n }\n \n break;\n \n default: //ejecutamos modo 2\n //cambio_modo=1;\n \n if (inte){\n \n calcula_contador(nota);\n enciende_led(contador);\n inte=0;\n }\n if (clean){\n calcula_contador(nota);\n Tlc.set(contador,0);\n Tlc.update();\n clean=0;\n }\n \n break;\n }\n}\n\n\n\nvoid enciende_led(int contador){\n Tlc.set(contador,4095);\n Tlc.update();\n}\n\nint calcula_nota (int nota_a){\n while (nota_a > 11){\n nota_a=nota_a-12;\n }\n return nota_a;\n}\n\nvoid calcula_contador (int nota){\n while (nota > 11){\n nota=nota-12;\n }\n contador=nota+1;\n }\n\n \nvoid change_mode() \n { \n if (cambio_modo==1)\n cambio_modo=2;\n \n else cambio_modo=1;\n Tlc.clear();\n }\n"
},
{
"alpha_fraction": 0.6065815091133118,
"alphanum_fraction": 0.6296660304069519,
"avg_line_length": 28.492753982543945,
"blob_id": "12a107603829ff74c88e933b5ba90c05afcb03f7",
"content_id": "3f469b2cd2e35b567d2e57eab47de941d0948dd4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2036,
"license_type": "permissive",
"max_line_length": 109,
"num_lines": 69,
"path": "/Chicken_Head/Chicken_Head.ino",
"repo_name": "ddrmaster1000/Arduino",
"src_encoding": "UTF-8",
"text": "/* To be used to make a Puppet Chicken Head in Spooky Attics turn around when flashlights are pointed at them\n * \n * \n */\n\n#include <Servo.h>\n\nServo myservo; // create servo object to control a servo\n\nint sensorPin = A0; // select the input pin for the potentiometer\nint ledPin = 13; // select the pin for the LED\nint sensorValue = 0; // variable to store the value coming from the sensor\n\nint current_pos = 0;\nint pos = 0;\nint OnValue = 22;\nint OffValue = OnValue - 1;\n\nvoid setup() {\n // declare the ledPin as an OUTPUT:\n myservo.attach(9); // attaches the servo on pin 9 to the servo object\n Serial.begin(9600);\n}\n\nvoid loop() {\n // read the value from the sensor:\n if(sensorValue >= OnValue){\n LightOn_RotateFoward();\n }\n else{\n LightOff_RotateBackward();\n }\n}\n\nvoid LightOff_RotateBackward(){\n for (pos = current_pos; pos >= 0; pos -= 1) { // goes from 0 degrees to 180 degrees\n if(sensorValue <= OffValue){\n // in steps of 1 degree\n myservo.write(pos); // tell servo to go to position in variable 'pos'\n current_pos = pos;\n delay(10); // waits 15ms for the servo to reach the position\n sensorValue = analogRead(sensorPin);\n Serial.println(sensorValue);\n }\n }\n while(current_pos == 0 && sensorValue <= OffValue){\n delay(10);\n sensorValue = analogRead(sensorPin);\n Serial.println(sensorValue);\n }\n}\n\nvoid LightOn_RotateFoward(){\n for (pos = current_pos; pos <= 180; pos += 1) { // goes from 0 degrees to 180 degrees\n if(sensorValue >= OnValue){\n // in steps of 1 degree\n myservo.write(pos); // tell servo to go to position in variable 'pos'\n current_pos = pos;\n delay(25); // waits 15ms for the servo to reach the position\n sensorValue = analogRead(sensorPin);\n Serial.println(sensorValue);\n }\n }\n while(current_pos == 180 && sensorValue >= OnValue){\n delay(10);\n sensorValue = analogRead(sensorPin);\n Serial.println(sensorValue);\n }\n}\n\n"
}
] | 39 | stern1978/PythonCodeWork | https://github.com/stern1978/PythonCodeWork | 7edc1785ea9817f607d939ca1b45b68bd6ba039d | 0b8ebe7f8c4cb575ccb95ec829002aa6fd309c2c | 74572ffec6d573a510690228de280a26ba05fde7 | refs/heads/master | 2021-05-14T22:37:17.336840 | 2017-10-10T20:21:27 | 2017-10-10T20:21:27 | 106,466,165 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5546218752861023,
"alphanum_fraction": 0.5654261708259583,
"avg_line_length": 25.766666412353516,
"blob_id": "318e3bfd6d7d56ccb563cbe3b6e2f05ddfc7b057",
"content_id": "4160f29e327a3c34a9e38b62843d8bff4e28dcc2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 833,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 30,
"path": "/autoOriginalsFolder.pyw",
"repo_name": "stern1978/PythonCodeWork",
"src_encoding": "UTF-8",
"text": "#! python3\r\n# -*- coding: utf-8 -*-\r\n\"\"\"Creates new folder with todays date in AEC Work folder.\"\"\"\r\n\r\nimport os\r\nimport ctypes\r\nimport datetime\r\nimport time\r\n\r\nNOW = datetime.datetime.now()\r\nNEW_FOLDER = NOW.strftime('%m%d%Y')\r\n\r\nUPDATE = os.path.join('C:\\\\', 'AEC Work', NEW_FOLDER, 'New folder')\r\nOPATH = os.path.join('C:\\\\', 'AEC Work', NEW_FOLDER, 'New folder', 'ORIGINALS')\r\n\r\ndef originals():\r\n \"\"\"Originals folder is created when 'New folder' is detected in my\r\n work folder.\r\n \"\"\"\r\n while True:\r\n while os.path.exists(UPDATE):\r\n try:\r\n os.makedirs(OPATH)\r\n time.sleep(120)\r\n except WindowsError:\r\n ctypes.windll.user32.MessageBoxW(0, WindowsError, \"Error!\", 0)\r\n continue\r\n\r\nif __name__ == \"__main__\":\r\n originals()\r\n"
},
{
"alpha_fraction": 0.49957719445228577,
"alphanum_fraction": 0.5173346996307373,
"avg_line_length": 34.05487823486328,
"blob_id": "5e6ab9fd00e744ba29b00eeda85b96fbe888fe7b",
"content_id": "7cd613266cd25389154513768472b539535c65ef",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5913,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 164,
"path": "/odd_even_pdf2.pyw",
"repo_name": "stern1978/PythonCodeWork",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\r\n\"\"\"Scans odd folder and creates even numbered pdf's in even folder.\"\"\"\r\n\r\n\r\nimport os\r\nimport shutil\r\nimport ctypes\r\nimport glob\r\nimport PyPDF2\r\nimport wx\r\n\r\nclass WINDOWCLASS(wx.Frame):\r\n \"\"\"To do Doc String\"\"\"\r\n\r\n def __init__(self, parent, *args, **kwargs):\r\n \"\"\"To do Doc String\"\"\"\r\n wx.Frame.__init__(self, parent)\r\n\r\n self.panel = wx.Panel(self)\r\n\r\n self.vbox = wx.BoxSizer(wx.VERTICAL)\r\n\r\n '''self.oddbox = wx.BoxSizer(wx.HORIZONTAL)\r\n self.oddtext = wx.StaticText(panel, label='Odd File Location')\r\n self.oddbox.Add(oddtext, flag=wx.RIGHT, border=8)\r\n self.oddtextbox = wx.TextCtrl(panel)\r\n self.oddbox.Add(oddtextbox, proportion=1)\r\n self.vbox.Add(oddbox, flag=wx.EXPAND|wx.LEFT|wx.RIGHT|wx.TOP, border=10)\r\n self.oddtextbox.SetValue('C:\\\\Users\\\\as$646\\\\Desktop\\\\ODD')\r\n self.oddtextbox.SetValue(oddtextbox.GetValue())\r\n self.vbox.Add((-1, 10))\r\n\r\n self.hbox2 = wx.BoxSizer(wx.HORIZONTAL)\r\n self.st2 = wx.StaticText(panel, label='Even File Location')\r\n self.hbox2.Add(st2, flag=wx.RIGHT, border=8)\r\n self.tc2 = wx.TextCtrl(panel)\r\n self.hbox2.Add(tc2, proportion=1)\r\n self.vbox.Add(hbox2, flag=wx.EXPAND|wx.LEFT|wx.RIGHT|wx.TOP, border=10)\r\n self.tc2.SetValue('C:\\\\Users\\\\as$646\\\\Desktop\\\\EVEN')\r\n\r\n\r\n self.vbox.Add((-1, 10))'''\r\n\r\n self.hbox3 = wx.BoxSizer(wx.HORIZONTAL)\r\n self.st3 = wx.StaticText(self.panel, label='Processed')\r\n self.hbox3.Add(self.st3)\r\n self.vbox.Add(self.hbox3, flag=wx.CENTER | wx.TOP, border=10)\r\n\r\n self.vbox.Add((-1, 10))\r\n\r\n self.hbox4 = wx.BoxSizer(wx.HORIZONTAL)\r\n self.tc4 = wx.TextCtrl(self.panel, style=wx.TE_MULTILINE)\r\n self.hbox4.Add(self.tc4, proportion=1, flag=wx.EXPAND)\r\n self.vbox.Add(self.hbox4, proportion=1, flag=wx.LEFT|wx.RIGHT|wx.EXPAND, \r\n border=10)\r\n self.vbox.Add((-1, 10))\r\n\r\n self.hbox5 = wx.BoxSizer(wx.HORIZONTAL)\r\n self.tc5 = wx.TextCtrl(self.panel, style=wx.TE_MULTILINE)\r\n self.hbox4.Add(self.tc5, proportion=1, flag=wx.EXPAND)\r\n self.vbox.Add(self.hbox5, proportion=1, flag=wx.LEFT|wx.RIGHT|wx.EXPAND, \r\n border=10)\r\n self.vbox.Add((-1, 10))\r\n \r\n self.buttons = wx.BoxSizer(wx.HORIZONTAL)\r\n \r\n self.okbutton = wx.Button(self.panel, label='Process', size=(70, 30))\r\n self.okbutton.Bind(wx.EVT_BUTTON, self.Convert)\r\n self.buttons.Add(self.okbutton)\r\n self.closebutton = wx.Button(self.panel, label='Close', size=(70, 30))\r\n self.closebutton.Bind(wx.EVT_BUTTON, self.OnClose)\r\n self.buttons.Add(self.closebutton, flag=wx.LEFT|wx.BOTTOM, border=5)\r\n \r\n self.vbox.Add(self.buttons, flag=wx.ALIGN_RIGHT|wx.RIGHT, border=10)\r\n\r\n self.panel.SetSizer(self.vbox)\r\n\r\n self.SetSize((250, 500))\r\n self.SetTitle('Odd to Even pdf Converter')\r\n self.Centre()\r\n self.Show(True)\r\n\r\n def OnClose(self, e):\r\n self.Close(True)\r\n\r\n def Convert(self, e):\r\n #CHANGE TO WORK WITH PANNEL\r\n odd_box = wx.TextEntryDialog(None,\r\n 'Location of odd files?',\r\n 'odd',\r\n 'C:\\\\Users\\\\as$646\\\\Desktop\\\\odd')\r\n odd_box_input = odd_box.GetValue()\r\n odd_box.Destroy()\r\n\r\n even_box = wx.TextEntryDialog(None,\r\n 'Location to convere even file to?',\r\n 'even',\r\n 'C:\\\\Users\\\\as$646\\\\Desktop\\\\even')\r\n even_box_input = even_box.GetValue()\r\n even_box.Destroy()\r\n\r\n files = os.listdir\r\n odd = odd_box_input\r\n even = even_box_input\r\n blank = os.path.join('C:\\\\', 'apps', 'blank.pdf')\r\n file_list = []\r\n\r\n try:\r\n for file in 
files(odd):\r\n pdf = (odd + '\\\\' + file)\r\n pdf_scan = open(pdf, 'rb')\r\n pdf_reader = PyPDF2.PdfFileReader(pdf_scan)#1\r\n pages = pdf_reader.numPages\r\n file_list.append(file)\r\n\r\n if pages %2 == 0:\r\n shutil.copy(pdf, even)\r\n pdfeven = (file + ' - File copied.')\r\n print(pdfeven)\r\n self.tc4.AppendText(pdfeven + '\\n')\r\n \r\n else:\r\n pdfodd = (file + ' - Page added.')\r\n pdfblank = PyPDF2.PdfFileReader(open(blank, 'rb'))#2\r\n pdf_writer = PyPDF2.PdfFileWriter()\r\n #file.addBlankPage()\r\n\r\n for page_num in range(pdf_reader.numPages):\r\n pdf_pages = pdf_reader.getPage(page_num)\r\n pdf_writer.addPage(pdf_pages)\r\n\r\n\r\n for page_num in range(pdfblank.numPages):\r\n pdf_pages = pdfblank.getPage(page_num)\r\n pdf_writer.addPage(pdf_pages)\r\n\r\n pdf_output_file = open(even + '\\\\' + file, 'wb')\r\n pdf_writer.write(pdf_output_file)\r\n\r\n print(pdfodd)\r\n self.tc5.AppendText(pdfodd + '\\n') \r\n\r\n pdf_output_file.close()\r\n pdf_scan.close()\r\n \r\n\r\n except:\r\n ctypes.windll.user32.MessageBoxW(0, 'Check. ' + file, \"Error\", 0)\r\n\r\n nfiles = (len(file_list))\r\n\r\n '''odd_files = glob.glob(odd + '\\\\*')\r\n for filename in odd_files:\r\n os.remove(filename)'''\r\n\r\n ctypes.windll.user32.MessageBoxW(0, 'Converted - ' + str(nfiles) + ' files.', \"Done\", 0)\r\n\r\n\r\n\r\n\r\nif __name__ == '__main__':\r\n app = wx.App()\r\n WINDOWCLASS(None)\r\n app.MainLoop()\r\n"
},
{
"alpha_fraction": 0.4975019097328186,
"alphanum_fraction": 0.5219061970710754,
"avg_line_length": 35.17142868041992,
"blob_id": "2a52e6563611a2fc8630097d6d447cb633549b0c",
"content_id": "76dd94a50561d9f8a7e9bfe34172e2d27736f804",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5204,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 140,
"path": "/sizeSplit.pyw",
"repo_name": "stern1978/PythonCodeWork",
"src_encoding": "UTF-8",
"text": "import PyPDF2\r\nimport os\r\nimport glob\r\nimport wx\r\n'''11\"x17\" = 792x1224 points'''\r\n\r\nclass WINDOWCLASS(wx.Frame):\r\n \"\"\"To do Doc String\"\"\"\r\n\r\n def __init__(self, parent, *args, **kwargs):\r\n \"\"\"To do Doc String\"\"\"\r\n wx.Frame.__init__(self, parent)\r\n\r\n self.panel = wx.Panel(self)\r\n\r\n self.vbox = wx.BoxSizer(wx.VERTICAL)\r\n\r\n self.hbox1 = wx.BoxSizer(wx.HORIZONTAL)\r\n self.st1 = wx.StaticText(self.panel, label='Files to split location')\r\n self.hbox1.Add(self.st1, flag=wx.RIGHT, border=8)\r\n self.tc1 = wx.TextCtrl(self.panel)\r\n self.hbox1.Add(self.tc1, proportion=1)\r\n self.vbox.Add(self.hbox1, flag=wx.EXPAND|wx.LEFT|wx.RIGHT|wx.TOP, border=10)\r\n self.tc1.SetValue('C:\\\\Apps\\\\TEST Folder\\\\splitter test')\r\n self.location = self.tc1.GetValue()\r\n self.vbox.Add((-1, 10))\r\n\r\n '''self.hbox2 = wx.BoxSizer(wx.HORIZONTAL)\r\n self.st2 = wx.StaticText(self.panel, label='Split location')\r\n self.hbox2.Add(self.st2, flag=wx.RIGHT, border=8)\r\n self.tc2 = wx.TextCtrl(self.panel)\r\n self.hbox2.Add(self.tc2, proportion=1)\r\n self.vbox.Add(self.hbox2, flag=wx.EXPAND|wx.LEFT|wx.RIGHT|wx.TOP, border=10)\r\n self.tc2.SetValue('C:\\\\Users\\\\as$646\\\\Desktop\\\\EVEN')\r\n self.vbox.Add((-1, 10))'''\r\n\r\n self.hbox3 = wx.BoxSizer(wx.HORIZONTAL)\r\n self.st3 = wx.StaticText(self.panel, label='Processed')\r\n self.hbox3.Add(self.st3)\r\n self.vbox.Add(self.hbox3, flag=wx.CENTER | wx.TOP, border=10)\r\n self.vbox.Add((-1, 10))\r\n\r\n self.hbox4 = wx.BoxSizer(wx.HORIZONTAL)\r\n self.tc4 = wx.TextCtrl(self.panel, style=wx.TE_MULTILINE)\r\n self.hbox4.Add(self.tc4, proportion=1, flag=wx.EXPAND)\r\n self.vbox.Add(self.hbox4, proportion=1, flag=wx.LEFT|wx.RIGHT|wx.EXPAND, \r\n border=10)\r\n self.vbox.Add((-1, 10))\r\n\r\n '''self.hbox5 = wx.BoxSizer(wx.HORIZONTAL)\r\n self.tc5 = wx.TextCtrl(self.panel, style=wx.TE_MULTILINE)\r\n self.hbox4.Add(self.tc5, proportion=1, flag=wx.EXPAND)\r\n self.vbox.Add(self.hbox5, proportion=1, flag=wx.LEFT|wx.RIGHT|wx.EXPAND, \r\n border=10)\r\n self.vbox.Add((-1, 10))'''\r\n \r\n self.buttons = wx.BoxSizer(wx.HORIZONTAL)\r\n self.okbutton = wx.Button(self.panel, label='Process', size=(70, 30))\r\n self.okbutton.Bind(wx.EVT_BUTTON, self.split)\r\n self.buttons.Add(self.okbutton)\r\n self.closebutton = wx.Button(self.panel, label='Close', size=(70, 30))\r\n self.closebutton.Bind(wx.EVT_BUTTON, self.OnClose)\r\n self.buttons.Add(self.closebutton, flag=wx.LEFT|wx.BOTTOM, border=5)\r\n self.vbox.Add(self.buttons, flag=wx.ALIGN_RIGHT|wx.RIGHT, border=10)\r\n\r\n self.panel.SetSizer(self.vbox)\r\n\r\n self.SetSize((250, 500))\r\n self.SetTitle('PDF size splitter')\r\n self.Centre()\r\n self.Show(True)\r\n\r\n\r\n def OnClose(self, e):\r\n self.Close(True)\r\n\r\n def split(self, e):\r\n self.location = self.tc1.GetValue()\r\n #print(self.location)\r\n location = self.location\r\n try:\r\n os.makedirs(self.location + '\\\\Large Format')\r\n os.makedirs(self.location + '\\\\Small Format')\r\n except:\r\n pass\r\n \r\n lf = []\r\n sf = []\r\n g = glob.glob(location + '\\\\*pdf')\r\n\r\n for pdf in g:\r\n #print(pdf)\r\n pdfread = PyPDF2.PdfFileReader(open(pdf, 'rb'))\r\n\r\n for pages in range(0, pdfread.numPages):\r\n pagesize = pdfread.getPage(pages).mediaBox\r\n psize1 = pagesize[2]/72\r\n psize2 = pagesize[3]/72\r\n #print(psize1, psize2)\r\n\r\n if pagesize[2] > 792 or pagesize[3] > 1224:\r\n pdf_writer = PyPDF2.PdfFileWriter()\r\n lf.append(pages)\r\n \r\n\r\n for page in lf:\r\n pdf_pages = 
pdfread.getPage(page)\r\n pdf_writer.addPage(pdf_pages)\r\n \r\n pdf_output_file = open(self.location + '\\\\large format\\\\' + os.path.basename(pdf), 'wb')\r\n pdf_writer.write(pdf_output_file)\r\n \r\n\r\n pdf_output_file.close()\r\n \r\n \r\n else:\r\n pdf_writer = PyPDF2.PdfFileWriter()\r\n sf.append(pages)\r\n \r\n\r\n for page in sf:\r\n pdf_pages = pdfread.getPage(page)\r\n pdf_writer.addPage(pdf_pages)\r\n \r\n pdf_output_file = open(location + '\\\\small format\\\\' + os.path.basename(pdf), 'wb')\r\n pdf_writer.write(pdf_output_file)\r\n \r\n\r\n pdf_output_file.close()\r\n self.tc4.AppendText(os.path.basename(pdf))\r\n self.tc4.AppendText(' - ' + str(psize1) + 'X' + str(psize2) + '\\n')\r\n \r\n del lf[:]\r\n del sf[:]\r\n\r\nif __name__ == '__main__':\r\n app = wx.App()\r\n WINDOWCLASS(None)\r\n app.MainLoop()\r\n"
},
{
"alpha_fraction": 0.6268796920776367,
"alphanum_fraction": 0.6400375962257385,
"avg_line_length": 28.399999618530273,
"blob_id": "796ba20dc214f8b8650d70d675a7f529cf0d9c74",
"content_id": "8c9e6800f45bdf256ac4fcd7ae28ef32f78dd5f4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1064,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 35,
"path": "/moveDaysWorkToArchive.pyw",
"repo_name": "stern1978/PythonCodeWork",
"src_encoding": "UTF-8",
"text": "#! python3\r\n# -*- coding: utf-8 -*-\r\n\"\"\" Moves folders from AEC Work to Aaron's archive jobs folder at end of day.\"\"\"\r\n\r\nimport shutil\r\nimport os\r\nimport datetime\r\nimport time\r\nimport sys\r\nimport ctypes\r\n\r\nos.system('TASKKILL /F /IM FlashRen.exe')\r\ncwd = os.getcwd()\r\n\r\nNOW = datetime.datetime.now()\r\nNEW_FOLDER = \"{:%m%d%Y}\".format(NOW)\r\n\r\nSRC = os.path.join('C:\\\\', 'AEC Work', NEW_FOLDER)\r\nBACKUP = os.path.join('\\\\\\\\fileserver', 'Data Service', 'AARON')\r\nDST = os.path.join(BACKUP, \"{:%Y}\".format(NOW), \"{:%m. %B}\".format(NOW).upper(), NEW_FOLDER)\r\n\r\ntry:\r\n shutil.move(SRC, DST)\r\n\r\nexcept FileExistsError:\r\n ctypes.windll.user32.MessageBoxW(0, \"Folder Exists. Can't Archive Folder.\", \"Error\", 0)\r\n \r\nexcept FileNotFoundError:\r\n ctypes.windll.user32.MessageBoxW(0, \"No Folder found in AEC Work. Can't Archive.\", \"Error\", 0)\r\n\r\nexcept:\r\n filehandler = open('archive_error.log', 'a')\r\n filehandler.write(str(time.ctime()) + 'error' + '\\n')\r\n filehandler.close()\r\n #ctypes.windll.user32.MessageBoxW(0, \"Drive is down!\", \"Error\", 0)\r\n"
},
{
"alpha_fraction": 0.608547031879425,
"alphanum_fraction": 0.6188034415245056,
"avg_line_length": 24.590909957885742,
"blob_id": "ade62a746caeee77a4d35d582013867f96bbc3b0",
"content_id": "10515d9bd32bcfb04e484aaf732747ae4d119b17",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 585,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 22,
"path": "/createNewWorkFolder.pyw",
"repo_name": "stern1978/PythonCodeWork",
"src_encoding": "UTF-8",
"text": "#! python3\r\n# -*- coding: utf-8 -*-\r\n\"\"\"Creates new folder with todays date in AEC Work folder.\"\"\"\r\n\r\nimport os\r\nimport datetime\r\nimport ctypes\r\n\r\nNOW = datetime.datetime.now()\r\nNEW_FOLDER = NOW.strftime('%m%d%Y')\r\n\r\ndef newfolder():\r\n \"\"\"Adds a new folder to the AEC Work folder with todays date.\r\n If folder with todays date exists an error box will appear.\"\"\"\r\n\r\n try:\r\n os.makedirs('C:\\\\AEC Work\\\\' + NEW_FOLDER)\r\n except WindowsError:\r\n ctypes.windll.user32.MessageBoxW(0, \"Folder Exists\", \"Error\", 0)\r\n\r\nif __name__ == \"__main__\":\r\n newfolder()\r\n"
}
] | 5 |
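The page-splitting script in the record above copies pages into "large format" and "small format" PDFs with PyPDF2's legacy reader/writer API. A minimal sketch of that copy-pages pattern, assuming the pre-2.0 PyPDF2 API and a hypothetical `input.pdf` / `subset.pdf` pair:

```python
import PyPDF2

# Read the source document and copy a page subset into a new writer.
with open('input.pdf', 'rb') as source:
    reader = PyPDF2.PdfFileReader(source)
    writer = PyPDF2.PdfFileWriter()

    # Copy every page here; the script above instead picks pages by size.
    for page_number in range(reader.getNumPages()):
        writer.addPage(reader.getPage(page_number))

    # Write the collected pages out as a new PDF while the source is open.
    with open('subset.pdf', 'wb') as output_file:
        writer.write(output_file)
```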
srzvan/ARMS-esports-statistics
|
https://github.com/srzvan/ARMS-esports-statistics
|
03e2b0a9d162324f1046966f18b277a6bfbb2f47
|
e4b42b358c9d40e2a5b77e29b9443b164c0ce658
|
8da5b01bda2a5ecbb1b9f15489a5b35abacb81aa
|
refs/heads/master
| 2020-04-25T04:55:08.179152 | 2019-04-19T16:57:36 | 2019-04-19T16:57:36 | 172,526,119 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.8399999737739563,
"alphanum_fraction": 0.8399999737739563,
"avg_line_length": 25,
"blob_id": "a374929476e38405ec30661f12626ff7c37c135d",
"content_id": "7a236cd7809d0359a974b9a2a862c0d63d225b56",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 25,
"license_type": "permissive",
"max_line_length": 25,
"num_lines": 1,
"path": "/README.md",
"repo_name": "srzvan/ARMS-esports-statistics",
"src_encoding": "UTF-8",
"text": "# ARMS-esports-statistics"
},
{
"alpha_fraction": 0.70330411195755,
"alphanum_fraction": 0.71004718542099,
"avg_line_length": 43,
"blob_id": "1b1468b5a57453891223d3cccaabb879185cdb09",
"content_id": "c054af239a8c5ac0c3e9b5f9adf4fc608984faed",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1495,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 33,
"path": "/data/resources.md",
"repo_name": "srzvan/ARMS-esports-statistics",
"src_encoding": "UTF-8",
"text": "# Resurse proiect\r\n## Listă site-uri\r\n* ~~e-sports market~~\r\n* [esports.com](https://www.esports.com/)\r\n* [indian esports scene](https://www.afkgaming.com/)\r\n* [esl play](https://play.eslgaming.com/romania)\r\n* [esl gaming](https://www.eslgaming.com/)\r\n* [team liquid](https://www.teamliquid.net/)\r\n* [major league gaming](http://www.mlg.com/)\r\n* [gosu gamers](https://www.gosugamers.net)\r\n* [esreality - quake](http://www.esreality.com/)\r\n* [G2 Esports - a World Premier Esports Club](https://www.g2esports.com/#about)\r\n* [bleacher report](https://bleacherreport.com/gaming)\r\n* [espn - esports](http://www.espn.com/esports/)\r\n* [esports heavean](https://www.esportsheaven.com/)\r\n* [esports observer - bussines side](https://esportsobserver.com/)\r\n* [kotaku](https://kotaku.com/tag/esports)\r\n* [redbull esports](https://www.redbull.com/ca-en/tags/esports)\r\n* [/r/esportsnews](https://www.reddit.com/r/esportsnews/)\r\n* [/r/esports](https://www.reddit.com/r/esports/)\r\n* [the score esports](https://www.thescoreesports.com/home)\r\n\r\nTODO:\r\n* agregarea informațiilor pe perioada 2018-2019\r\n* clasificare resurse pe continente/tip de informație\r\n* identificare evenimente majore din cadrul unui an ESports\r\n* DeepMind's AlphaStar\r\n* Din perspectiva unui jucător\r\n * De ce anume e nevoie pentru a intra într-o competiție?\r\n * De ce anume e nevoie pentru a câștiga o competiție?\r\n* Din perspectiva unui investitor\r\n * De ce aș vrea să investesc în scena ESports ?\r\n * Cum pot face asta ?"
},
{
"alpha_fraction": 0.5600489974021912,
"alphanum_fraction": 0.5692402124404907,
"avg_line_length": 28.79245376586914,
"blob_id": "303b76642e26f17bb48fa5b4f93c5e3216a146e0",
"content_id": "e8b7418706c375b54b4a631030fc3ca647af7e3b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1632,
"license_type": "permissive",
"max_line_length": 84,
"num_lines": 53,
"path": "/src/arms.py",
"repo_name": "srzvan/ARMS-esports-statistics",
"src_encoding": "UTF-8",
"text": "import csv\r\nimport json\r\nimport plotly\r\nimport plotly.graph_objs as go\r\n\r\n# CSV files\r\ng2_esports_cs_go = \"data/scraped-data/g2-esports/g2-esports-articles-meta-cs-go.csv\"\r\ng2_esports_lol = \"data/scraped-data/g2-esports/g2-esports-articles-meta-lol.csv\"\r\ng2_esports_hs = \"data/scraped-data/g2-esports/g2-esports-articles-meta-hs.csv\"\r\nesl_gaming_dota = \"data/scraped-data/esl-gaming/esl-gaming-articles-meta-dota.csv\"\r\n\r\n# data dictionaries\r\nesports_dict = dict()\r\n\r\ndef get_csv_data(file = None, game_name = \"\"):\r\n global esports_dict\r\n\r\n excluded_strings = [\"\", \"Source\"]\r\n\r\n if file == None or game_name == \"\":\r\n return\r\n \r\n with open(file, encoding='utf-8-sig') as csvData:\r\n csv_reader = csv.reader(csvData)\r\n\r\n for row in csv_reader:\r\n # print(row[4])\r\n title = row[4]\r\n\r\n if title in excluded_strings:\r\n continue\r\n \r\n if game_name in esports_dict.keys():\r\n esports_dict[game_name].append(title)\r\n else:\r\n esports_dict[game_name] = [title]\r\n\r\nget_csv_data(g2_esports_cs_go, \"cs_go\")\r\nget_csv_data(g2_esports_lol, \"lol\")\r\nget_csv_data(g2_esports_hs, \"hs\")\r\nget_csv_data(esl_gaming_dota, \"dota\")\r\n\r\nplotly.offline.plot({\r\n \"data\": [ go.Pie( labels = list(esports_dict.keys()), \r\n values = [ len(esports_dict[\"cs_go\"]),\r\n len(esports_dict[\"lol\"]),\r\n len(esports_dict[\"hs\"]),\r\n len(esports_dict[\"dota\"])\r\n ]\r\n )\r\n ],\r\n \"layout\": go.Layout(title=\"Esports Articles\")\r\n}, auto_open=True)\r\n"
}
] | 3 |
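arms.py hand-lists one `len(...)` per game when building the pie chart. A sketch of the same chart driven directly by the dict, using hypothetical stand-in data instead of the scraped CSVs so it runs on its own:

```python
import plotly
import plotly.graph_objs as go

# Hypothetical title lists standing in for the scraped CSV data.
esports_dict = {'cs_go': ['a', 'b'], 'lol': ['c'], 'hs': ['d', 'e', 'f'], 'dota': ['g']}

# Derive the slices from the dict itself, so adding a game needs no extra code.
labels = list(esports_dict.keys())
values = [len(titles) for titles in esports_dict.values()]

plotly.offline.plot({
    "data": [go.Pie(labels=labels, values=values)],
    "layout": go.Layout(title="Esports Articles"),
}, auto_open=False)
```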
Seth3511/Atomic_Learning_Project
|
https://github.com/Seth3511/Atomic_Learning_Project
|
aef81b0209f24bf8a2a07c1dff32c9e67f238ed8
|
8677eb7d83aec304efa240bafcf34f4e458b6e2e
|
316380b3212ac3bca3cedafa8465d5e3e81c2a7b
|
refs/heads/master
| 2021-07-25T01:37:40.529839 | 2017-11-03T21:04:19 | 2017-11-03T21:04:19 | 109,717,369 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5012962818145752,
"alphanum_fraction": 0.5196794867515564,
"avg_line_length": 32.41732406616211,
"blob_id": "1e5387ac3407da6f14ca580c2e5fb20e7a0146cc",
"content_id": "3b1e29195495cf93cd81d6f5554d24582fb45d56",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4243,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 127,
"path": "/topsecret.py",
"repo_name": "Seth3511/Atomic_Learning_Project",
"src_encoding": "UTF-8",
"text": "from datetime import datetime\nimport time as t\nimport csv\nimport os\n\nreports = []\nwith open('AL Report 8.28.17 - 10.16.17.csv') as csvfile:\n csvReader = csv.reader(csvfile, delimiter=',')\n\n i=0\n for row in csvReader:\n if(i>0):\n\n grade=\"unknown\"\n if(i>1):\n newName=row[0]+\" \"+row[1]\n if(newName==name):\n newTime=t.mktime(datetime.strptime(row[4]+\",\"+row[5],\"%Y-%m-%d,%I:%M:%S %p\").timetuple())\n\n videoLength =row[9]\n minutes=videoLength[0]+videoLength[1]+\"\"\n minutes=int(minutes)\n minutes=minutes*60\n seconds=videoLength[4]+videoLength[5]+\"\"\n seconds=int(seconds)\n videoLength=minutes+seconds\n\n if((time-newTime)>=(videoLength-5)):\n grade=\"pass\"\n else:\n grade=\"fail\"\n\n\n name=row[0]+\" \"+row[1]\n time=t.mktime(datetime.strptime(row[4]+\",\"+row[5],\"%Y-%m-%d,%I:%M:%S %p\").timetuple())\n application=row[6]\n series=row[7]\n title=row[8]\n\n report=(name,series,title,grade,application)\n reports.append(report)\n\n i=i+1\n\nfor i in range(len(reports)-2,-1,-1):\n for j in range(len(reports)-1,i,-1):\n if reports[i][0] == reports[j][0]:\n if reports[i][2] == reports[j][2]:\n if reports[i][3] != \"pass\" and reports[j][3] == \"pass\":\n temp=reports[i]\n reports[i]=reports[j]\n reports[j]=temp\n\n del reports[j]\n\nif not os.path.exists('output/'):\n os.makedirs('output')\n\nwith open('output/output1.csv','w',newline='') as out:\n csv_out = csv.writer(out)\n csv_out.writerow(['name','series title','video title','grade','application'])\n for report in reports:\n if report[3]=='pass':\n csv_out.writerow(report)\n\nvideos = {}\nwith open('Canvas Word and Excel Lists.csv') as csvfile:\n csvReader = csv.reader(csvfile, delimiter=',')\n for row in csvReader:\n if row[0] not in videos.keys():\n videos[row[0]]={}\n if row[1] not in videos[row[0]].keys():\n videos[row[0]][row[1]]=[]\n videos[row[0]][row[1]].append(row[2])\ndel videos['Application']\n\nstudents = []\nwith open('Student List.csv') as csvfile:\n csvReader = csv.reader(csvfile, delimiter=',')\n for row in csvReader:\n students.append(row[0])\ndel students[0]\n\nreports = {}\nwith open('output/output1.csv') as csvfile:\n csvReader = csv.reader(csvfile, delimiter=',')\n for row in csvReader:\n if row[4] not in reports.keys():\n reports[row[4]]={}\n if row[0] not in reports[row[4]].keys():\n reports[row[4]][row[0]]={}\n if row[1] not in reports[row[4]][row[0]].keys():\n reports[row[4]][row[0]][row[1]]=[]\n reports[row[4]][row[0]][row[1]].append(row[2])\ndel reports['application']\n\nfor application in videos.keys():\n with open(\"output/\"+application+'_output.csv', 'w', newline='') as out:\n title=[]\n title.append(\"\")\n for key in videos[application].keys():\n title.append(key)\n title.append(\"Total\")\n csv_out = csv.writer(out)\n csv_out.writerow(title)\n\n for student in students:\n if student in reports[application].keys():\n row=[]\n row.append(student)\n total=0\n for series in sorted(videos[application].keys()):\n if series in reports[application][student].keys():\n row.append(len(reports[application][student][series]))\n total+=len(reports[application][student][series])\n else:\n row.append(0)\n row.append(total)\n csv_out.writerow(row)\n\n row=[\"Videos In Series: \"]\n total=0\n for series in sorted(videos[application].keys()):\n row.append(len(videos[application][series]))\n total+=len(videos[application][series])\n row.append(total)\n csv_out.writerow(row)"
}
] | 1 |
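topsecret.py builds its `application -> series -> [titles]` mapping with repeated `if key not in` checks. A sketch of the same shape using `collections.defaultdict`, assuming the same "Canvas Word and Excel Lists.csv" layout as the script:

```python
import csv
from collections import defaultdict

# Same application -> series -> [titles] shape as in topsecret.py,
# built without the explicit "if key not in" membership checks.
videos = defaultdict(lambda: defaultdict(list))

with open('Canvas Word and Excel Lists.csv') as csvfile:
    for row in csv.reader(csvfile):
        application, series, title = row[0], row[1], row[2]
        videos[application][series].append(title)

# Drop the header row keyed by the literal column name, as the script does.
videos.pop('Application', None)
```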
mutsinghua/FulibaPhotoDownloader
|
https://github.com/mutsinghua/FulibaPhotoDownloader
|
35f198a94a9287f5b458318a5fd4e53319d3d434
|
92f3c22747462faae819e56980da254a485e4e94
|
75bd83d6ce3afee5becdcaf6c80382693cf8ea10
|
refs/heads/master
| 2021-08-10T09:16:15.518623 | 2017-11-12T12:34:55 | 2017-11-12T12:34:55 | 110,430,759 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5785627365112305,
"alphanum_fraction": 0.5919610261917114,
"avg_line_length": 28.026548385620117,
"blob_id": "2de1f94fa971ffb3cd60cf6759402962c79bd9ad",
"content_id": "d0032aa2115a3aa650d18540c46e0fb245eab5f1",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3302,
"license_type": "permissive",
"max_line_length": 159,
"num_lines": 113,
"path": "/fuliba.py",
"repo_name": "mutsinghua/FulibaPhotoDownloader",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\nimport os\nimport requests\nimport time\nfrom bs4 import BeautifulSoup\nfrom concurrent.futures import ThreadPoolExecutor, wait, as_completed\n\nSTORE_PATH = 'fuliba2'\n\ndef download_img(link, abspath):\n filename = os.path.basename(link)\n filepath = (os.path.join(abspath,filename))\n try:\n f = open(filepath, 'ab')\n try:\n data = requests.get(link, headers=headers).content\n except:\n time.sleep(1)\n data = requests.get(link, headers=headers).content\n try:\n f.write(data)\n f.close()\n except:\n f.close()\n pass\n except:\n fail = True\n pass\n\ndef parse_img(data):\n url = data['url']\n abspath = data['abspath']\n print ('parse_img',url)\n content = requests.get(url)\n article = BeautifulSoup(content.text, 'lxml').find('article', class_='article-content')\n imgags = article.find_all('img')\n for img in imgags:\n link = img['src']\n print('Downloading:',link)\n download_img(link,abspath)\n print('parse_img & download_img finish',url)\n return content.text\n\n\n\ndef parse_page(url,pool,abspath):\n print('parse_page', url)\n data = {'url': url, 'abspath': abspath}\n text = parse_img(data)\n if text is None:\n content = requests.get(url)\n text = content.text\n pages = BeautifulSoup(text, 'lxml').find('div', class_='article-paging')\n if pages is None:\n return\n pagelinks = pages.find_all('a')\n futures = []\n for pagelink in pagelinks:\n link = pagelink['href']\n data = {'url':link,'abspath':abspath}\n futures.append(pool.submit(parse_img,(data)))\n # parse_img(link)\n return futures\n\n\ndef parse_article(h2):\n if h2.find('a') is not None:\n alink = h2.find('a')\n link = alink['href']\n title = alink.get_text().strip(r'/\\*:\"|<> ? , !?()()?(')\n if os.path.exists(title):\n print(title, 'pass')\n return\n if not os.path.exists(title):\n try:\n os.makedirs(title)\n abspath = os.path.abspath(title)\n except:\n pass\n print(title, '开始下载')\n pool = ThreadPoolExecutor(max_workers=10)\n wait(parse_page(link, pool, abspath))\n pool.shutdown()\n print('parse_artitcl finish ',title)\n\ndef parse_index(text):\n archives = BeautifulSoup(text, 'lxml').find_all('h2')\n poolG = ThreadPoolExecutor(max_workers=10)\n futuresG = []\n for h2 in archives:\n # parse_article(h2)\n futuresG.append(poolG.submit(parse_article,(h2)))\n wait(futuresG)\n poolG.shutdown()\n\n\nif not os.path.exists(STORE_PATH):\n os.makedirs(STORE_PATH)\nos.chdir(STORE_PATH)\n\nheaders = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:53.0) Gecko/20100101 Firefox/53.0', 'Referer':'https://www.fxfuli.org/category/fuliba/'}\nurl = 'https://www.fxfuli.org/category/fuliba/page/'\npage = 1\nprint('getting page',url+str(page))\nhtml = requests.get(url+str(page))\nwhile html.status_code == 200:\n print('processing page',page)\n parse_index(html.text)\n page=page+1\n print('getting page', url + str(page))\n html = requests.get(url + str(page))\n\n\n\n\n"
},
{
"alpha_fraction": 0.8091602921485901,
"alphanum_fraction": 0.8091602921485901,
"avg_line_length": 17.428571701049805,
"blob_id": "0e11027dae7a66fdfc3fd694d9610ba4f103f70a",
"content_id": "6e476cf342848ac45e4a4a09552fb2a1dc6ba125",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 261,
"license_type": "permissive",
"max_line_length": 45,
"num_lines": 7,
"path": "/README.md",
"repo_name": "mutsinghua/FulibaPhotoDownloader",
"src_encoding": "UTF-8",
"text": "# FulibaPhotoDownloader,photo crawler\n\n## 说明\n某个网站的图片下载器,也是轻量级的爬虫,根据分析网页中的图片地址把全站精彩的照片下载到本地\n\n## 环境\n- 需要安装BeautifulSoup,安装方式请自行搜索。\n\n\n"
}
] | 2 |
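fuliba.py fans work out with `ThreadPoolExecutor.submit` and blocks on `wait` before shutting the pool down. A minimal self-contained sketch of that submit-then-wait pattern, with a placeholder `fetch` job and hypothetical URLs standing in for the real download work:

```python
from concurrent.futures import ThreadPoolExecutor, wait

def fetch(url):
    # Placeholder for the real download work done per job.
    return url

# Submit one future per job, then block until every future resolves,
# mirroring the pool/wait pattern used in fuliba.py.
urls = ['https://example.com/a', 'https://example.com/b']
with ThreadPoolExecutor(max_workers=10) as pool:
    futures = [pool.submit(fetch, url) for url in urls]
    done, not_done = wait(futures)

results = [future.result() for future in done]
```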
chetan0402/crypto-sim
|
https://github.com/chetan0402/crypto-sim
|
c2c72376b19f28ed173137f7847e0acad9c43ff4
|
9519cb834b939dc1e1289692291c4a4a41b5e88b
|
9ca0c2ffe3a73daadc6860d979ae6f7d83b153e6
|
refs/heads/master
| 2023-02-01T19:28:57.445806 | 2020-12-20T05:13:39 | 2020-12-20T05:13:39 | 273,654,356 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6951219439506531,
"alphanum_fraction": 0.7109755873680115,
"avg_line_length": 21.428571701049805,
"blob_id": "9d8d9d2e0860d7b9c4dd86d0dd0d07fcdffaf82a",
"content_id": "e5a2dfa0fcd290c903921c5a0afb719b58abe538",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1640,
"license_type": "permissive",
"max_line_length": 107,
"num_lines": 70,
"path": "/README.md",
"repo_name": "chetan0402/crypto-sim",
"src_encoding": "UTF-8",
"text": "# Crypto-sim\r\n\r\nCrypto_sim/crypto-sim is a Python library for dealing with cryptography(secure encryption and decryption).\r\n\r\n## Installation\r\n\r\nUse the package manager [pip](https://pip.pypa.io/en/stable/) to install crypto-sim.\r\n\r\n```\r\npip install crypto-sim\r\n```\r\n\r\n## Usage\r\n\r\n### To generate a key\r\n```python\r\nfrom crypto_sim import crypto\r\n\r\ncrypto.generate_key()\r\n>>> b'KFH1RhgN_JydLaZzAv6QbpQzsET1SQLjjaFz9dR2-To=' #A key will be returned\r\n```\r\n### To save the key in default dir\r\n```python\r\nfrom crypto_sim import crypto\r\n\r\ncrypto.save_and_create_key() #A key will be automatically generate and saved\r\n```\r\n### To get the saved key\r\n```python\r\nfrom crypto_sim import crypto\r\n\r\ncrypto.get_key()\r\n```\r\nNote:- this only works when the save_and_create_key() is used\r\n\r\n### To encrypt a str\r\n```python\r\nfrom crypto_sim import\r\n\r\nkey=crypto.generate_key()\r\nplain_message=\"hi\"\r\nencrypted=crypto.encrypt(plain_message,key)\r\nprint(encrypted)\r\n```\r\nOutput:-\r\n```python\r\n>>> b'gAAAAABe7uOq4PYOSe9usicMgivc0oVJWaWBzBQmbZbICgrG7TfbdxK8bIuXqttaw-rnScJBGd2RUxgSaC3_CTK0NwbJS_THJA=='\r\n```\r\n### To decrypt\r\nFirst, we will execute the above code and then we will get the key and the encrypted message\r\n\r\nAfter that,\r\n```python\r\n# This full code is after the above code\r\ndecrypted=crypto.decrypt(encryption,key)\r\nprint(decrypted)\r\nplain_message==decrypted\r\n```\r\nOutput:-\r\n```python\r\n>>> hi\r\n>>> True\r\n```\r\n\r\n## Links\r\n[CODE](https://github.com/chetan0402/crypto-sim/)\r\n\r\n[LICENSE](https://github.com/chetan0402/crypto-sim/blob/master/LICENSE.txt)\r\n\r\n[See what's new in the next project](https://github.com/chetan0402/crypto-sim/blob/master/new.txt)\r\n"
},
{
"alpha_fraction": 0.6408759355545044,
"alphanum_fraction": 0.6408759355545044,
"avg_line_length": 18.571428298950195,
"blob_id": "2ea93cec3f608bf3ebc3b202267c0c66dea14ada",
"content_id": "0cceffca2d54345af5593641639ab8dafba6e4c5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 685,
"license_type": "permissive",
"max_line_length": 41,
"num_lines": 35,
"path": "/crypto_sim/crypto.py",
"repo_name": "chetan0402/crypto-sim",
"src_encoding": "UTF-8",
"text": "from cryptography.fernet import Fernet\n\n\ndef generate_key():\n key = Fernet.generate_key()\n return key\n\n\ndef save_and_create_key():\n key = Fernet.generate_key()\n file = open('key.key', 'wb')\n file.write(key)\n file.close()\n\n\ndef get_key():\n file = open('key.key', 'rb')\n key = file.read()\n file.close()\n return key\n\n\ndef encrypt(message_to_encrypt, key):\n message = message_to_encrypt.encode()\n f = Fernet(key)\n encrypted = f.encrypt(message)\n return encrypted\n\n\ndef decrypt(message_to_decrypt, key):\n message = message_to_decrypt\n f = Fernet(key)\n decrypted = f.decrypt(message)\n decrypted = decrypted.decode()\n return decrypted\n"
},
{
"alpha_fraction": 0.560606062412262,
"alphanum_fraction": 0.5735930800437927,
"avg_line_length": 23.66666603088379,
"blob_id": "f5249cc4ba158af83c1630c25a005b749b443480",
"content_id": "6c4febf1a9bc8461dc5dcc68ae6bbe812739a2ff",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 462,
"license_type": "permissive",
"max_line_length": 49,
"num_lines": 18,
"path": "/setup.py",
"repo_name": "chetan0402/crypto-sim",
"src_encoding": "UTF-8",
"text": "import setuptools\r\n\r\nsetuptools.setup(\r\n name=\"crypto_sim\",\r\n version=\"0.0.3\",\r\n author=\"Chetan Sharma\",\r\n author_email=\"[email protected]\",\r\n description=\"A API for cryptography lib\",\r\n packages=setuptools.find_packages(),\r\n classifiers=[\r\n \"Programming Language :: Python :: 3\",\r\n \"License :: OSI Approved :: MIT License\",\r\n \"Operating System :: OS Independent\",\r\n ],\r\n install_requires=[\r\n \"cryptography>=3.2\"\r\n ]\r\n)\r\n"
}
] | 3 |
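crypto.py is a thin wrapper over `cryptography.fernet.Fernet`: `encrypt` encodes the string and calls `Fernet.encrypt`, and `decrypt` reverses both steps. A round-trip sketch with the underlying primitives it wraps:

```python
from cryptography.fernet import Fernet

# Round trip with the Fernet primitives that crypto.py wraps.
key = Fernet.generate_key()
f = Fernet(key)

token = f.encrypt(b'hi')        # bytes in, URL-safe token out
plaintext = f.decrypt(token)    # raises InvalidToken on a wrong key

assert plaintext == b'hi'
```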
jadiribnahasan/Forum-Webapp
|
https://github.com/jadiribnahasan/Forum-Webapp
|
38c29bc1d5c2b6e7d03c15a1e07afe6b1ea02d86
|
a1028d381953cb98475c1b57d898c391dbfc6997
|
3756e1991beaac520106157a301b23c7d1c84b5c
|
refs/heads/master
| 2021-07-01T11:38:56.007003 | 2020-12-31T13:42:44 | 2020-12-31T13:42:44 | 206,982,956 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6473317742347717,
"alphanum_fraction": 0.6473317742347717,
"avg_line_length": 33.91666793823242,
"blob_id": "824afcf34c486e4494dd0785eb09e5746cc6381d",
"content_id": "92c20985f243202d5c89d2bdbffdd81eb491fa99",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 431,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 12,
"path": "/mysite/forum/urls.py",
"repo_name": "jadiribnahasan/Forum-Webapp",
"src_encoding": "UTF-8",
"text": "from django.urls import path, include\r\nfrom . import views\r\n\r\napp_name = \"forum\"\r\n\r\nurlpatterns = [\r\n path('', views.forums, name='forums'),\r\n path('<int:pk>/', views.threads, name='threads'),\r\n path('create_forum/', views.create_forum, name='create_forum'),\r\n path('create_thread/<int:pk>', views.create_thread, name='create_thread'),\r\n path('thread<int:pk>/', views.thread_detail_view, name='thread_details'),\r\n]\r\n"
},
{
"alpha_fraction": 0.5670102834701538,
"alphanum_fraction": 0.5670102834701538,
"avg_line_length": 18.57575798034668,
"blob_id": "664c5d561cf1fdcd53692946690e1eedb7f894a3",
"content_id": "24affbdb0d1cd97d196299ca0b8ed66b9a816d0c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 679,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 33,
"path": "/mysite/forum/forms.py",
"repo_name": "jadiribnahasan/Forum-Webapp",
"src_encoding": "UTF-8",
"text": "from django import forms\r\nfrom django.contrib.auth.models import User\r\nfrom .models import Thread, Forum, Post\r\nfrom ckeditor.widgets import CKEditorWidget\r\nfrom ckeditor.fields import RichTextField\r\n\r\n\r\nclass ThreadCreationForm(forms.ModelForm):\r\n\r\n class Meta:\r\n model = Thread\r\n fields = [\r\n 'title',\r\n 'discussion',\r\n ]\r\n\r\n\r\nclass ForumCreationForm(forms.ModelForm):\r\n\r\n class Meta:\r\n model = Forum\r\n fields = [\r\n 'name',\r\n 'description',\r\n ]\r\n\r\n\r\nclass PostCreationForm(forms.ModelForm):\r\n class Meta:\r\n model = Post\r\n fields = [\r\n 'comment'\r\n ]\r\n"
},
{
"alpha_fraction": 0.6164383292198181,
"alphanum_fraction": 0.6183953285217285,
"avg_line_length": 32.01333236694336,
"blob_id": "39986b8884fbf562ac681db63894349e5b7e676a",
"content_id": "faa2cd2495d061399e5d6faa6420e256caa78bb6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2555,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 75,
"path": "/mysite/forum/views.py",
"repo_name": "jadiribnahasan/Forum-Webapp",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render, redirect, get_object_or_404\r\nfrom django.contrib.auth.models import User\r\nfrom django.contrib.auth.decorators import login_required\r\nfrom django.contrib.auth.decorators import login_required\r\nfrom .models import Forum, Thread, Post\r\nfrom django.views import generic\r\nfrom .forms import ForumCreationForm, ThreadCreationForm, PostCreationForm\r\nfrom django.contrib import messages\r\nfrom django.urls import reverse\r\n\r\n\r\n@login_required\r\ndef forums(request):\r\n context = {\r\n 'forums': Forum.objects.all(),\r\n }\r\n return render(request, 'forum/forums.html', context)\r\n\r\n\r\ndef threads(request, pk):\r\n forum_ = Forum.objects.raw('SELECT * FROM forum_forum WHERE id = %s', [pk])\r\n threads = Thread.objects.filter(forum_id=pk)\r\n context = {\r\n 'threads': threads,\r\n 'forum': forum_[0]\r\n }\r\n return render(request, 'forum/threads.html', context)\r\n\r\n\r\ndef create_forum(request):\r\n if request.method == 'POST':\r\n form = ForumCreationForm(request.POST, request.FILES)\r\n if form.is_valid():\r\n form.save()\r\n messages.success(request, f'Forum Created!')\r\n return redirect('forum:forums')\r\n else:\r\n form = ForumCreationForm()\r\n return render(request, 'forum/create_forum.html', {'form': form})\r\n\r\n\r\ndef create_thread(request, pk):\r\n if request.method == 'POST':\r\n form = ThreadCreationForm(request.POST, request.FILES)\r\n if form.is_valid():\r\n thread = form.save(commit=False)\r\n thread.forum_id = pk\r\n print(thread)\r\n thread.creator = request.user\r\n thread.save()\r\n messages.success(request, f'Thread Added to {thread.forum}')\r\n return redirect('forum:forums')\r\n else:\r\n form = ThreadCreationForm()\r\n return render(request, 'forum/create_thread.html', {'form': form})\r\n\r\n\r\ndef thread_detail_view(request, pk):\r\n if request.method == 'POST':\r\n form = PostCreationForm(request.POST, request.FILES)\r\n if form.is_valid():\r\n post = form.save(commit=False)\r\n post.creator = request.user\r\n post.thread_id = pk\r\n post.save()\r\n else:\r\n form = PostCreationForm()\r\n posts = Post.objects.filter(thread_id=pk)\r\n thread = Thread.objects.raw('SELECT * FROM forum_thread WHERE id = %s', [pk])\r\n context = {\r\n 'form': form,\r\n 'posts': posts,\r\n 'thread': thread[0]\r\n }\r\n return render(request, 'forum/thread_view.html', context)\r\n\r\n\r\n"
},
{
"alpha_fraction": 0.6848030090332031,
"alphanum_fraction": 0.692307710647583,
"avg_line_length": 28.457143783569336,
"blob_id": "dd3762648a1ebf6e518d07d713fc679ecf080701",
"content_id": "51bed71ffe4afcfda44ebc877dea194d0999bdea",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1066,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 35,
"path": "/mysite/forum/models.py",
"repo_name": "jadiribnahasan/Forum-Webapp",
"src_encoding": "UTF-8",
"text": "from django.db import models\r\nfrom ckeditor.fields import RichTextField\r\nfrom django.contrib.auth.models import User\r\nfrom django.utils import timezone\r\nfrom django.urls import reverse\r\n\r\n\r\nclass Forum(models.Model):\r\n name = models.CharField(max_length=50)\r\n description = RichTextField()\r\n\r\n def __str__(self):\r\n return self.name\r\n\r\n\r\nclass Thread(models.Model):\r\n title = models.CharField(max_length=200)\r\n discussion = RichTextField()\r\n forum = models.ForeignKey(Forum, on_delete=models.CASCADE)\r\n creator = models.ForeignKey(User, on_delete=models.CASCADE)\r\n\r\n def __str__(self):\r\n return self.title\r\n\r\n\r\nclass Post(models.Model):\r\n title = models.CharField(max_length=10, default=\"a\")\r\n comment = RichTextField()\r\n create_date = models.DateTimeField(default=timezone.now)\r\n creator = creator = models.ForeignKey(User, on_delete=models.CASCADE)\r\n thread = models.ForeignKey(Thread, on_delete=models.CASCADE)\r\n likes = models.IntegerField(default=0)\r\n\r\n def __str__(self):\r\n return self.comment\r\n"
}
] | 4 |
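The forum views repeatedly use the `form.save(commit=False)` idiom to fill in fields the form does not expose (`creator`, `thread_id`) before persisting. A sketch of that pattern as a hypothetical standalone view, assuming the `PostCreationForm` defined in forms.py above and omitting error handling for brevity:

```python
from django.shortcuts import redirect

from .forms import PostCreationForm

def create_post(request, thread_id):
    """Hypothetical view showing the save(commit=False) pattern used above."""
    form = PostCreationForm(request.POST)
    if form.is_valid():
        post = form.save(commit=False)  # build the instance without hitting the DB
        post.creator = request.user     # fill fields the form does not expose
        post.thread_id = thread_id
        post.save()                     # single INSERT with everything set
    return redirect('forum:forums')
```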
hompoth/uWindsor-POTW-Leaderboard
|
https://github.com/hompoth/uWindsor-POTW-Leaderboard
|
b53b6267c84b5eef5369016fe585fbb2f8b8f768
|
bc8c9fe1b3a4017990c9a9c5cc4ef2262f5b86fc
|
e9d6402845be812491ed0c93de5fbfe4c806e388
|
refs/heads/master
| 2020-12-30T21:59:15.915164 | 2015-10-18T04:58:45 | 2015-10-18T04:58:45 | 44,400,407 | 0 | 0 | null | 2015-10-16T17:28:46 | 2015-10-16T17:28:47 | 2015-10-13T13:50:41 |
HTML
|
[
{
"alpha_fraction": 0.7348394989967346,
"alphanum_fraction": 0.7348394989967346,
"avg_line_length": 37.227272033691406,
"blob_id": "414626bc7e1606d88ef7d9543ea5efe7d0ddcfd3",
"content_id": "d839dd541713a5d655b00afd82c00ac7e3ac2cd3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 841,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 22,
"path": "/solution/models.py",
"repo_name": "hompoth/uWindsor-POTW-Leaderboard",
"src_encoding": "UTF-8",
"text": "from django.db import models\nfrom student.models import Student\n\nclass Solution(models.Model):\n student = models.ForeignKey(Student)\n year = models.IntegerField()\n week = models.IntegerField()\n accepted = models.BooleanField(default=False)\n public = models.BooleanField(default=False)\n # Default set just to make django happy\n source = models.FileField(upload_to = \"source/%Y/%m/%d\", default='settings.MEDIA_ROOT/helloworld.c')\n\n def __str__(self):\n return str(self.year) + \" Week \" + str(self.week)\n\n# Receive the pre_delete signal and delete the file associated with the model instance.\nfrom django.db.models.signals import pre_delete\nfrom django.dispatch.dispatcher import receiver\n\n@receiver(pre_delete, sender=Solution)\ndef solution_delete(sender, instance, **kwargs):\n instance.source.delete(False)\n"
},
{
"alpha_fraction": 0.41525423526763916,
"alphanum_fraction": 0.4194915294647217,
"avg_line_length": 22.600000381469727,
"blob_id": "2493b439cd6d6951ef339d0dd7baa776cd8cf6f4",
"content_id": "fde2c26fe79ce8d615677dce92392e61ec19e5b1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "HTML",
"length_bytes": 473,
"license_type": "no_license",
"max_line_length": 68,
"num_lines": 20,
"path": "/templates/solution/all.html",
"repo_name": "hompoth/uWindsor-POTW-Leaderboard",
"src_encoding": "UTF-8",
"text": "{% extends 'staticpages/main.html' %}\n\n{% block content %}\n\n{% for p in problems %}\n <h1><a href=\"/problem/{{p.year}}/{{p.week}}\">{{ p }}</a></h1>\n <hr />\n <table class=\"table table-striped table-bordered\">\n {% for s in p.solutions %}\n <tr>\n <td>\n <a href=\"/solution/{{s.pk}}\">{{ s.student }}</a>\n </td>\n </tr>\n {% endfor %}\n </table>\n <hr />\n{% endfor %}\n\n{% endblock %}\n"
}
] | 2 |
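solution/models.py hooks `pre_delete` because deleting a model row does not delete the file its `FileField` points to. A sketch of the same receiver pattern on a hypothetical `Upload` model, assuming it lives inside an installed Django app:

```python
from django.db import models
from django.db.models.signals import pre_delete
from django.dispatch import receiver

class Upload(models.Model):
    # Hypothetical model; Django leaves this file on disk when the row is
    # deleted unless a signal handler cleans it up.
    attachment = models.FileField(upload_to='uploads/')

@receiver(pre_delete, sender=Upload)
def upload_delete(sender, instance, **kwargs):
    # save=False keeps Django from re-saving the model during cleanup.
    instance.attachment.delete(False)
```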
lordematus/desafio-byebnk
|
https://github.com/lordematus/desafio-byebnk
|
bcb47fe8fdeb8d5084b4a265e4ef06461f7f0d19
|
4deec7b2af27a51a15109166d971b64c2eddf89d
|
b6e6054c9599ed2edaf9cf0eec4b9be43d9a4966
|
refs/heads/main
| 2023-06-26T17:57:42.798021 | 2021-07-27T20:00:42 | 2021-07-27T20:00:42 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6118756532669067,
"alphanum_fraction": 0.6154434084892273,
"avg_line_length": 32.25423812866211,
"blob_id": "2900639fbe37cefc1c06d0c8630d6047e1f28704",
"content_id": "279674d303cbbf040390297ce7ad66d263f9171f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3924,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 118,
"path": "/api/tests/test_models.py",
"repo_name": "lordematus/desafio-byebnk",
"src_encoding": "UTF-8",
"text": "from django.test import TestCase\nfrom django.core.exceptions import ValidationError\nfrom api.models import Ativo, Operacao, User, Taxa\n\n\nclass ConfiguracaoModelTest(TestCase):\n @classmethod\n def setUpTestData(cls):\n cls.ativo_generico = {\n 'nome': 'GNC',\n 'modalidade': 'CRIPTO',\n 'preco_mercado_em_centavos': 100\n }\n\n\nclass TestAtivoModel(ConfiguracaoModelTest):\n def test_criar_ativo_com_modalidades_disponiveis(self):\n \"\"\"\n Verificar se os Ativos sao criados com sucesso\n \"\"\"\n modalidades_disponiveis = ['RENDA FIXA', 'RENDA VARIAVEL', 'CRIPTO']\n for modalidade in modalidades_disponiveis:\n self.ativo_generico['modalidade'] = modalidade\n ativo_avulso = Ativo.objects.create(**self.ativo_generico)\n ativo_avulso.save()\n\n total_ativos = Ativo.objects.count()\n qtd_modalidades = len(modalidades_disponiveis)\n self.assertEqual(total_ativos, qtd_modalidades)\n\n def test_criar_ativo_com_modalidade_nao_disponivel(self):\n \"\"\"\n Nao devem ser criados ativos cuja modalidade nao pertencam a lista\n de modalidades disponiveis.\n \"\"\"\n self.ativo_generico['modalidade'] = 'NOT EXISTENT'\n self.assertRaises(\n ValidationError,\n Ativo.objects.create,\n **self.ativo_generico\n )\n\n def test_salvar_ativo_com_modalidade_nao_disponivel(self):\n \"\"\"\n Ativos ja criados nao podem ter modalidade alterada para uma modalidade\n nao disponivel.\n \"\"\"\n ativo_generico = Ativo.objects.create(**self.ativo_generico)\n ativo_generico.save()\n ativo_generico.modalidade = 'NOT EXISTENT'\n self.assertRaises(ValidationError, ativo_generico.save)\n\n\nclass TestOperacaoModel(ConfiguracaoModelTest):\n @classmethod\n def setUpTestData(cls):\n super().setUpTestData()\n usuario = User.objects.create(username='user', password='abc123')\n usuario.save()\n\n ativo_generico = Ativo.objects.create(**cls.ativo_generico)\n ativo_generico.save()\n\n cls.operacao_generica = {\n 'usuario': usuario,\n 'ativo': ativo_generico,\n 'operacao': 'APLICACAO',\n 'quantidade': 10,\n 'preco_unitario_em_centavos': 100\n }\n\n def test_criar_operacao_com_operacoes_disponiveis(self):\n \"\"\"\n Criar operacoes com os tipos 'APLICACAO' e 'RESGATE'.\n \"\"\"\n operacoes_disponiveis = ['APLICACAO', 'RESGATE']\n for operacao in operacoes_disponiveis:\n self.operacao_generica['operacao'] = operacao\n operacao_generica = Operacao.objects.create(\n **self.operacao_generica\n )\n operacao_generica.save()\n\n total_operacoes = Operacao.objects.count()\n qtd_operacoes_disponiveis = len(operacoes_disponiveis)\n self.assertEqual(total_operacoes, qtd_operacoes_disponiveis)\n \n def test_criar_operacao_nao_disponivel(self):\n \"\"\"\n Nao devem ser criadas operacoes cujo tipos nao estejam definidos como\n 'APLICACAO' ou 'RESGATE'.\n \"\"\"\n self.operacao_generica['operacao'] = 'NOT EXISTENT'\n self.assertRaises(\n ValidationError,\n Operacao.objects.create,\n **self.operacao_generica\n )\n\n\nclass TestTaxaModel(ConfiguracaoModelTest):\n @classmethod\n def setUpTestData(cls):\n super().setUpTestData()\n cls.ativo = Ativo.objects.create(**cls.ativo_generico)\n cls.ativo.save()\n\n def testar_validacao_de_taxa(self):\n \"\"\"\n A taxa incidida sobre qualquer ativo nao podera ultrapassar 100%\n \"\"\"\n self.assertRaises(\n ValidationError,\n Taxa.objects.create,\n nome='Taxa Corretagem',\n ativo=self.ativo,\n percentual=150\n )\n"
},
{
"alpha_fraction": 0.6683915257453918,
"alphanum_fraction": 0.6701164245605469,
"avg_line_length": 37.3305778503418,
"blob_id": "688943f5d6a7299e6bbca464526bcd0987b2484a",
"content_id": "0eedf63b2a80bce5775963fda83b6cb5986b69c7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4652,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 121,
"path": "/api/views.py",
"repo_name": "lordematus/desafio-byebnk",
"src_encoding": "UTF-8",
"text": "from django.core.exceptions import ValidationError\nfrom rest_framework import generics, status\nfrom rest_framework.views import APIView\nfrom rest_framework.response import Response\nfrom rest_framework.exceptions import ParseError\n\nfrom .models import Ativo, Operacao, User, Taxa\nfrom .serializers import AtivoSerializer, OperacaoSerializer\n\n\nclass AtivosView(generics.ListCreateAPIView):\n serializer_class = AtivoSerializer\n\n def get_queryset(self):\n modalidade = self.request.query_params.get('modalidade')\n queryset = Ativo.objects.all()\n if modalidade is not None:\n queryset = queryset.filter(modalidade=modalidade.upper())\n return queryset\n\n\nclass OperacoesView(generics.ListCreateAPIView):\n\t# Aplicar filter diretamente não funcionaria uma vez que o request não está\n\t# definido no momento da declaração da classe, e como o request é uma \n # property então request.user.id não funcionaria também, pois request.user\n # não estaria definido. Uma das soluções possíveis é escrever um queryset\n # base, e depois o sobreescrever com get_queryset.\n queryset = Operacao.objects.all()\n serializer_class = OperacaoSerializer\n\n def get_queryset(self):\n return Operacao.objects.filter(usuario=self.request.user)\n\n def perform_create(self, serializer):\n cotas_disponiveis = self.calcula_cotas_disponiveis()\n cotas_requisitadas = int(self.request.data['quantidade'])\n\n operacao_selecionada = self.request.data['operacao']\n qtd_esta_disponivel = (cotas_disponiveis < cotas_requisitadas)\n\n if qtd_esta_disponivel and operacao_selecionada == 'RESGATE':\n raise ParseError('qtd requisitada > qtd disponivel')\n else:\n serializer.save()\n\n def calcula_cotas_disponiveis(self):\n operacoes_no_ativo = Operacao.objects.filter(\n ativo=self.request.data['ativo'],\n usuario=self.request.user\n )\n sum = 0\n for operacao in operacoes_no_ativo:\n qtd_cotas = operacao.quantidade\n sum += qtd_cotas\n return sum\n\n\t\t\nclass CarteiraView(APIView):\n def get(self, request, format=None):\n usuario_atual = User.objects.get(pk=request.user.id)\n\n aplicacoes_realizadas = self.calcula_aplicacoes_realizadas(request)\n resgates_realizados = self.calcula_resgates_realizados(request)\n preco_mercado_aplicacoes = self.calcular_preco_mercado(\n aplicacoes_realizadas\n )\n preco_mercado_resgates = self.calcular_preco_mercado(resgates_realizados)\n\t\t\n saldo = preco_mercado_aplicacoes - preco_mercado_resgates\n\n preco_unitario_aplicacoes = self.calcular_preco_unitario(aplicacoes_realizadas)\n preco_unitario_resgates = self.calcular_preco_unitario(resgates_realizados)\n precos_unitarios = preco_unitario_aplicacoes - preco_unitario_resgates\n\n dados_carteira = {\n 'usuario': usuario_atual.username, \n 'saldo': saldo,\n 'resultado': saldo - precos_unitarios, \n 'aplicacoes': len(aplicacoes_realizadas), \n 'resgates': len(resgates_realizados)\n }\n return Response(dados_carteira)\n\n def calcula_aplicacoes_realizadas(self, request):\n aplicacoes_realizadas = Operacao.objects.filter(\n operacao='APLICACAO', \n usuario=request.user.id\n )\n return aplicacoes_realizadas\n\n def calcula_resgates_realizados(self, request):\n resgates_realizados = Operacao.objects.filter(\n operacao='RESGATE', \n usuario=request.user.id\n\t\t)\n return resgates_realizados\n\t\n def calcular_preco_mercado(self, operacoes):\n preco_mercado = 0\n for operacao in operacoes:\n ativo = operacao.ativo\n preco = ativo.preco_mercado_em_centavos\n cotas = operacao.quantidade\n taxa = self.calcular_taxa_operacao(operacao.ativo)\n 
preco_mercado += (cotas * preco) * (1 - taxa)\n return preco_mercado\n\n def calcular_taxa_operacao(self, ativo):\n taxas_sobre_ativo = Taxa.objects.filter(ativo=ativo)\n total_percentual = sum([taxa.percentual for taxa in taxas_sobre_ativo])\n total_decimal = total_percentual / 100\n return total_decimal\n\n def calcular_preco_unitario(self, operacoes):\n total = 0\n for operacao in operacoes:\n cotas = operacao.quantidade\n preco = operacao.preco_unitario_em_centavos\n taxa = self.calcular_taxa_operacao(operacao.ativo)\n total += (cotas * preco) * (1 - taxa)\n return total\n"
},
{
"alpha_fraction": 0.6445439457893372,
"alphanum_fraction": 0.651872992515564,
"avg_line_length": 31.3157901763916,
"blob_id": "5d6ddc039496f4807578d2b9f4df8e8bb8bcdde7",
"content_id": "c79288247cf81f18c7b12ef019608716642710d7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2456,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 76,
"path": "/api/models.py",
"repo_name": "lordematus/desafio-byebnk",
"src_encoding": "UTF-8",
"text": "from django.core.exceptions import ValidationError\nfrom django.core.validators import MaxValueValidator\nfrom django.db import models\nfrom django.contrib.auth.models import User\n\n\nclass Ativo(models.Model):\n MODALIDADES_DISPONIVEIS = (\n ('RENDA FIXA', 'RENDA FIXA'),\n ('RENDA VARIAVEL', 'RENDA VARIAVEL'),\n ('CRIPTO', 'CRIPTO'),\n )\n nome = models.CharField(max_length=250)\n modalidade = models.CharField(\n max_length=15,\n choices=MODALIDADES_DISPONIVEIS\n )\n preco_mercado_em_centavos = models.PositiveIntegerField()\n\n def __str__(self):\n return self.nome\n\n def clean(self):\n tupla_modalidades = (self.modalidade, self.modalidade)\n if tupla_modalidades not in self.MODALIDADES_DISPONIVEIS:\n raise ValidationError({'modalidade': 'Modalidade does not exist'})\n\n def save(self, *args, **kwargs):\n self.clean()\n super().save(*args, **kwargs)\n\n\nclass Operacao(models.Model):\n OPERACOES_DISPONIVEIS = (\n ('APLICACAO', 'APLICACAO'),\n ('RESGATE', 'RESGATE'),\n )\n usuario = models.ForeignKey(User, on_delete=models.PROTECT)\n operacao = models.CharField(max_length=10, choices=OPERACOES_DISPONIVEIS)\n ativo = models.ForeignKey('Ativo', on_delete=models.PROTECT)\n data_de_solicitacao = models.DateField(auto_now_add=True)\n quantidade = models.PositiveIntegerField()\n preco_unitario_em_centavos = models.PositiveIntegerField()\n endereco_ip = models.GenericIPAddressField(null=True, blank=True)\n\n def __str__(self):\n return f'Operacao {self.id}'\n\n def clean(self):\n tupla_operacoes = (self.operacao, self.operacao)\n if tupla_operacoes not in self.OPERACOES_DISPONIVEIS:\n raise ValidationError({'operacao': 'operacao does not exist'})\n\n def save(self, *args, **kwargs):\n self.clean()\n super().save(*args, **kwargs)\n\n\nclass Taxa(models.Model):\n nome = models.CharField(max_length=64)\n ativo = models.ForeignKey('Ativo', on_delete=models.PROTECT)\n percentual = models.PositiveSmallIntegerField(\n validators=[MaxValueValidator(100)]\n )\n\n def __str__(self):\n return f\"{self.nome.title()}\"\n\n def clean(self):\n if self.percentual > 100:\n raise ValidationError(\"Percentual must be less than or \\\n equals to 100\")\n\n def save(self, *args, **kwargs):\n self.clean()\n super().save(*args, **kwargs)\n"
},
{
"alpha_fraction": 0.7385621070861816,
"alphanum_fraction": 0.7385621070861816,
"avg_line_length": 23.157894134521484,
"blob_id": "7abafd262a2b70ce6e3c4a9509c3582d42d11a57",
"content_id": "d04c9ffb4ca2e7a6a3cab22142079c038bfb9692",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 459,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 19,
"path": "/api/admin.py",
"repo_name": "lordematus/desafio-byebnk",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\nfrom .models import Ativo, Operacao, Taxa\n\n\nclass AtivoAdmin(admin.ModelAdmin):\n list_display = ('nome', 'modalidade')\n\n\nclass OperacaoAdmin(admin.ModelAdmin):\n list_display = ('usuario', 'operacao', 'ativo')\n\n\nclass TaxaAdmin(admin.ModelAdmin):\n list_display = ('nome', 'ativo', 'percentual')\n\n\nadmin.site.register(Ativo, AtivoAdmin)\nadmin.site.register(Operacao, OperacaoAdmin)\nadmin.site.register(Taxa, TaxaAdmin)\n"
},
{
"alpha_fraction": 0.5988665819168091,
"alphanum_fraction": 0.6083414554595947,
"avg_line_length": 35.55016326904297,
"blob_id": "1732e3cbad58f81817b666ed472b9707742a8d8f",
"content_id": "7b06cc41cd7f594fcd883fac592b2dd1104b3aa6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 11293,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 309,
"path": "/api/tests/tests_views.py",
"repo_name": "lordematus/desafio-byebnk",
"src_encoding": "UTF-8",
"text": "from rest_framework.test import APITestCase\nfrom rest_framework import status\n\nfrom api.models import User, Ativo, Operacao, Taxa\n\n\nclass ConfiguracaoDeTestes(APITestCase):\n @classmethod\n def setUpTestData(cls):\n cls.ativo_generico = {\n 'nome': 'BNB',\n 'modalidade': 'CRIPTO',\n 'preco_mercado_em_centavos': 100\n }\n\n cls.usuario = User.objects.create(username=\"user\", password=\"abc123\")\n cls.usuario.save()\n\n cls.ativo = Ativo.objects.create(**cls.ativo_generico)\n cls.ativo.save()\n\n def setUp(self):\n self.client.force_authenticate(user=self.usuario)\n\n\nclass AtivosTestCase(ConfiguracaoDeTestes):\n @classmethod\n def setUpTestData(cls):\n super().setUpTestData()\n cls.base_url = '/api/ativos'\n\n def test_cadastro_com_nome_e_modalidade_corretos(self):\n \"\"\"\n Como USUARIO eu gostaria de CADASTRAR UM ATIVO para REAZLIZAR\n APLICACOES/RESGATES.\n \"\"\"\n self.ativo_generico['nome'] = 'BTC'\n response = self.client.post(self.base_url, self.ativo_generico)\n\n novo_total_ativos = 2\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n self.assertEqual(Ativo.objects.count(), novo_total_ativos)\n\n def test_acessar_ativos_usuario(self):\n \"\"\"\n Como USUARIO gostaria de ACESSAR TODOS OS ATIVOS QUE JA OPEREI para\n COMPREENDER A SITUACAO DOS MEUS INVESTIMENTOS.\n \"\"\"\n response = self.client.get(self.base_url)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n def test_visualizar_todos_ativos_cadastrados(self):\n \"\"\"\n Como USUARIO eu gostaria de VISUALIZAR TODOS OS DADOS DOS ATIVOS para\n SABER AS OPCOES DISPONIVEIS PARA APLICACAO\n \"\"\"\n response = self.client.get(self.base_url)\n dados_finais_response = response.data[0]\n self.assertEqual(dados_finais_response['id'], 1)\n self.assertEqual(dados_finais_response['nome'], 'BNB')\n self.assertEqual(dados_finais_response['modalidade'], 'CRIPTO')\n\n def test_filtrar_ativos_pela_modalidade(self):\n \"\"\"\n Como USUARIO eu gostaria de FILTRAR OS ATIVOS DISPONIVEIS POR TIPO para\n que eu POSSA TER UMA MELHOR VISAO DOS ATIVOS DISPONIVEIS\n \"\"\"\n modalidades_disponiveis = ['RENDA FIXA', 'RENDA VARIAVEL', 'CRIPTO']\n for modalidade in modalidades_disponiveis:\n ativos = Ativo.objects.filter(modalidade=modalidade)\n response = self.client.get(\n f'{self.base_url}?modalidade={modalidade}'\n )\n qtd_ativos_retornados = len(response.data)\n\n self.assertEqual(qtd_ativos_retornados, ativos.count())\n\n\nclass OperacoesTestCase(ConfiguracaoDeTestes):\n @classmethod\n def setUpTestData(cls):\n super().setUpTestData()\n cls.base_url = '/api/operacoes'\n cls.dados_base_operacao = {\n 'usuario': cls.usuario,\n 'ativo': cls.ativo,\n 'quantidade': 3,\n 'preco_unitario_em_centavos': 100\n }\n operacao_avulsa = Operacao.objects.create(\n operacao=\"APLICACAO\",\n **cls.dados_base_operacao\n )\n operacao_avulsa.save()\n\n def test_realizar_aplicacao_em_ativo(self):\n \"\"\"\n Como um USUARIO eu gostaria de FAZER APLICACOES EM UM ATIVO para\n INICIAR UM INVESTIMENTO\n \"\"\"\n novo_total_operacoes = 2\n self.dados_base_operacao['ativo'] = self.ativo.id\n self.dados_base_operacao['operacao'] = 'APLICACAO'\n response = self.client.post(self.base_url, self.dados_base_operacao)\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n self.assertEqual(Operacao.objects.count(), novo_total_operacoes)\n\n def test_realizar_resgate_em_ativo(self):\n \"\"\"\n Como um USUARIO eu gostaria de FAZER RESGATES EM UM ATIVO para RETIRAR\n O MEU LUCRO\n \"\"\"\n novo_total_operacoes = 2\n 
self.dados_base_operacao['operacao'] = 'RESGATE'\n self.dados_base_operacao['ativo'] = self.ativo.id\n response = self.client.post(self.base_url, self.dados_base_operacao)\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n self.assertEqual(Operacao.objects.count(), novo_total_operacoes)\n\n def test_realizar_resgate_com_quantidade_maior_que_a_disponivel(self):\n \"\"\"\n O USUARIO nao pode realizar um resgate em quantidade maior do que a \n disponivel\n \"\"\"\n self.dados_base_operacao['operacao'] = 'RESGATE'\n self.dados_base_operacao['ativo'] = self.ativo.id\n self.dados_base_operacao['quantidade'] = 4\n response = self.client.post(self.base_url, self.dados_base_operacao)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n def test_realizar_aplicacao_em_ativo_de_outro_usuario(self):\n \"\"\"\n Como USUARIO eu gostaria de FAZER APLICACOES EM UM ATIVO DE OUTRO\n USUARIO para RETIRAR O MEU LUCRO.\n \"\"\"\n second_user = User.objects.create(username='user2', password='abc123')\n second_user.save()\n self.client.force_authenticate(user=second_user)\n\n self.dados_base_operacao['operacao'] = 'APLICACAO'\n self.dados_base_operacao['ativo'] = self.ativo.id\n response = self.client.post(self.base_url, self.dados_base_operacao)\n\n novo_total_operacoes = 2\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n self.assertEqual(Operacao.objects.count(), novo_total_operacoes)\n\n def test_usuario_visualiza_apenas_suas_operacoes(self):\n \"\"\"\n Como USUARIO eu gostaria de VISUALIZAR APENAS MINHAS OPERACOES\n para ATESTAR A SEGURANCA DA APLICACAO\n \"\"\"\n response = self.client.get(self.base_url)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n for response_item in response.data:\n operacao_id = response_item['id']\n operacao = Operacao.objects.get(pk=operacao_id)\n self.assertEqual(operacao.usuario.id, self.usuario.id)\n\n def test_salva_endereco_ip_usuario_na_operacao(self):\n \"\"\"\n Como USUARIO eu gostaria de VISUALIZAR O MEU ENDERECO DE IP para\n ATESTAR A SEGURANCA DA APPLICACAO\n \"\"\"\n endereco_ip_client = self.client._base_environ()['REMOTE_ADDR']\n self.dados_base_operacao['operacao'] = 'RESGATE'\n self.dados_base_operacao['ativo'] = self.ativo.id\n response = self.client.post(self.base_url, self.dados_base_operacao)\n self.assertEqual(response.data['endereco_ip'], endereco_ip_client)\n\n\nclass CarteiraTestCase(ConfiguracaoDeTestes):\n @classmethod\n def setUpTestData(cls):\n super().setUpTestData()\n cls.base_url = '/api/carteira'\n\n dados_a_cadastrar = [\n {\n 'nome': 'BTC',\n 'modalidade': 'CRIPTO',\n 'preco_mercado_em_centavos': 50*100,\n 'taxa_associada': 1\n },\n {\n 'nome': 'CDI',\n 'modalidade': 'RENDA FIXA',\n 'preco_mercado_em_centavos': 30*100,\n 'taxa_associada': 5\n },\n {\n 'nome': 'FII',\n 'modalidade': 'RENDA VARIAVEL',\n 'preco_mercado_em_centavos': 60*100,\n 'taxa_associada': 3\n }\n ]\n\n for ativo in dados_a_cadastrar:\n ativo_cadastrado = Ativo.objects.create(\n nome=ativo['nome'],\n modalidade=ativo['modalidade'],\n preco_mercado_em_centavos=ativo['preco_mercado_em_centavos']\n )\n ativo_cadastrado.save()\n\n taxa_cadastrada = Taxa.objects.create(\n nome=f\"Taxa {ativo['nome']}\",\n ativo=ativo_cadastrado,\n percentual=ativo['taxa_associada']\n )\n taxa_cadastrada.save()\n\n cls.operacao1 = Operacao.objects.create(\n usuario=cls.usuario,\n operacao=\"APLICACAO\",\n ativo=Ativo.objects.get(nome='BTC'),\n quantidade=10,\n preco_unitario_em_centavos=30*100,\n )\n\n cls.operacao2 = 
Operacao.objects.create(\n usuario=cls.usuario,\n operacao=\"APLICACAO\",\n ativo=Ativo.objects.get(nome='CDI'),\n quantidade=8,\n preco_unitario_em_centavos=50*100\n )\n\n cls.operacao3 = Operacao.objects.create(\n usuario=cls.usuario,\n operacao=\"APLICACAO\",\n ativo=Ativo.objects.get(nome='FII'),\n quantidade=5,\n preco_unitario_em_centavos=50*100,\n )\n\n cls.operacao4 = Operacao.objects.create(\n usuario=cls.usuario,\n operacao=\"RESGATE\",\n ativo=Ativo.objects.get(nome='BTC'),\n quantidade=3,\n preco_unitario_em_centavos=20*100\n )\n\n cls.operacao5 = Operacao.objects.create(\n usuario=cls.usuario,\n operacao=\"RESGATE\",\n ativo=Ativo.objects.get(nome='CDI'),\n quantidade=3,\n preco_unitario_em_centavos=20*100\n )\n\n def setUp(self):\n super().setUp()\n self.response = self.client.get(self.base_url)\n\n def test_acessar_carteira(self):\n \"\"\"\n Como USUARIO gostaria de ACESSAR TODOS OS DADOS REFERENTES A CARTEIRA\n para VER TODAS AS INFORMACOES EM UM UNICO LOCAL\n \"\"\"\n self.assertEqual(self.response.status_code, status.HTTP_200_OK)\n\n def test_visualizar_saldo_carteira(self):\n \"\"\"\n Como USUARIO gostaria de VISUALIZAR O SALDO DA MINHA CARTEIRA para \n ACOMPANHAR OS MEUS RESULTADOS.\n \"\"\"\n saldo_final_manualmente_calculado = 78000\n self.assertEqual(\n self.response.data['saldo'],\n saldo_final_manualmente_calculado\n )\n \n def test_visualizar_lucro_ou_prejuizo_carteira(self):\n \"\"\"\n Como USUARIO gostaria de VERIFICAR SE OBTIVE LUCRO OU PREJUIZO para\n que eu POSSA AVALIAR MEU DESEMPENHO GERAL\n \"\"\"\n resultado_final_manualmente_calculado = -2310\n self.assertEqual(\n self.response.data['resultado'],\n resultado_final_manualmente_calculado\n )\n\n def test_visualizar_total_de_aplicacoes(self):\n \"\"\"\n Como USUARIO gostaria de VISUALIZAR O TOTAL DE APLICACOES REALIZADAS\n para TER UM ENTENDIMENTO MELHOR DAS OPERACOES\n \"\"\"\n total_aplicacoes = 3\n self.assertEqual(self.response.data['aplicacoes'], total_aplicacoes)\n \n def test_visualizar_total_de_resgates(self):\n \"\"\"\n Como USUARIO gostaria de VISUALIZAR O TOTAL DE RESGATES REALIZADOS para\n TER UM ENTENDIMENTO MELHOR DAS OPERACOES\n \"\"\"\n total_resgates = 2\n self.assertEqual(self.response.data['resgates'], total_resgates)\n\n def test_usuario_visualizar_apenas_sua_carteira(self):\n \"\"\"\n Como USUARIO gostaria de VISUALIZAR APENAS A MINHA CARTEIRA para \n ATESTAR A SEGURANCA DA APLICACAO\n \"\"\"\n nome_usuario = 'user'\n self.assertEqual(self.response.data['usuario'], nome_usuario)"
},
{
"alpha_fraction": 0.7087827324867249,
"alphanum_fraction": 0.7087827324867249,
"avg_line_length": 37.235294342041016,
"blob_id": "422952be38dbdf427afd97dff6b3a68ff71f57a5",
"content_id": "8414867c7c3dbe100316e67374d301b95f0a9cd7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 665,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 17,
"path": "/README.md",
"repo_name": "lordematus/desafio-byebnk",
"src_encoding": "UTF-8",
"text": "# desafio-byebnk\n\n# Base Permission\nA permissão primária para todos os endpoint é que o usuário esteja autenticado no sistema.\n\n## APIs Endpoints\n\n| URI | Inputs | Outputs |\n| --- | --- | --- |\n| `GET api/ativos` | | Todos os ativos do user |\n| `POST api/ativos` | Nome, Modalidade | |\n| `GET api/operacoes` | | Todas as aplicacoes/resgates do user |\n| `POST api/operacoes` | Ativo, Operação, Quantidade, Preco Unitário | |\n| `GET api/carteira`| | Saldo e informações relevantes da carteira |\n\n## Como acessar a página de administração?\nPara acessar a página de administração como superusuário, basta utilizar o username `root` e senha `root`."
},
{
"alpha_fraction": 0.6819788217544556,
"alphanum_fraction": 0.6819788217544556,
"avg_line_length": 24.727272033691406,
"blob_id": "01e3a42f91c492d4d2cef60b8440e82bc0085d4f",
"content_id": "9a5a397f97184061248024d1e4c1850f4bbcbb1e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 566,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 22,
"path": "/api/serializers.py",
"repo_name": "lordematus/desafio-byebnk",
"src_encoding": "UTF-8",
"text": "from rest_framework import serializers\nfrom .models import Ativo, Operacao\n\n\nclass AtivoSerializer(serializers.ModelSerializer):\n class Meta:\n model = Ativo\n fields = '__all__'\n\n\nclass OperacaoSerializer(serializers.ModelSerializer):\n class Meta:\n model = Operacao\n fields = '__all__'\n\n usuario = serializers.HiddenField(\n default=serializers.CurrentUserDefault()\n\t)\n endereco_ip = serializers.SerializerMethodField()\n\n def get_endereco_ip(self, obj):\n return f\"{self.context['request'].META['REMOTE_ADDR']}\"\n"
},
{
"alpha_fraction": 0.6974063515663147,
"alphanum_fraction": 0.6974063515663147,
"avg_line_length": 33.70000076293945,
"blob_id": "13953d07772fa7939a780425a96ef84f2c8323fd",
"content_id": "7f93d15b494fd70a392db4428defced6ce133ada",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 347,
"license_type": "no_license",
"max_line_length": 66,
"num_lines": 10,
"path": "/api/urls.py",
"repo_name": "lordematus/desafio-byebnk",
"src_encoding": "UTF-8",
"text": "from django.urls import path\nfrom django.urls.conf import re_path\nfrom .views import AtivosView, OperacoesView, CarteiraView\n\nurlpatterns = [\n path('ativos', AtivosView.as_view()),\n re_path('^ativos/(?P<modalidade>.+)/$', AtivosView.as_view()),\n path('operacoes', OperacoesView.as_view()),\n path('carteira', CarteiraView.as_view()),\n]\n"
},
{
"alpha_fraction": 0.5897994637489319,
"alphanum_fraction": 0.6015692949295044,
"avg_line_length": 45.81632614135742,
"blob_id": "3039d903d0b9ecc07ee04a6d381c4b6db0a74c2e",
"content_id": "293b3f67bb275f85f9b515a020d5c4b0425baf87",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2294,
"license_type": "no_license",
"max_line_length": 164,
"num_lines": 49,
"path": "/api/migrations/0001_initial.py",
"repo_name": "lordematus/desafio-byebnk",
"src_encoding": "UTF-8",
"text": "# Generated by Django 3.2.5 on 2021-07-27 03:57\n\nfrom django.conf import settings\nimport django.core.validators\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n migrations.swappable_dependency(settings.AUTH_USER_MODEL),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Ativo',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('nome', models.CharField(max_length=250)),\n ('modalidade', models.CharField(choices=[('RENDA FIXA', 'RENDA FIXA'), ('RENDA VARIAVEL', 'RENDA VARIAVEL'), ('CRIPTO', 'CRIPTO')], max_length=15)),\n ('preco_mercado_em_centavos', models.PositiveIntegerField()),\n ],\n ),\n migrations.CreateModel(\n name='Taxa',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('nome', models.CharField(max_length=64)),\n ('percentual', models.PositiveSmallIntegerField(validators=[django.core.validators.MaxValueValidator(100)])),\n ('ativo', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='api.ativo')),\n ],\n ),\n migrations.CreateModel(\n name='Operacao',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('operacao', models.CharField(choices=[('APLICACAO', 'APLICACAO'), ('RESGATE', 'RESGATE')], max_length=10)),\n ('data_de_solicitacao', models.DateField(auto_now_add=True)),\n ('quantidade', models.PositiveIntegerField()),\n ('preco_unitario_em_centavos', models.PositiveIntegerField()),\n ('endereco_ip', models.GenericIPAddressField(blank=True, null=True)),\n ('ativo', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='api.ativo')),\n ('usuario', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)),\n ],\n ),\n ]\n"
}
] | 9 |
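CarteiraView values each operation as quantity times price, discounted by the asset's accumulated fee percentage, and the test suite pins the resulting saldo at 78000 centavos. A standalone sketch of that per-operation arithmetic, using the BTC numbers from the tests (10 cotas at a 5000-centavo market price with a 1% fee):

```python
def preco_liquido_em_centavos(quantidade, preco_em_centavos, taxa_percentual):
    """Fee-adjusted value of a single operation, as CarteiraView computes it."""
    taxa_decimal = taxa_percentual / 100
    return (quantidade * preco_em_centavos) * (1 - taxa_decimal)

# BTC aplicacao from the test data: 10 * 5000 * 0.99 == 49500 centavos.
assert preco_liquido_em_centavos(10, 50 * 100, 1) == 49500
```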
richardkchapman/rastercarve
|
https://github.com/richardkchapman/rastercarve
|
f2678f9017f47b2aa4a733711dacb6a17c621581
|
0ba7a03ab2b4a2ae56d9c1a5501ebe2c713b5145
|
12d67726d822ec87b047502ca96ab5d253ea643a
|
refs/heads/master
| 2020-09-21T21:16:20.230575 | 2019-11-29T04:18:39 | 2019-11-29T04:18:39 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7556935548782349,
"alphanum_fraction": 0.759834349155426,
"avg_line_length": 29.1875,
"blob_id": "39f5efcb019ae5ca1940a4f9a9061dd109ce6b31",
"content_id": "7d6647c34f7b8819bcb0b5a72089282e54c9ed35",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 483,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 16,
"path": "/README.md",
"repo_name": "richardkchapman/rastercarve",
"src_encoding": "UTF-8",
"text": "# RasterCarve: Generate G-code to engrave raster images\n\nThis is a little Python script I wrote to generate 3-axis toolpaths to\nengrave raster images.\n\n## Getting Started\n\nYou just need Python 3, OpenCV, and NumPy (i.e. `pip install ...`).\n\nThen, just run `python src/rastercarve.py IMAGE`, where `IMAGE` is a\nbitmap image in any format supported by OpenCV. G-code is output to\nstandard output.\n\n## Configuration\n\nEdit `rastercarve.py` to change the material and engraving settings.\n"
},
{
"alpha_fraction": 0.5853461027145386,
"alphanum_fraction": 0.6065308451652527,
"avg_line_length": 30.402542114257812,
"blob_id": "b2d2489ba06711bdd9b1532103b2bbf6fe070400",
"content_id": "3e9471345ac805348abdedbc348e88ceda3e5a17",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7411,
"license_type": "no_license",
"max_line_length": 122,
"num_lines": 236,
"path": "/src/rastercarve.py",
"repo_name": "richardkchapman/rastercarve",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\"\"\"rastercarve: a raster engraving G-code generator\nUsage: rastercarve.py IMAGE\n\nThis program outputs G-code to engrave a bitmap image on a 3-axis\nmilling machine.\n\"\"\"\n\nimport cv2\nimport math\nimport numpy as np\nimport sys\n\n#### Machine configuration\nFEEDRATE = 100 # in / min\nPLUNGE_RATE = 10 # in / min\nSAFE_Z = .2 # tool will start/end this high from material\nTRAVERSE_Z = 1\nMAX_DEPTH = .080 # full black is this many inches deep\nTOOL_ANGLE = 60 # included angle of tool (we assume a V-bit). change if needed\n\n#### Image size\nDESIRED_WIDTH = 10 # desired width in inches (change this to scale image)\n\n#### Cutting Parameters\nLINE_SPACING_FACTOR = 1.1 # Vectric recommends 10-20% for wood\nLINE_ANGLE = 22.5 # angle of lines across image, [0-90) degrees\nLINEAR_RESOLUTION = .01 # spacing between image samples along a line (inches)\n\n#### Image interpolation\nSUPERSAMPLE = 5 # interpolate the image by this factor (caution: this scales the image by the square of its value)\n\n#### G-Code options\nLINE_NOS = True # Generate line \"N\"umbers\n\n#### Internal stuff - don't mess with this\nDEG2RAD = math.pi / 180\nDEPTH_TO_WIDTH = 2 * math.tan(TOOL_ANGLE / 2 * DEG2RAD) # multiply by this to get the width of a cut\nLINE_WIDTH = MAX_DEPTH * DEPTH_TO_WIDTH\nLINE_SPACING = LINE_SPACING_FACTOR * LINE_WIDTH # orthogonal distance between lines\n\n# floating-point range\ndef frange(x, y, jump):\n while x < y:\n yield x\n x += jump\n\ndef eprint(s):\n print(s, file=sys.stderr)\n\nline = 1\ndef gcode(s):\n global line\n print((\"N%d %s\" % (line, s)) if LINE_NOS else s)\n line += 1\n\npathlen = 0\nlastpos = None\n\ndef updatePos(pos):\n global pathlen, lastpos\n if lastpos is None:\n lastpos = pos\n return\n pathlen += np.linalg.norm(pos - lastpos)\n lastpos = pos\n\n# reflect as needed\ndef transform(x, y):\n return x, -y\n\n# we will negate the Y axis in all these\ndef move(x, y, z, f = FEEDRATE):\n x, y = transform(x, y)\n gcode(\"G1 F%d X%f Y%f Z%f\" % (f, x, y, z))\n updatePos(np.array([x, y, z]))\n\ndef moveRapid(x, y, z):\n x, y = transform(x, y)\n gcode(\"G0 X%f Y%f Z%f\" % (x, y, z))\n updatePos(np.array([x, y, z]))\n\ndef moveSlow(x, y, z):\n # we don't want to transform X, Y here\n move(x, y, z, PLUNGE_RATE)\n updatePos(np.array([x, y, z]))\n\ndef moveRapidXY(x, y):\n x, y = transform(x, y)\n gcode(\"G0 X%f Y%f\" % (x, y))\n updatePos(np.array([x, y, lastpos[2]]))\n\ndef moveZ(z, f = PLUNGE_RATE):\n gcode(\"G1 F%d Z%f\" % (f, z))\n newpos = lastpos\n newpos[2] = z\n updatePos(newpos)\n\ndef getPix(image, x, y):\n # clamp\n x = max(0, min(int(x), image.shape[1]-1))\n y = max(0, min(int(y), image.shape[0]-1))\n\n return image[y, x]\n\n# return how deep to cut given a pixel value\ndef getDepth(pix):\n # may want to do gamma mapping\n return -float(pix) / 256 * MAX_DEPTH\n\ndef inBounds(img_size, x):\n return 0 <= x[0] and x[0] < img_size[0] and 0 <= x[1] and x[1] < img_size[1]\n\n# Engrave one line across the image. start and d are vectors in the\n# output space representing the start point and direction of\n# machining, respectively. 
start should be on the border of the image,\n# and d should point INTO the image.\ndef engraveLine(img_interp, img_size, ppi, start, d, step = LINEAR_RESOLUTION):\n v = start\n d = d / np.linalg.norm(d)\n\n if not inBounds(img_size, v):\n print(\"NOT IN BOUNDS (PROGRAMMING ERROR): \", img_size, v, file=sys.stderr)\n\n moveZ(SAFE_Z)\n moveRapidXY(v[0], v[1])\n\n first = True\n\n while inBounds(img_size, v):\n img_x = int(round(v[0] * ppi))\n img_y = int(round(v[1] * ppi))\n x, y = v\n depth = getDepth(getPix(img_interp, img_x, img_y))\n if not first:\n move(x, y, depth)\n else:\n first = False\n moveSlow(x, y, depth)\n\n v += step * d\n # return last engraved point\n return v - step * d\n\ndef doEngrave(filename):\n # check parameter sanity\n if ( not(0 <= LINE_ANGLE < 90) or\n not(0 < TOOL_ANGLE < 180) or\n not(0 < FEEDRATE) or\n not(0 < PLUNGE_RATE) or\n not(0 < SAFE_Z) or\n not(0 < TRAVERSE_Z) or\n not(0 < MAX_DEPTH) or\n not(0 < DESIRED_WIDTH) or\n not(1 <= LINE_SPACING_FACTOR) or\n not(0 < LINEAR_RESOLUTION) or\n not(1 <= SUPERSAMPLE) ):\n eprint(\"WARNING: Invalid parameter(s).\")\n\n # invert and convert to grayscale\n img = ~cv2.cvtColor(cv2.imread(filename), cv2.COLOR_BGR2GRAY)\n\n orig_h, orig_w = img.shape[:2]\n\n img_w, img_h = img_size = DESIRED_WIDTH, DESIRED_WIDTH * (orig_h / orig_w)\n img_ppi = orig_w / img_w # should be the same for X and Y directions\n\n # scale up the image with interpolation\n img_interp = cv2.resize(img, None, fx = SUPERSAMPLE, fy = SUPERSAMPLE)\n interp_ppi = img_ppi * SUPERSAMPLE\n\n # preamble: https://www.instructables.com/id/How-to-write-G-code-basics/\n print(\"( Generated by rastercarve: github.com/built1n/rastercarve )\")\n print(\"( Image name: %s )\" % (filename))\n gcode(\"G00 G90 G80 G28 G17 G20 G40 G49\\n\")\n\n d = np.array([math.cos(LINE_ANGLE * DEG2RAD),\n -math.sin(LINE_ANGLE * DEG2RAD)])\n\n max_y = img_h + img_w * -d[1] / d[0] # highest Y we'll loop to\n yspace = LINE_SPACING / math.cos(LINE_ANGLE * DEG2RAD) # vertical spacing between lines\n xspace = LINE_SPACING / math.sin(LINE_ANGLE * DEG2RAD) if LINE_ANGLE != 0 else 0 # horizontal space\n\n nlines = round(max_y / yspace)\n\n ### Generate toolpath\n moveRapid(0, 0, SAFE_Z)\n end = None\n\n for y in frange(0, max_y - yspace, yspace * 2):\n start = np.array([0, y]).astype('float64')\n\n # start some vectors on the bottom edge of the image\n if d[1] != 0:\n c = (img_h - y) / d[1] # solve (start + cd)_y = h for c\n if c >= 0:\n start += (c + LINEAR_RESOLUTION) * d\n\n start = engraveLine(img_interp, img_size, interp_ppi, start, d)\n\n # now engrave the other direction\n # we just need to flip d and move start over\n\n # see which side of the image the last line ran out on (either top or right side)\n if (start + LINEAR_RESOLUTION * d)[1] < 0:\n start[0] += xspace\n else:\n start[1] += yspace\n\n end = engraveLine(img_interp, img_size, interp_ppi, start, -d)\n\n moveSlow(end[0], end[1], TRAVERSE_Z)\n moveRapid(0, 0, TRAVERSE_Z)\n\n ### Dump stats\n eprint(\"=== Statistics ===\")\n eprint(\"Image dimensions: %.2f\\\" wide by %.2f\\\" tall = %.1f in^2 (%.1f PPI)\" % (img_w, img_h, img_w * img_h, img_ppi))\n eprint(\"Max line depth: %.3f in\" % (MAX_DEPTH))\n eprint(\"Max line width: %.3f in (%.1f deg V-bit)\" % (LINE_WIDTH, TOOL_ANGLE))\n eprint(\"Line spacing: %.3f in (%d%%)\" % (LINE_SPACING, int(round(100 * LINE_SPACING_FACTOR))))\n eprint(\"Line angle: %.1f deg\" % (LINE_ANGLE))\n eprint(\"Number of lines: %d\" % (nlines))\n eprint(\"Interpolated image by f=%.1f (%.1f PPI)\" % 
(SUPERSAMPLE, interp_ppi))\n eprint(\"Toolpath length: %.1f in\" % (pathlen))\n eprint(\"Feed rate: %.1f in/min\" % (FEEDRATE))\n eprint(\"Plunge rate: %.1f in/min\" % (PLUNGE_RATE))\n eprint(\"Approximate machining time: %.1f sec\" % (pathlen / (FEEDRATE / 60)))\n\ndef main():\n if len(sys.argv) != 2:\n eprint(\"Usage: rastercarve.py IMAGE\")\n return\n doEngrave(sys.argv[1])\n\nif __name__==\"__main__\":\n main()\n"
}
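The `DEPTH_TO_WIDTH` constant in the script above is plain V-bit trigonometry: a groove cut to depth `d` by a bit with included angle `θ` is `2 * d * tan(θ / 2)` wide at the surface. A standalone check of the defaults used in that file:

```python
import math

TOOL_ANGLE = 60     # degrees, included angle of the V-bit
MAX_DEPTH = 0.080   # inches, depth assigned to full black

depth_to_width = 2 * math.tan(math.radians(TOOL_ANGLE / 2))
line_width = MAX_DEPTH * depth_to_width
print(f"max line width: {line_width:.4f} in")  # ~0.0924 in at full depth
```

This is exactly where the script's "Max line width" statistic comes from.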
] | 2 |
jonwesneski/test_framework
|
https://github.com/jonwesneski/test_framework
|
1de95de2f872b6fc98baca6fa0c8b0ef5195f91f
|
0845c82cf9e74bf4441c3eed8ad0c9a57df11e0f
|
465f53262d39c386385363e36eac601e50f3eb58
|
refs/heads/main
| 2023-06-17T19:08:56.483212 | 2022-11-12T21:11:33 | 2022-11-12T21:11:33 | 289,189,374 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6343648433685303,
"alphanum_fraction": 0.6547231078147888,
"avg_line_length": 23.559999465942383,
"blob_id": "ea0a2df1615142eabd534c39a1d426602d76427e",
"content_id": "5c5803c5aec161c92af51cbffa3fb0bbc210bca8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1228,
"license_type": "permissive",
"max_line_length": 78,
"num_lines": 50,
"path": "/examples/fake_clients/regression/sample3.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "import end2\n\n__run_mode__ = end2.PARALLEL\n__tags__ = ['product']\n\n\[email protected]_test\ndef my_setup_test(client, async_client):\n client.logger.info('running setup test')\n assert client.get() == {}\n\n\[email protected]\ndef my_teardown(client, async_client):\n client.logger.info('running teardown')\n assert client.delete() is None\n\n\[email protected]_failures_in_module\ndef this_is_my_recovery(client, async_client):\n async_client.logger.info('on_failures_in_module: doing what is necessary')\n\n\ndef test_31(client, async_client):\n client.logger.info('hi')\n assert client.put({'hi': 31}) is None\n assert client.put({'hi': 32}) is None\n assert client.put({'hi': 33}) is None\n\n\ndef test_32(client, async_client, *, end):\n pub = client.pub_sub\n def handler():\n end()\n client.logger.info('hi12')\n assert client.post({'hi': 32}) is None\n client.on(handler)\n assert client.post({'hi': 33}) is None\n pub.publish('event')\n\n\nasync def test_33(client, async_client, *, end):\n def handler():\n end()\n client.on(handler)\n assert await async_client.get() == await async_client.get()\n\n\nasync def test_34(client, async_client):\n assert await async_client.get() == await async_client.get()\n"
},
{
"alpha_fraction": 0.624199628829956,
"alphanum_fraction": 0.6356920003890991,
"avg_line_length": 39.07236862182617,
"blob_id": "47bfbb119214c369a7eebde8215e688268af503d",
"content_id": "cf4d9d0e873203cd0ca6b2cd109fe525801ec57e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6091,
"license_type": "permissive",
"max_line_length": 102,
"num_lines": 152,
"path": "/tests/unit/runner.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "import asyncio\nimport unittest\n\nfrom end2 import runner\nfrom end2.constants import Status\nfrom end2.logger import empty_logger\nfrom end2 import exceptions\n\n\nclass TestRunMethod(unittest.TestCase):\n def test_method_passed(self):\n def test_1():\n assert True\n result = runner.run_test_func(empty_logger, None, test_1)\n self.assertEqual(result.status, Status.PASSED)\n self.assertEqual(result.record, \"\")\n self.assertIsNotNone(result.end_time)\n\n def test_method_failed(self):\n def test_2(a):\n assert False\n result = runner.run_test_func(empty_logger, None, test_2, 1)\n self.assertEqual(result.status, Status.FAILED)\n self.assertNotEqual(result.record, \"\")\n self.assertIsNotNone(result.end_time)\n \n def test_method_skipped(self):\n def test_3(a, b):\n raise exceptions.SkipTestException(\"I skip\")\n result = runner.run_test_func(empty_logger, None, test_3, a=1, b=2)\n self.assertEqual(result.status, Status.SKIPPED)\n self.assertEqual(result.record, \"I skip\")\n self.assertIsNotNone(result.end_time)\n\n def test_method_ignore_reraises(self):\n def test_4(a, b):\n raise exceptions.IgnoreTestException(\"Error\")\n with self.assertRaises(exceptions.IgnoreTestException):\n runner.run_test_func(empty_logger, None, test_4, 1, 2)\n \n def test_method_encountered_some_other_exception(self):\n def test_4(a, b, c):\n raise Exception(\"Error\")\n result = runner.run_test_func(empty_logger, None, test_4, 1, 2, 3)\n self.assertEqual(result.status, Status.FAILED)\n self.assertIn(\"Encountered an exception\", result.record)\n self.assertIsNotNone(result.end_time)\n\n def test_method_end_callback(self):\n def test_4(*, end):\n end()\n ender = runner.Ender()\n end = ender.create()\n result = runner.run_test_func(empty_logger, ender, test_4, end=end)\n self.assertEqual(result.status, Status.PASSED)\n\n def test_method_end_fail_callback(self):\n expected_record = \"i fail\"\n def test_4(*, end):\n end.fail(expected_record)\n ender = runner.Ender()\n end = ender.create()\n result = runner.run_test_func(empty_logger, ender, test_4, end=end)\n self.assertEqual(result.status, Status.FAILED)\n self.assertIn(expected_record, result.record)\n\n def test_method_end_callback_timeout(self):\n expected_timeout = 1.0\n def test_4(*, end):\n pass\n ender = runner.Ender(expected_timeout)\n end = ender.create()\n result = runner.run_test_func(empty_logger, ender, test_4, end=end)\n self.assertEqual(result.status, Status.FAILED)\n self.assertIn(str(expected_timeout), result.record)\n\n\nclass TestRunMethodAsync(unittest.TestCase):\n def test_async_method_passed(self):\n async def test_1():\n await asyncio.sleep(0.1)\n assert True\n result = asyncio.run(runner.run_async_test_func(empty_logger, None, test_1))\n self.assertEqual(result.status, Status.PASSED)\n self.assertEqual(result.record, \"\")\n self.assertIsNotNone(result.end_time)\n\n def test_async_method_failed(self):\n async def test_2(a):\n await asyncio.sleep(0.1)\n assert False\n result = asyncio.run(runner.run_async_test_func(empty_logger, None, test_2, 1))\n self.assertEqual(result.status, Status.FAILED)\n self.assertNotEqual(result.record, \"\")\n self.assertIsNotNone(result.end_time)\n \n def test_async_method_skipped(self):\n async def test_3(a, b):\n await asyncio.sleep(0.1)\n raise exceptions.SkipTestException(\"I skip\")\n result = asyncio.run(runner.run_async_test_func(empty_logger, None, test_3, a=1, b=2))\n self.assertEqual(result.status, Status.SKIPPED) and self.assertEqual(result.record, \"I skip\") \n 
self.assertIsNotNone(result.end_time)\n\n def test_async_method_ignore_reraises(self):\n async def test_4():\n await asyncio.sleep(0.1)\n raise exceptions.IgnoreTestException(\"Error\")\n\n def run_to_completion():\n return asyncio.run(runner.run_async_test_func(empty_logger, None, test_4))\n\n self.assertRaises(exceptions.IgnoreTestException, run_to_completion)\n \n def test_async_method_encountered_some_other_exception(self):\n async def test_4(a, b, c):\n await asyncio.sleep(0.1)\n raise Exception(\"Error\")\n result = asyncio.run(runner.run_async_test_func(empty_logger, None, test_4, 1, 2, 3))\n self.assertEqual(result.status, Status.FAILED)\n self.assertIn(\"Encountered an exception\", result.record)\n self.assertIsNotNone(result.end_time)\n\n def test_async_method_end_callback(self):\n async def test_4(*, end):\n await asyncio.sleep(0.1)\n end()\n ender = runner.Ender()\n end = ender.create()\n result = asyncio.run(runner.run_async_test_func(empty_logger, ender, test_4, end=end))\n self.assertEqual(result.status, Status.PASSED)\n\n def test_async_method_end_fail_callback(self):\n expected_record = \"i fail\"\n async def test_4(*, end):\n end.fail(expected_record)\n await asyncio.sleep(0.1)\n ender = runner.Ender()\n end = ender.create()\n result = asyncio.run(runner.run_async_test_func(empty_logger, ender, test_4, end=end))\n self.assertEqual(result.status, Status.FAILED)\n self.assertIn(expected_record, result.record)\n\n def test_async_method_end_callback_timeout(self):\n expected_timeout = 1.0\n async def test_4(*, end):\n await asyncio.sleep(0.1)\n ender = runner.Ender(expected_timeout)\n end = ender.create()\n result = asyncio.run(runner.run_async_test_func(empty_logger, ender, test_4, end=end))\n self.assertEqual(result.status, Status.FAILED)\n self.assertIn(str(expected_timeout), result.record)\n"
},
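The tests above double as documentation for the `Ender` contract: `ender.create()` produces the `end` keyword argument, `end()` passes the test, `end.fail(msg)` fails it with that record, and never calling either fails with the configured timeout. Condensed from the cases above:

```python
from end2 import runner
from end2.logger import empty_logger


def test_event(*, end):
    # in a real test this would be invoked from an event callback
    end()   # marks the test as finished/passed


ender = runner.Ender(5.0)   # fail with a timeout record if end() never fires
result = runner.run_test_func(empty_logger, ender, test_event, end=ender.create())
print(result.status)        # Status.PASSED
```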
{
"alpha_fraction": 0.6231647729873657,
"alphanum_fraction": 0.631321370601654,
"avg_line_length": 14.717948913574219,
"blob_id": "9fb3608b5b2e1d66d259e44431534a3844d5f0be",
"content_id": "84fb47da456b8de84ddf15510e9f4aa638a1dbc0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 613,
"license_type": "permissive",
"max_line_length": 39,
"num_lines": 39,
"path": "/examples/simple/smoke/sample1.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from end2 import (\n RunMode,\n setup_test,\n teardown\n)\n\n__run_mode__ = RunMode.PARALLEL\n\n\n_my_list = []\n\n\n@setup_test\ndef my_setup_test(logger):\n logger.info('running setup test')\n _my_list.append(len(_my_list))\n\n\n@teardown\ndef my_teardown(logger):\n logger.info('running teardown')\n global _my_list\n _my_list = []\n\n\ndef test_1(logger):\n assert len(_my_list) > 0\n logger.info(_my_list)\n\n\ndef test_2(logger):\n assert len(_my_list) != 0\n logger.info(_my_list)\n assert False\n logger.info('Unreachable')\n\n\ndef test_ignored_test(logger):\n assert False, \"I SHOULD BE IGNORED\"\n"
},
{
"alpha_fraction": 0.5971329808235168,
"alphanum_fraction": 0.6010875105857849,
"avg_line_length": 21.230770111083984,
"blob_id": "bc569364323fe1b0ebf06533e540815d915cbcd7",
"content_id": "d8603dbb8f74016ae0567dcf0e38a5daea1551af",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2023,
"license_type": "permissive",
"max_line_length": 67,
"num_lines": 91,
"path": "/examples/fake_clients/run.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\nfrom asyncio import sleep as aio_sleep\nimport os\nfrom random import randint\nimport sys\nfrom time import sleep\n\nsys.path.insert(0, os.path.join('..', '..'))\nfrom end2.runner import start_test_run\nfrom end2.arg_parser import default_parser\n\n\n\nclass SimplePubSub:\n def __init__(self) -> None:\n self.subscribers = {}\n\n def unsubscribe(self, event: str) -> None:\n self.subscribers.pop(event, None)\n\n def subscribe(self, event: str, callback) -> None:\n self.subscribers[event] = callback\n\n def publish(self, event: str, *args) -> None:\n if event in self.subscribers.keys():\n self.subscribers[event](*args)\n\n\nclass Client:\n def __init__(self, logger):\n self.logger = logger\n self.pub_sub = SimplePubSub()\n\n @staticmethod\n def _sleep():\n sleep(randint(1, 3))\n\n def get(self):\n self._sleep()\n return {}\n\n def post(self, payload):\n self.logger.info(payload)\n self._sleep()\n\n def put(self, payload):\n self.logger.info(payload)\n self._sleep()\n \n def delete(self):\n self._sleep()\n\n def on(self, handler):\n self.pub_sub.subscribe(\"event\", handler)\n\n\nclass AsyncClient:\n def __init__(self, logger):\n self.logger = logger\n\n @staticmethod\n async def _sleep():\n await aio_sleep(randint(1, 3))\n\n async def get(self):\n await self._sleep()\n return {}\n\n async def post(self, payload):\n self.logger.info(payload)\n await self._sleep()\n return {}\n\n async def put(self, payload):\n self.logger.info(payload)\n await self._sleep()\n \n async def delete(self):\n await self._sleep()\n\n\nif __name__ == '__main__':\n args = default_parser().parse_args()\n\n def test_parameters(logger, package_object):\n return (Client(logger), AsyncClient(logger)), {}\n\n results, failed_imports = start_test_run(args, test_parameters)\n\n print(failed_imports)\n exit(results.exit_code)\n"
},
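`SimplePubSub` above is a deliberately tiny bus: one callback per event name, and `publish` is a no-op when nothing is subscribed. A quick demonstration using the class exactly as defined in that file (assumed to be in scope):

```python
bus = SimplePubSub()
bus.subscribe('event', lambda payload: print('got', payload))
bus.publish('event', {'hi': 1})    # -> got {'hi': 1}
bus.unsubscribe('event')
bus.publish('event', {'hi': 2})    # no subscriber; silently ignored
```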
{
"alpha_fraction": 0.6424580812454224,
"alphanum_fraction": 0.6648044586181641,
"avg_line_length": 33.64516067504883,
"blob_id": "14ca486e8e688e02d22fafcd6dacc12e72a48ae5",
"content_id": "74c16baf56222fe5ea046236ccf5741365b5d5f9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1074,
"license_type": "permissive",
"max_line_length": 77,
"num_lines": 31,
"path": "/examples/simple/run.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\nimport os\nimport sys\n\nsys.path.insert(0, os.path.join('..', '..'))\nfrom end2.runner import start_test_run\nfrom end2.arg_parser import default_parser\n\n\n\nif __name__ == '__main__':\n # Run from inside examples\\simple\n ## --suite smoke\\\\!ignored_module.py;sample1.py::test_ignored_test,test_2\n ## --suite smoke/!ignored_module.py;sample1.py::test_ignored_test,test_2\n ## --suite non_existent.py\n ## --suite smoke\\\\!ignored_module.py\n ## --suite smoke/!ignored_module.py\n ## --suite smoke\\\\sample1.py regression.py\n ## --suite smoke/sample1.py regression.py\n ## --suite regression\\\\sample4.py::test_11\n ## --suite regression/sample4.py::test_11\n ## --suite regression\\\\sample4.py::test_11[4]\n ## --suite regression/sample4.py::test_11[4]\n args = default_parser().parse_args()\n\n def test_parameters(logger, package_object):\n return (logger,), {}\n\n test_suite_result, failed_imports = start_test_run(args, test_parameters)\n print(test_suite_result, failed_imports)\n exit(test_suite_result.exit_code)\n"
},
{
"alpha_fraction": 0.7263427376747131,
"alphanum_fraction": 0.7289002537727356,
"avg_line_length": 19.578947067260742,
"blob_id": "af2533178e44022511bd64073aede33c76ccce0c",
"content_id": "8e63ee7dfbe126f8d0562d944f8b635bab81627a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 392,
"license_type": "permissive",
"max_line_length": 90,
"num_lines": 19,
"path": "/end2/logger/__init__.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "import logging\n\nfrom .har_logger import (\n HarFileHandler,\n HarLogger\n)\nfrom .log_manager import (\n LogManager,\n SuiteLogManager\n)\n\n\nempty_logger = logging.getLogger('end²EMPTY')\nempty_logger.addHandler(logging.NullHandler())\nempty_logger.propagate = False\nempty_logger.disabled = True\n\n\n__all__ = ['empty_logger', 'HarFileHandler', 'HarLogger', 'LogManager', 'SuiteLogManager']\n"
},
{
"alpha_fraction": 0.5519785284996033,
"alphanum_fraction": 0.5741113424301147,
"avg_line_length": 18.8799991607666,
"blob_id": "d3a8ca2b8c58bf7e3af9bf21840ae1e05dec656d",
"content_id": "56bf5539f5fdb5e796b63bad3222dae9767f03ec",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1491,
"license_type": "permissive",
"max_line_length": 86,
"num_lines": 75,
"path": "/examples/simple/regression/sample4.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from end2.constants import RunMode\nfrom end2 import (\n parameterize,\n RunMode,\n setup,\n teardown_test\n)\n\n__run_mode__ = RunMode.SEQUENTIAL # This is required for every test module\n\n\n_letter = 'a'\n_my_dict = {}\n_my_var = 423956\n_my_bool = True\n\n\n@setup\ndef my_setup(logger):\n logger.info('running setup')\n _my_var = ''\n _my_dict['key'] = ord(_letter)\n\n\n@teardown_test\ndef my_teardown_test(logger):\n logger.info('running teardown test')\n global _my_bool\n _my_bool = not _my_bool\n\n\n@parameterize([\n (False, 'B', 'AB'),\n (True, 2, 2),\n (True, [], []),\n (False, [1], (1,)),\n (False, object(), object()),\n (True, 2.0, 2)\n])\ndef test_1(logger, it_works, lhs, rhs):\n logger.info(f'Does it work? {\"Yes\" if it_works else \"No\"}')\n if it_works:\n assert lhs == rhs\n else:\n assert lhs != rhs\n\n\n\n@parameterize([\n (False, 'B', 'AB'),\n (True, 2, 2),\n (True, [], []),\n (False, [1], (1,)),\n (False, object(), object()),\n (True, 2.0, 2)\n])\ndef test_11(logger, it_works, lhs, rhs):\n logger.info(f'Does it work? {\"Yes\" if it_works else \"No\"}')\n if it_works:\n assert lhs == rhs\n else:\n assert lhs != rhs\n\n\ndef test_2(logger):\n assert chr(_my_dict['key']) == _letter\n\n\ndef test_python_gotcha_4(logger):\n assert _my_var == 423956\n logger.info('You must specify global keyword when re-assigning a global variable')\n\n\ndef test_my_bool(logger):\n assert _my_bool if _my_bool else not _my_bool\n"
},
{
"alpha_fraction": 0.6866891980171204,
"alphanum_fraction": 0.6949238777160645,
"avg_line_length": 35.18367385864258,
"blob_id": "93a283ce20d143102cbf08bc5b885abe8c707900",
"content_id": "b34dce684092b0f3be19e2d9dcb6e588b575d235",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 17732,
"license_type": "permissive",
"max_line_length": 635,
"num_lines": 490,
"path": "/README.md",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "# end² Test Automation Framework\n\nThe focus of this framework is:\n\n- A minimal framework only using the standard library\n- More for E2E and/or Functional type of testing\n- For testing that has heavy logging and needs to analyze failures in logs rather than test case code\n- For folks that like programatic ways instead of plugins with configuration files\n\n## Contents\n\n- [Intent/Philosophy](#intent-philosophy)\n- [Features](#features)\n- [Getting Started](#getting-started)\n- [CLI](#cli)\n- [Resource Files](#resource-files)\n- [Log Manager](#log-manager)\n- [Reserved Keywords](#reserved-keywords)\n\n## Intent/Philosophy\n\n- Shuffling:\n - By having tests run in random order, we are ensuring that tests don't need to run in a specific order. If test-1 fails, then test-2 will obviously fail, but test-2 is a false negative. It might be better to consider test-1 and test-2 as test steps and just combine test-1 and test-2 in one test case instead. Another plus to Shuffling is the test writer will be able to find out if there are any side effects on the test case side or the SUT and be able to fix what is necessary. This will make them have a better understanding of there own coding, others members coding, and the SUT as well if the side effect is on the SUT itself\n- Create you own script entry point:\n - This is the entrypoint for your testing. It is your own python module that you will write that defines what the test parameters are and uses `default_parser()` to add any additional args before you start your testing. You can name it whatever you want but in below examples I refer to it as `run.py`\n- Declaring:\n - Test case design is very important and the design should speak for itself in the file/module. Declaring the concurrency/run-mode in the file lets everyone know that that particular file can run in parallel. Passing that info in the command line can be confusing over time because not everyone will remember what can and can't run parallel\n- 1 set of parameters per suite:\n - When we do a suite run we are only testing 1 system, therefore whatever is needed to communicate to the system should be the same throughout all test cases in that suite. As a result parameters should be the same for all test cases. This always helps keep test cases dry and makes them more step focused\n- Root of truth:\n - Single source of truth is a very good thing to have, when the single source is up-to-date and working then everyone will know it is 100% accurate information. By having your test cases as the single-source of truth, you can then publish your truth anywhere necessary and that destination will always have the info of the latest results. 
So the test cases should speak for themselves and have any doc strings necessary so that everyone can view the latest version of your testing\n\n## Features\n\n- Test Runner:\n - Discovers tests at runtime\n - Test Pattern Matching: Can run individual tests and test modules\n - Runs tests sequentially and parallelly in 1 run\n - Test Fixtures\n - Test Reserved Keywords\n - Test Module Watcher\n- Fixtures:\n - setup package\n - teardown package\n - setup module\n - teardown module\n - setup test\n - teardown test\n - metadata\n - parameterize\n- Logging:\n - Records are timestamped\n - Assertion failures are logged at `[ERROR]`\n - It will hold folders from the last n test runs\n - Each test module will be in its own folder\n - Each test will be in it's own file\n - Failed tests will be renamed to `FAILED_<test_name>.log`\n - HAR logger\n\n## Getting Started\n\n### Understanding the end² Flow (Psuedo Code)\n\n```python\ndef discover_package(parent_path):\n paths = get_all_paths(parent_path)\n modules = []\n for path in paths:\n if is_dir(path):\n modules += discover_package(path)\n else:\n modules.append(discover_module(path))\n return shuffle(modules)\n\n\ndef discover_module(path):\n module = import_module(path)\n for function in module:\n if is_test(function):\n module.add_test(function)\n return shuffle(module.tests)\n\n\ndef run_tests(discovered_modules):\n for package in discovered_packages:\n package.setup()\n for module in package.discovered_modules:\n module.setup():\n for test in module.tests:\n module.setup_test()\n args, kwargs = test_parameters(logger, package_object)\n test(*args, **kwargs)\n module.teardown_test()\n module.teardown()\n package.teardown()\n\n```\n\n### Simple Example of a Run script\n\n```python\n#!/usr/bin/env python3\nfrom end2.runner import start_test_run\nfrom end2.arg_parser import default_parser\n\n\nif __name__ == '__main__':\n args = default_parser().parse_args() # You can add your own arguments to default_parser if you want before you\n # call parse_args()\n\n def test_parameters(logger, package_object) -> tuple: # This is how parameters for tests are injected. When\n return (create_client(logger),), {} # overriding this you must always return a tuple of tuple\n # and dict. The logger arg here will be the logger\n # specific to the test. This method will be called\n # on every fixture and test\n\n test_suite_result, failed_imports = start_test_run(args, test_parameters)\n exit(test_suite_result.exit_code)\n\n```\n\n### Simple Example of a Test Module\n\nIn order for a method to become a discoverable test you must prefix your method name with `test_`. 
Each test method will have the same parameters\n\n```python\nfrom end2 import RunMode\n\n\n__run_mode__ = RunMode.SEQUENTIAL # This is required for every test module\n\n\ndef test_1(client):\n assert client.get('Hi') is not None # assert is used for validation; if assertion fails the test fails and exits on that assert\n\n\nasync def test_2(client): # Both sync and async test methods can exist in the same file\n actual = await client.get_stuff()\n assert actual == \"some expected data\"\n\n\ndef helper(): # Not a test method\n return {'a': 1}\n\n```\n\n### Simple Example of Checking Test Case Readiness at Runtime\n\n```python\nfrom end2 import (\n IgnoreTestException,\n RunMode,\n SkipTestException\n)\n\n\n__run_mode__ = RunMode.SEQUENTIAL # This is required for every test module\n\n\ndef test_1(client, logger):\n if not client.something_ready():\n raise IgnoreTestException() # You may ignore tests are runtime if necessary. No test result will be made\n assert client.get_stuff()\n logger.info('Hi')\n\n\nasync def test_2(client, logger): # Both sync and async test methods can exist in the same file\n if not client.something_else_ready():\n raise SkipTestException(\"thing not ready\") # You may skip tests are runtime if necessary as well.\n actual = await client.get_stuff() # A test result will be made with status of skipped and the\n assert actual == \"some expected data\" # message of what was supplied in the SkipTestException()\n logger.info('Hi async')\n\n```\n\n## Fixture Example of a Test Module\n\n```python\nfrom end2 import (\n on_failures_in_module,\n on_test_failure,\n parameterize,\n RunMode,\n setup,\n setup_test,\n teardown,\n teardown_test\n)\n\n\n__run_mode__ = RunMode.SEQUENTIAL # This is required for every test module\n\n\n@setup\ndef my_setup(client):\n client.do('something during setup')\n\n\n@setup_test\ndef my_setup_test(client):\n client.do('something during setup test')\n\n\n@teardown_test\ndef my_teardown_test(client):\n client.do('something during teardown test')\n\n\n@teardown\ndef my_teardown(client):\n client.do('something during teardown')\n\n\n@on_failures_in_module\ndef my_teardown(client): # Runs once at the end of the test module if you have 1 or more failed test cases\n client.do('something')\n\n\n# Parameterize takes 1 argument: list of tuples\n# - Each tuple must be the same length\n@parameterize([\n ('A', 'B', 'AB'),\n (1, 2, 3),\n ([], [1], [1]),\n (1.2, 2.3, 3.5),\n (True, False, 1)\n])\ndef test_1(var1, var2, rhs): # Parameterized parameters will come in after all runner.test_parameters\n assert var1 + var2 == rhs\n\n\n@metadata(defect_id='SR-432', case_id='C-23451') # Use metadata when you want to add extra info to your test\ndef test_2(client): # This data will also be available to you after the test run\n assert True is True\n\n\n@metadata(tags=['yellow', 'potato']) # tags is a special keyword used for Pattern Matching. As long as at\ndef test_3(client): # least 1 tag matches test will run (when using --suite-tag)\n assert True is True\n\n\ndef cleanup(client):\n client.do('some cleanup')\n\n\n@on_test_failure(cleanup) # This fixture will run the function in the decorator argument only if the test fails\ndef test_4(client):\n assert True is True\n\n```\n\n## Reserved Keywords\n\nThese are optional keyword-only-args that can be added at the end of your test case parameters:\n\n- **end** - This is helpful if you have event handling in the app you are testing and need the callback to be called. 
Only use this if you have to wait for some event otherwise you test will just timeout if **end** is not called:\n\n ```python\n def test_4(client, *, end):\n def handler:\n assert True is True\n end() # ends the test case\n client.onSomeEvent(handler) # This test will not finish until end() is called or has timeout\n\n ```\n\n ```python\n def test_4(client, *, end):\n def handler:\n assert True is True\n end.fail(\"This event should not have been called\") # ends the test case\n client.onSomeEvent(handler) # This test will not finish until end.fail() is called or has timeout\n\n ```\n\n- **logger** - The logger used for that specific test case\n- **step** - This is so you can record test steps in your test case, that may be useful after your test run\n\n ```python\n def test_5(client, *, end):\n # 1st arg is the description of the step\n # 2nd arg is the assertion-lambda, which can be None\n # 3rd arg is the function to call\n # nth args are the parameters for the function\n await step(\"my first step\", lambda x: x.code == 201, client.post, {'hi': 21})\n response = await step(\"my second step\", None, client.post, {'hi': 22})\n await step(\"my third step\", None, client.post, {'hi': 23})\n assert response.code == 201\n\n # Works with async as well\n async def test_6(client, *, end):\n await step(\"my first step\", lambda x: x.code == 201, client.post, {'hi': 21})\n response = await step(\"my second step\", None, client.post, {'hi': 22})\n await step(\"my third step\", None, client.post, {'hi': 23})\n assert response.code == 201\n\n ```\n\n- **package_object** - More on this in the next section\n\n### Packages Object\n\nThis is an object that you can build from within your packages. Since test parameters are always fresh objects you may want to pass data around and be able to access it in packages. This feature is kind of experimental but here are some ideas:\n\n- Build reports in the middle of runs\n- Building metrics\n\n#### Example of Test Package\n\n```python\n# test_package/__init__.py\nfrom end2 import (\n setup,\n teardown\n)\nfrom end2.fixtures import package_test_parameters\n\n\n@setup\ndef my_setup(package_globals):\n package_globals.stuff = ['my_static_stuff']\n\n\n@teardown\ndef my_setup(package_globals):\n package_globals.stuff.clear()\n\n\n@package_test_parameters\ndef my_custom_test_parameters(logger, pacakge_object): # Use if you want to override the test_parameters defined\n return (some_other_client(logger),) {} # in your 'run.py'\n\n```\n\n```python\n# test_package/test_sub_package/__init__.py\nfrom end2 import (\n setup,\n teardown\n)\n\n\n@setup\ndef my_setup(package_globals):\n package_globals.stuff # will be ['my_static_stuff']\n package_globals.sub_package_stuff = ['other stuff']\n\n\n@teardown\ndef my_setup(package_globals):\n package_globals.sub_package_stuff.clear()\n\n```\n\n```python\n# test_package/test_sub_package/my_test_module.py\nfrom end2 import RunMode\n\n\n__run_mode__ = RunMode.PARALLEL # This is required for every test module\n\n\ndef test_1(client, package_globals):\n assert package_globals.stuff == ['my_static_stuff']\n assert package_globals.sub_package_stuff = ['other stuff']\n\n```\n\n## Test Groups\n\nTest groups allow you to organize your tests around setup and teardown. Maybe some of your tests the setup only needs to be run for 2 of your tests. Or maybe you want the same setup for all tests but you want an additional setup for 4 of the tests. 
Groups are declared as classes and the methods are techincally static but without decorating with `@staticmethod`\n\n```python\n# test_package/test_sub_package/test_module.py\nfrom end2 import (\n RunMode,\n setup_test,\n teardown\n)\n\n\n__run_mode__ = RunMode.PARALLEL # This is required for every test module\n\n\n@setup_test\ndef setup_all(client):\n pass # do something at the start of each test.\n\n\ndef test_1(client):\n assert package_globals.stuff == ['my_static_stuff']\n assert package_globals.sub_package_stuff = ['other stuff']\n\n\nclass Group1:\n @setup_test\n def setup_all1(client):\n pass # do an extra something after setup_all\n\n def test_2(client):\n pass\n\n class Group2:\n @setup_test\n def setup_all2(client):\n pass # do an extra something after setup_all and setup_all1\n\n def test_2(client):\n pass\n\n```\n\n## CLI\n\nIt is best to run the `--help` arg on your \"run.py\" to get the latest information. Since **Pattern Matchers** are a little more complicated below is a more desciptive overview\n\n### Suite Pattern Matchers\n\n#### Default\n\nA suite path is a string that contains the path to the module delimited by a period:\n\n- To run a single test package: `--suite path/to/package`\n- To run a single test module: `--suite path/to/file.py`\n- To run multiple test packages: `--suite path/to/package path2/to/package`\n- To run multiple test packages and modules: `--suite path/to/package path2/to/package path3/to/package/module.py path4/to/package/module.py`\n It can also contains filters:\n- `::` which is to run specific tests in a module: `--suite path/to/package/module.py::test_1`\n- `;` which is delimiter for modules: `--suite path/to/package/module.py;module2.py`\n- `,` which is a delimiter for tests: `--suite path/to/package/module.py::test_1,test_2;module2.py`\n- `!` which means run everything before the `!` but nothing after:\n - `--suite path/to/package/!module.py;module2.py` runs everything in `path/to/package` except `module.py` and `module2.py`\n - `--suite path/to/package/module.py::!test_1,test_2;module2.py` runs `module2.py` and everything in `module.py` except `test_1` and `test_2`\n- `[n]` which will run specific parameterized tests:\n - `--suite path/to/package/module.py::test_name[1]` runs the 2nd test in the parameterized list\n - `--suite path/to/module.py::test_name[2:6]` runs tests 2 through 6 in the parameterized list\n - `--suite path/to/module.py::test_name[2:6:2]` runs the 2nd, 4th, 6th test in the parameterized list\n\n#### Tags\n\nTags can be defined by using `@metadata` in you test as mentioned [above](#fixture-example-of-a-test-module) or at the module. 
They works pretty similar to the **Default Pattern Matcher** but uses a tag instead of a test name:\n\n- `--suite-tag path/to/module.py::tag_1,tag_2`\n - This will include all tests if `tag_1` or `tag2` exist in `__tags__` variable in `path/to/module.py` or the metadata decorator includes the tags field with the mentioned tags\n- `--suite-tag path/to/package/tag1,tag2`\n - This will include any module that has `tag1` or `tag2` exist in `path/to/package`\n- `--suite-tag path/to/package/tag1,`\n - This is the same as above, but how you would use only 1 tag in your string (notice comma at the end)\n\n#### regex and glob\n\nThese 2 are pretty similar to each and I split module and test the same:\n\n- `--suite-regex <regex for module>::<regex for test>`\n- `--suite-glob <glob for module>::<glob for test>`\n\n#### Last Failed\n\nYou can also run only the tests that failed in the last run\n\n- `--suite-last-failed`\n\n## Resource Files\n\n- `.end2rc`: defines a default value for cli as well as:\n - Aliases: a short name given to a suite that is long. Aliases can also mention other aliases\n - Disabled Suites: The is a list of disabled suites/tests; this way you don't have to remember which ones to disable. Also the list of suites/tests are centralized here; you won't have to hunt them down in each file\n- `logs/.lastrunrc`: defines a list of tests that failed in the last run\n\n## Log Manager\n\nA **Log Manager** is meant to help organize your logging into timestamped folders that rotate every n number of folders. You can subclass **LogManager** if you want, or use the default own. You can use this if you have other tools in you repo that have logging as well\n\n##### Default Suite Log Manager\n\nFor Suite runs you will use a **Suite Log Manager**. The default does what is described below and you can also subclass **SuiteLogManager** if you want:\n\n- Rotates your suite run log folders\n- Logs INFO to stdin\n- Logs INFO to a standalone file as well and it is not interlaced\n- Has a delimiter for both modules and tests\n- Handles events before and after on:\n - suite\n - modules\n - fixtures\n - tests\n- Creates a log subfolder for each module\n- Creates a file for both setup and teardown of a module\n- Creates a log file for each test\n- Marks (Prefixes) file name as PASSED, FAILED, SKIPPED when test is finished\n"
},
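To connect the pattern syntax in the README above to code: the patterns are ordinary string arguments to the parser used by the example `run.py` scripts. A hedged sketch of driving it programmatically; the attribute name `suite` is an assumption about `default_parser()`'s dest, not verified here:

```python
from end2.arg_parser import default_parser

# Equivalent to: python run.py --suite "path/to/package/module.py::!test_1,test_2"
args = default_parser().parse_args(
    ['--suite', 'path/to/package/module.py::!test_1,test_2']
)
print(args.suite)  # assumed dest name for --suite
```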
{
"alpha_fraction": 0.6291402578353882,
"alphanum_fraction": 0.6323398947715759,
"avg_line_length": 41.29824447631836,
"blob_id": "f920009c2b1eac0df2e2a04a0e1abe8517deb27f",
"content_id": "66a7844a3c7f6583bc9864f9c1e73cce296637a7",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 16877,
"license_type": "permissive",
"max_line_length": 162,
"num_lines": 399,
"path": "/end2/logger/log_manager.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from datetime import datetime\nimport logging\nfrom logging.handlers import MemoryHandler\nimport os\nimport platform\nfrom pathlib import Path\nimport shlex\nimport shutil\nimport struct\nimport subprocess\nimport sys\nfrom typing import Tuple\n\n\nfrom end2.constants import Status\nfrom end2.models.result import (\n Result,\n TestMethodResult,\n TestModuleResult,\n TestSuiteResult\n)\n\n\nFOLDER = 'logs'\n_DATEFORMAT = '%Y-%m-%d %H:%M:%S CDT'\n\n\ndef get_terminal_size():\n \"\"\" getTerminalSize()\n - get width and height of console\n - works on linux,os x,windows,cygwin(windows)\n originally retrieved from:\n http://stackoverflow.com/questions/566746/how-to-get-console-window-width-in-python\n \"\"\"\n current_os = platform.system()\n tuple_xy = None\n if current_os == 'Windows':\n tuple_xy = _get_terminal_size_windows()\n if tuple_xy is None:\n tuple_xy = _get_terminal_size_tput()\n # needed for window's python in cygwin's xterm!\n if current_os in ['Linux', 'Darwin'] or current_os.startswith('CYGWIN'):\n tuple_xy = _get_terminal_size_linux()\n if tuple_xy is None:\n print()\n \"default\"\n tuple_xy = (80, 25) # default value\n return tuple_xy\n\ndef _get_terminal_size_windows():\n try:\n from ctypes import windll, create_string_buffer\n # stdin handle is -10\n # stdout handle is -11\n # stderr handle is -12\n h = windll.kernel32.GetStdHandle(-12)\n csbi = create_string_buffer(22)\n res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi)\n if res:\n (bufx, bufy, curx, cury, wattr,\n left, top, right, bottom,\n maxx, maxy) = struct.unpack(\"hhhhHhhhhhh\", csbi.raw)\n sizex = right - left + 1\n sizey = bottom - top + 1\n return sizex, sizey\n except:\n pass\n\ndef _get_terminal_size_tput():\n # get terminal width\n # src: http://stackoverflow.com/questions/263890/how-do-i-find-the-width-height-of-a-terminal-window\n try:\n cols = int(subprocess.check_call(shlex.split('tput cols')))\n rows = int(subprocess.check_call(shlex.split('tput lines')))\n return (cols, rows)\n except:\n pass\n\ndef _get_terminal_size_linux():\n def ioctl_GWINSZ(fd):\n try:\n import fcntl\n import termios\n cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))\n return cr\n except:\n pass\n\n cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)\n if not cr:\n try:\n fd = os.open(os.ctermid(), os.O_RDONLY)\n cr = ioctl_GWINSZ(fd)\n os.close(fd)\n except:\n pass\n if not cr:\n try:\n cr = (os.environ['LINES'], os.environ['COLUMNS'])\n except:\n return None\n return int(cr[1]), int(cr[0])\n\n\n_COLUMN_SIZE = get_terminal_size()[0]\n\n\n\ndef create_full_logger(name: str, base_folder: str = FOLDER, stream_level: int = logging.INFO) -> logging.Logger:\n return LogManager(name, base_folder, stream_level).logger\n\n\ndef create_file_logger(name: str, base_folder: str = FOLDER) -> logging.Logger:\n return LogManager(name, base_folder).create_file_logger(name)\n\n\ndef _get_log_handler(logger: logging.Logger, handler_type: type) -> logging.Handler:\n for handler in logger.handlers:\n if type(handler) == handler_type:\n return handler\n\n\nclass LogManager:\n \"\"\"\n Used to manage logs: How many log history folders to keep and how to organize the log folders/files inside.\n \"\"\"\n formatter = logging.Formatter(fmt=f'%(asctime)s [%(levelname)s] %(message)s', datefmt=_DATEFORMAT)\n\n def __init__(self, logger_name: str, base_folder: str = FOLDER, max_folders: int = 10, stream_level: int = logging.INFO, mode: str = 'w') -> None:\n self.logger_name = logger_name\n self.base_folder = 
base_folder\n self.max_folders = max_folders\n self.stream_level = stream_level\n self._create_folder()\n self._rotate()\n self.logger = self.create_full_logger(self.logger_name, stream_level)\n\n def _create_folder(self) -> None:\n self.folder = os.path.join(self.base_folder, datetime.now().strftime(\"%m-%d-%Y_%H-%M-%S\"))\n os.makedirs(self.folder, exist_ok=True)\n\n def _rotate(self) -> None:\n sub_folders = sorted([x for x in Path(self.base_folder).iterdir() if x.is_dir()], key=os.path.getmtime)\n count = len(sub_folders) - self.max_folders\n if count > 0:\n for i in range(count):\n shutil.rmtree(sub_folders[i])\n\n def new_instance(self, *args, **kwargs):\n return self.__class__(self.logger_name, self.base_folder, self.max_folders, self.stream_level, *args, **kwargs)\n\n @classmethod\n def create_file_handler(cls, folder: str, name: str, file_level: int = logging.DEBUG, mode: str = 'w') -> logging.FileHandler:\n os.makedirs(folder, exist_ok=True)\n file_handler = logging.FileHandler(os.path.join(folder, f'{name}.log'), mode=mode)\n file_handler.setLevel(file_level)\n file_handler.setFormatter(cls.formatter)\n return file_handler\n\n @staticmethod\n def _close_file_handlers(logger: logging.Logger):\n handler_ = None\n for handler in logger.handlers:\n if isinstance(handler, logging.FileHandler):\n handler.flush()\n handler.close()\n if os.path.exists(handler.baseFilename) and os.stat(handler.baseFilename).st_size == 0:\n os.remove(handler.baseFilename)\n handler_ = handler\n logger.removeHandler(handler_)\n\n @classmethod\n def create_stream_handler(cls, stream_level: int = logging.INFO) -> logging.StreamHandler:\n stream_handler = logging.StreamHandler(sys.stdout)\n stream_handler.setLevel(stream_level)\n stream_handler.setFormatter(cls.formatter)\n return stream_handler\n\n def get_logger(self, name: str) -> logging.Logger:\n return logging.getLogger(f'{self.folder}.{name}')\n\n def create_full_logger(self, name: str, stream_level: int = logging.INFO, mode='w') -> logging.Logger:\n logger = self.get_logger(name)\n if not logger.hasHandlers():\n logger.setLevel(logging.DEBUG)\n logger.addHandler(self.create_file_handler(self.folder, name, logging.DEBUG, mode=mode))\n logger.addHandler(self.create_stream_handler(stream_level))\n logger.propagate = False\n return logger\n\n def create_file_logger(self, name: str) -> logging.Logger:\n logger = self.get_logger(name)\n if not logger.hasHandlers():\n logger.setLevel(logging.DEBUG)\n logger.addHandler(self.create_file_handler(self.folder, name, logging.DEBUG))\n logger.propagate = False\n return logger\n\n def close(self) -> None:\n self._close_file_handlers(self.logger)\n\n\nclass SuiteLogManager(LogManager):\n \"\"\"\n Used to organize log files in sub folders and mark log files on completion\n \"\"\"\n formatter = logging.Formatter(fmt=f'%(asctime)s [%(levelname)s] %(infix)s %(message)s', datefmt=_DATEFORMAT)\n\n def __init__(self, logger_name: str = 'suite_run', base_folder: str = FOLDER, max_folders: int = 10, stream_level: int = logging.INFO) -> None:\n super().__init__(logger_name, base_folder, max_folders, stream_level, mode='a+')\n self.test_run_file_handler = _get_log_handler(self.logger, logging.FileHandler)\n self._test_terminator = '\\n' + ('-' * _COLUMN_SIZE)\n self._module_terminator = '\\n' + ('=' * _COLUMN_SIZE)\n\n @staticmethod\n def _change_filter_name(logger: logging.Logger, name: str) -> None:\n for handler in logger.handlers:\n if isinstance(handler, logging.StreamHandler):\n for filter in handler.filters:\n 
filter.name = name\n\n    def _add_flush_handler(self, logger: logging.Logger, filter_name: str) -> None:\n        filter_ = InfixFilter(filter_name)\n        test_run_memory_handler = ManualFlushHandler(\n            self.create_file_handler(self.folder, self.logger_name, logging.INFO, filter_=filter_, mode='a+')\n        )\n        test_run_memory_handler.setFormatter(self.formatter)\n        test_run_memory_handler.addFilter(filter_)\n        logger.addHandler(test_run_memory_handler)\n\n    @staticmethod\n    def _flush_and_close_log_memory_handler(logger: logging.Logger, infix_name: str) -> None:\n        handler_ = None\n        for handler in logger.handlers:\n            if isinstance(handler, ManualFlushHandler) and handler.filters[0].name == infix_name:\n                handler.flush()\n                handler.close()\n                handler_ = handler\n                break\n        logger.removeHandler(handler_)\n\n    def _get_logger(self, module_name: str, test_name: str, formatter_infix: str = None) -> Tuple[logging.Logger, str]:\n        logger = self.get_logger(f'{module_name}.{test_name}')\n        infix_name = f'{module_name.split(\".\")[-1]}::{formatter_infix or test_name}'\n        if not logger.hasHandlers():\n            filter_ = InfixFilter(infix_name)\n            logger.setLevel(logging.DEBUG)\n            logger.addHandler(self.create_file_handler(\n                os.path.join(self.folder, module_name), test_name.replace(' ', '_'),\n                logging.DEBUG,\n                filter_=filter_))\n            logger.addHandler(self.create_stream_handler(filter_=filter_))\n        return logger, infix_name\n\n    @classmethod\n    def create_file_handler(cls, folder: str, name: str, file_level: int = logging.DEBUG, mode: str = 'w', filter_: logging.Filter = None) -> logging.FileHandler:\n        os.makedirs(folder, exist_ok=True)\n        file_handler = logging.FileHandler(os.path.join(folder, f'{name}.log'), mode=mode)\n        file_handler.setLevel(file_level)\n        if filter_:\n            file_handler.addFilter(filter_)\n            file_handler.setFormatter(cls.formatter)\n        else:\n            file_handler.setFormatter(super().formatter)\n        return file_handler\n\n    @classmethod\n    def create_stream_handler(cls, stream_level: int = logging.INFO, filter_: logging.Filter = None) -> logging.StreamHandler:\n        stream_handler = logging.StreamHandler(sys.stdout)\n        stream_handler.setLevel(stream_level)\n        if filter_:\n            stream_handler.addFilter(filter_)\n            stream_handler.setFormatter(cls.formatter)\n        else:\n            stream_handler.setFormatter(super().formatter)\n        return stream_handler\n\n    def on_suite_start(self, suite_name: str) -> None:\n        pass\n\n    def on_module_start(self, module_name: str) -> None:\n        pass\n\n    def on_setup_module_done(self, module_name: str, result: Result) -> None:\n        logger, infix_name = self._get_logger(module_name, 'setup')\n        self._flush_and_close_log_memory_handler(logger, infix_name)\n        if result and result.status is Status.SKIPPED:\n            self.logger.critical(f'Setup Skipping all tests in {module_name}')\n            self._close_file_handlers(logger)\n\n    def on_setup_test_done(self, module_name: str, test_name: str, setup_test_result: Result) -> None:\n        logger, infix_name = self._get_logger(module_name, test_name, 'setup_test')\n        self._flush_and_close_log_memory_handler(logger, infix_name)\n        if setup_test_result and setup_test_result.status is Status.SKIPPED:\n            logger.critical(f'Setup Test Failed; skipping {test_name}')\n            file_handler = None\n            for handler in logger.handlers:\n                if isinstance(handler, logging.FileHandler):\n                    handler.close()\n                    file_handler = handler\n                    os.rename(handler.baseFilename, handler.baseFilename.replace(f'{test_name}', f'{Status.SKIPPED.name}_{test_name}'))\n            logger.removeHandler(file_handler)\n            logger.addHandler(self.create_file_handler(os.path.join(self.folder, module_name), test_name, logging.DEBUG))\n\n    def on_test_done(self, module_name: str, test_method_result: TestMethodResult) -> None:\n        logger, infix_name = self._get_logger(module_name, test_method_result.name)\n        self._flush_and_close_log_memory_handler(logger, infix_name)\n        if test_method_result.status is Status.FAILED:\n            self._move_failed_test(module_name, logger)\n        self._close_file_handlers(logger) \n        self.logger.info(f'{module_name}::{test_method_result}{self._test_terminator}')\n\n    def on_parameterized_test_done(self, module_name: str, parameter_result: TestMethodResult) -> None:\n        self.on_test_done(module_name, parameter_result)\n        if parameter_result.status is Status.FAILED:\n            self._move_failed_test(module_name, self._get_logger(module_name, parameter_result.name)[0])\n        self.logger.info(f'{module_name}::{parameter_result}{self._test_terminator}')\n\n    def _move_failed_test(self, module_name: str, logger: logging.Logger) -> None:\n        for handler in logger.handlers:\n            if isinstance(handler, logging.FileHandler):\n                handler.close()\n                base_name = os.path.basename(handler.baseFilename)\n                os.rename(handler.baseFilename, os.path.join(self.folder, f'{Status.FAILED.name}_{module_name}.{base_name}'))\n\n    def on_teardown_test_done(self, module_name: str, test_name: str, teardown_test_result: Result) -> None:\n        logger, infix_name = self._get_logger(module_name, test_name, 'teardown_test')\n        self._flush_and_close_log_memory_handler(logger, infix_name)\n        if teardown_test_result and teardown_test_result.status is not Status.PASSED:\n            self.logger.critical(f'Teardown Test Failed for {test_name}')\n\n    def on_teardown_module_done(self, module_name: str, result: Result) -> None:\n        logger, infix_name = self._get_logger(module_name, 'teardown')\n        self._flush_and_close_log_memory_handler(logger, infix_name)\n        if result and result.status is Status.FAILED:\n            self.logger.critical(f'Teardown Module Failed for {module_name}')\n        self._close_file_handlers(logger)\n\n    def on_module_done(self, test_module_result: TestModuleResult) -> None:\n        if test_module_result.status in [Status.PASSED, Status.SKIPPED]:\n            for test_result in test_module_result.test_results:\n                self._close_file_handlers(self._get_logger(test_module_result.name, test_result.name)[0])\n            os.rename(\n                os.path.join(self.folder, test_module_result.name),\n                os.path.join(self.folder, f'{test_module_result.status.name}_{test_module_result.name}'))\n        self.logger.info(f'{test_module_result}{self._module_terminator}')\n\n    def on_suite_stop(self, suite_result: TestSuiteResult) -> None:\n        self.logger.info(str(suite_result))\n        self._close_file_handlers(self.logger)\n\n    def get_setup_logger(self, module_name: str) -> logging.Logger:\n        logger, infix_name = self._get_logger(module_name, 'setup')\n        self._add_flush_handler(logger, infix_name)\n        return logger\n\n    def get_setup_test_logger(self, module_name: str, test_name: str) -> logging.Logger:\n        logger, infix_name = self._get_logger(module_name, test_name, 'setup_test')\n        self._add_flush_handler(logger, infix_name)\n        return logger\n\n    def get_test_logger(self, module_name: str, test_name: str) -> logging.Logger:\n        logger, infix_name = self._get_logger(module_name, test_name)\n        self._add_flush_handler(logger, infix_name)\n        self._change_filter_name(logger, infix_name)\n        return logger\n\n    def get_teardown_test_logger(self, module_name: str, test_name: str) -> logging.Logger:\n        logger, infix_name = self._get_logger(module_name, test_name, 'teardown_test')\n        self._add_flush_handler(logger, infix_name)\n        self._change_filter_name(logger, infix_name)\n        return logger\n\n    def get_teardown_logger(self, module_name: str) -> logging.Logger:\n        logger, infix_name = self._get_logger(module_name, 'teardown')\n        self._add_flush_handler(logger, infix_name)\n        return logger\n\n\nclass ManualFlushHandler(MemoryHandler):\n    \"\"\"\n    This class will only flush on close; also emits at log level or above.\n    \"\"\"\n    def __init__(self, target, emit_level=logging.INFO) -> None:\n        super().__init__(capacity=None, target=target)\n        self.emit_level = emit_level\n\n    def emit(self, record) -> None:\n        if record.levelno >= self.emit_level:\n            super().emit(record)\n\n    def shouldFlush(self, record) -> bool:\n        return False\n\n\nclass InfixFilter(logging.Filter):\n    def filter(self, record) -> bool:\n        record.infix = self.name\n        return True\n"
},
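The ManualFlushHandler in the record above buffers records and relies on MemoryHandler flushing its buffer when close() is called. A minimal, self-contained sketch of that buffer-until-close pattern (class and logger names here are illustrative, not from the repo):

import logging
import sys
from logging.handlers import MemoryHandler

class BufferUntilClose(MemoryHandler):
    # Never flush automatically; MemoryHandler.close() still flushes the
    # buffer to the target handler because flushOnClose defaults to True.
    def __init__(self, target: logging.Handler) -> None:
        super().__init__(capacity=0, target=target)

    def shouldFlush(self, record: logging.LogRecord) -> bool:
        return False

logger = logging.getLogger('demo')
logger.setLevel(logging.DEBUG)
handler = BufferUntilClose(logging.StreamHandler(sys.stdout))
logger.addHandler(handler)
logger.info('buffered; nothing is written yet')
handler.close()  # the buffered record is emitted here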
{
"alpha_fraction": 0.602150559425354,
"alphanum_fraction": 0.6523297429084778,
"avg_line_length": 20.461538314819336,
"blob_id": "a751e696fc5aa431cf2066e0ffd3081b85e0f9d3",
"content_id": "5b9da4d526fe83a21b9c83d0b74f3cc596727bc8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 279,
"license_type": "permissive",
"max_line_length": 55,
"num_lines": 13,
"path": "/examples/simple/regression/import_fail.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from invalid.module\nfrom end2 import RunMode\n\n\n__run_mode__ = RunMode.PARALLEL\n\n\ndef test_python_gotcha_3(logger):\n assert round(1.5) == 2\n assert round(2.5) == 2\n assert round(3.5) == 4\n assert round(4.5) == 4\n logger.info(\"Odds round up while evens round down\")\n"
},
{
"alpha_fraction": 0.6277372241020203,
"alphanum_fraction": 0.650547444820404,
"avg_line_length": 29.44444465637207,
"blob_id": "5632fa48be2ce9b602a3f93f7e8a3bd01d0a098d",
"content_id": "23e608d8d9b75e9384e7c74159fdbf20e0495bae",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1096,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 36,
"path": "/examples/fake_clients/regression/sample2.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "import end2\n\n__run_mode__ = end2.PARALLEL\n\n\[email protected]_test\nasync def my_setup_test(client, async_client):\n async_client.logger.info('running setup test')\n assert await async_client.get() == {}\n\n\[email protected]\nasync def my_teardown(client, async_client):\n client.logger.info('running teardown')\n assert await async_client.delete() is None\n\n\ndef test_21(client, async_client, *, step):\n client.logger.info('hi')\n step(\"my first step\", lambda x: x is None, client.put, {'hi': 21})\n result = step(\"my second step\", None, client.put, {'hi': 22})\n step(\"my second step\", None, client.put, {'hi': 23})\n assert result is None\n\n\[email protected](tags=['business'])\nasync def test_22(client, async_client, *, step):\n client.logger.info('hi22')\n await step(\"my first step\", lambda x: x == {}, async_client.post, {'hi': 21})\n result = await step(\"my second step\", None, async_client.post, {'hi': 22})\n await step(\"my third step\", None, async_client.post, {'hi': 23})\n assert result == {}\n\n\ndef test_23(client, async_client):\n assert client.get() == client.get()\n"
},
{
"alpha_fraction": 0.5177085399627686,
"alphanum_fraction": 0.5598993897438049,
"avg_line_length": 33.4466667175293,
"blob_id": "f786e20dcc8ae514b0b9e1d3b9bb9850ce2154f0",
"content_id": "4938f976dd44c900345f1f919980154cdff5a6e7",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5167,
"license_type": "permissive",
"max_line_length": 303,
"num_lines": 150,
"path": "/end2/logger/har_logger.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from datetime import datetime\nimport json\nimport logging\nimport os\n\n\nclass HarFileHandler(logging.FileHandler):\n def __init__(self, filename, mode: str, encoding: str=None, delay: bool=False, errors=None,\n version=\"1.2\", creator_version=\"537.36\", pages_title=\"HAR Generator\") -> None:\n super().__init__(filename, mode=mode, encoding=encoding, delay=delay)\n self._starter(version, creator_version, pages_title)\n\n def _starter(self, version, creator_version, pages_title):\n starter = f'''{{\n \"log\": {{\n \"version\": \"{version}\",\n \"creator\": {{\n \"name\": \"WebInspector\",\n \"version\": \"{creator_version}\"\n }},\n \"pages\": [\n {{\n \"startedDateTime\": \"{datetime.now().strftime(\"%Y-%m-%dT%H:%M:%S.123Z\")}\",\n \"id\": \"page_1\",\n \"title\": \"{pages_title}\",\n \"pageTimings\": {{\n \"onContentLoad\": 908.7060000747442,\n \"onLoad\": 2029.8569998703897\n }}\n }}\n ],\n \"entries\": [\n'''\n self.emit(logging.makeLogRecord({'msg': starter}))\n\n def _closer(self):\n closer = ''' ]\n }\n}\n'''\n self.emit(logging.makeLogRecord({'msg': closer}))\n\n def close(self) -> None:\n self._closer()\n return super().close()\n\n\nclass HarLogger:\n def __init__(self):\n self.logger = logging.getLogger()\n self.logger.setLevel(logging.DEBUG)\n file_handler = HarFileHandler(os.path.join('logs', 'test.har'), mode='w')\n file_handler.setLevel(logging.DEBUG)\n file_handler.setFormatter(logging.Formatter(fmt=f'%(message)s'))\n self.logger.addHandler(file_handler)\n self.entry_delimiter = \"\"\n\n def info(self, request, response):\n self.debug(request, response)\n\n def debug(self, request, response):\n entry = self._make_entry(request, response)\n self.logger.info(entry)\n\n def warning(self, request, response):\n self.debug(request, response)\n\n def critical(self, request, response):\n self.debug(request, response)\n\n def error(self, request, response):\n self.debug(request, response)\n\n def _make_entry(self, request, response):\n entry = self.entry_delimiter + f'''{{\n \"_fromCache\": \"disk\",\n \"_initiator\": {{\n \"type\": \"script\",\n \"stack\": {{}}\n }},\n \"_priority\": \"High\",\n \"_resourceType\": \"fetch\",\n \"cache\": {{}},\n \"pageref\": \"page_1\",\n \"request\": {{\n \"method\": \"{response._method}\",\n \"url\": \"{request.full_url}\",\n \"httpVersion\": \"http/2.0\",\n \"headers\": {self._make_headers(request.header_items())},\n \"queryString\": [],\n \"cookies\": [],\n \"headersSize\": -1,\n \"bodySize\": 0\n }},\n \"response\": {{\n \"status\": {response.status},\n \"statusText\": \"{response.reason}\",\n \"httpVersion\": \"http/2.0\",\n \"headers\": {self._make_headers(response.headers._headers)},\n \"cookies\": [],\n \"content\": {{\n \"size\": 0,\n \"mimeType\": \"{response.headers._default_type}\",\n \"text\": {json.dumps(response.read().decode(response.headers.get_content_charset()))}\n }},\n \"redirectURL\": \"\",\n \"headersSize\": -1,\n \"bodySize\": 0,\n \"_transferSize\": 0,\n \"_error\": null\n }},\n \"serverIPAddress\": \"140.82.113.4\",\n \"startedDateTime\": \"{datetime.now().strftime(\"%Y-%m-%dT%H:%M:%S.123Z\")}\",\n \"time\": 4.252000013366342,\n \"timings\": {{\n \"blocked\": 1.195999818906188,\n \"dns\": -1,\n \"ssl\": -1,\n \"connect\": -1,\n \"send\": 0,\n \"wait\": 1.8329999623298645,\n \"receive\": 1.223000232130289,\n \"_blocked_queueing\": 1.076999818906188\n }}\n }}'''\n self.entry_delimiter = \",\\n\"\n return entry\n\n def _make_headers(self, r_headers, indent=2*6):\n list_dict = [{\"name\": header[0], \"value\": 
header[1]} for header in r_headers]\n return json.dumps(list_dict, indent=indent)\n\n\nif __name__ == '__main__':\n a = HarLogger()\n from urllib import request\n headers = {\"User-Agent\":\"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:66.0) Gecko/20100101 Firefox/66.0\", \"Accept\":\"application/json,text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\", \"Content-Type\": \"application/json\", \"DNT\":\"1\",\"Connection\":\"close\", \"Upgrade-Insecure-Requests\":\"1\"}\n req = request.Request('https://google.com', headers=headers)\n with request.urlopen(req, timeout=3) as response:\n print(dir(req))\n print(dir(response))\n a.info(req, response)\n b =1\n\n headers = {\"User-Agent\":\"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:66.0) Gecko/20100101 Firefox/66.0\", \"Accept\":\"application/json,text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\", \"Content-Type\": \"application/json\", \"DNT\":\"1\",\"Connection\":\"close\", \"Upgrade-Insecure-Requests\":\"1\"}\n req = request.Request('https://reqres.in/api/users?page=2', headers=headers)\n with request.urlopen(req, timeout=6) as response:\n print(req.full_url)\n a.info(req, response)\n b =1\n"
},
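HarFileHandler above assembles HAR JSON by string templating. As a point of comparison, here is a hedged sketch of producing one HAR 1.2 "entries" record from a plain dict, which sidesteps manual brace escaping (the field values are placeholders, and only a representative subset of HAR fields is shown):

import json
from datetime import datetime, timezone

def make_har_entry(method: str, url: str, status: int, reason: str) -> str:
    # Build the entry as a dict and let json.dumps handle quoting/escaping.
    entry = {
        'pageref': 'page_1',
        'startedDateTime': datetime.now(timezone.utc).strftime('%Y-%m-%dT%H:%M:%S.000Z'),
        'request': {'method': method, 'url': url, 'httpVersion': 'http/2.0',
                    'headers': [], 'queryString': [], 'cookies': [],
                    'headersSize': -1, 'bodySize': 0},
        'response': {'status': status, 'statusText': reason, 'httpVersion': 'http/2.0',
                     'headers': [], 'cookies': [],
                     'content': {'size': 0, 'mimeType': 'application/json', 'text': ''},
                     'redirectURL': '', 'headersSize': -1, 'bodySize': 0},
        'cache': {},
        'timings': {'send': 0, 'wait': 0, 'receive': 0},
    }
    return json.dumps(entry, indent=2)

print(make_har_entry('GET', 'https://example.com', 200, 'OK'))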
{
"alpha_fraction": 0.7263681888580322,
"alphanum_fraction": 0.746268630027771,
"avg_line_length": 17.272727966308594,
"blob_id": "d8b41c369014183bd32db67bcfdfd968214cc116",
"content_id": "6981d4702bc3f601900a280499a1b80c4fd6b28c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 201,
"license_type": "permissive",
"max_line_length": 39,
"num_lines": 11,
"path": "/examples/package_objects/package1/__init__.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "\nfrom end2 import setup, teardown\n\n\n@setup\ndef my_smoke_setup(global_object):\n global_object.package1 = \"package1\"\n\n\n@teardown\ndef my_smoke_teardown(global_object):\n print(global_object.package1)"
},
{
"alpha_fraction": 0.6191860437393188,
"alphanum_fraction": 0.6366279125213623,
"avg_line_length": 14.636363983154297,
"blob_id": "09f11cc476e436b6304646ab8bd52fcf12d37aee",
"content_id": "319bc509738eb5f337cd30df920e59aa5c1f2a52",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 344,
"license_type": "permissive",
"max_line_length": 68,
"num_lines": 22,
"path": "/examples/simple/smoke/sample2.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from end2 import (\n RunMode,\n setup_test\n)\n\n\n__run_mode__ = RunMode.PARALLEL\n\n\n@setup_test\ndef my_bad(logger):\n assert False, \"FAILING SETUP_TEST ON PURPOSE\"\n\n\ndef test_1(logger):\n assert 1 == 1\n assert True is True\n logger.info('Hi')\n\n\ndef test_2(logger):\n assert f\"testing: {test_2.__name__} using logger: {logger.name}\"\n"
},
{
"alpha_fraction": 0.5431856513023376,
"alphanum_fraction": 0.5493550300598145,
"avg_line_length": 31.71559715270996,
"blob_id": "22b81afbf33dc1f2df44c4d2eff5bb48eb4d575f",
"content_id": "84f47814124cfc6bb759a57c69d5ac1d875cfae1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3566,
"license_type": "permissive",
"max_line_length": 94,
"num_lines": 109,
"path": "/end2/resource_profile.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from configparser import ConfigParser\nimport os\n\nfrom end2.constants import Status\nfrom end2.models.result import TestSuiteResult\n\n\n_PRODUCT_NAME = 'end2'\n_FILE_NAME = f'.{_PRODUCT_NAME}rc'\nLAST_RUN_PATH = os.path.join('logs', f'.{_PRODUCT_NAME}lastrunrc')\n\n\ndef get_rc() -> ConfigParser:\n file_name = f'.{_PRODUCT_NAME}rc'\n rc = ConfigParser(comment_prefixes=('#',))\n if not rc.read(file_name):\n # Temporarily recreating with a different comment_prefix\n # so I can have comments\n rc = ConfigParser(comment_prefixes=(';',))\n rc.read_string(_create_default_rc_string())\n with open(_FILE_NAME, 'w') as configfile:\n rc.write(configfile)\n rc = ConfigParser(comment_prefixes=('#',))\n rc.read(_FILE_NAME)\n else:\n rc = _check_for_corruption(_FILE_NAME)\n return rc\n\n\n_default_rc_dict = {\n 'settings': {\n 'max-workers': (int, 20),\n 'max-log-folders': (int, 10),\n 'no-concurrency': (bool, False),\n 'stop-on-fail': (bool, False),\n 'event-timeout': (float, 20.0)\n },\n 'suite-alias': {\n '# Examples': (str, ''),\n '# short_suite_name': (str, 'path/to/suite1.py path/to/another/suite2.py'),\n '# super_suite': (str, 'short_suite_name path/to/suite3.py')\n },\n 'suite-disabled': {\n '# Examples': (str, ''),\n '# path/to/suite3.py': (str, 'BUG-1234'),\n '# short_suite_name': (str, 'Need to refactor stuff')\n }\n}\n\ndef _create_default_rc_string() -> str:\n lines = []\n for section, options in _default_rc_dict.items():\n lines.append(f'[{section}]' + \"\\n\")\n for k, v in options.items():\n lines.append(f\"{k} = {str(v[1])}\" + \"\\n\")\n lines.append(\"\\n\")\n return ''.join(lines)\n\n\ndef _check_for_corruption(file_name: str) -> ConfigParser:\n corrupted = False\n rc = ConfigParser(comment_prefixes=(';',))\n rc.read(file_name)\n for section, options in _default_rc_dict.items():\n if section == 'settings':\n for k, v in options.items():\n if section in rc:\n if not isinstance(rc[section].get(k, None), v[0]):\n corrupted = True\n rc[section][k] = str(v[1])\n else:\n corrupted = True\n rc[section] = {k: str(v[1])}\n elif section not in rc:\n corrupted = True\n rc[section] = _default_rc_dict[section]\n if corrupted:\n with open(file_name, 'w') as configfile:\n rc.write(configfile)\n rc = ConfigParser(comment_prefixes=('#',))\n rc.read(file_name)\n return rc\n\n\ndef create_last_run_rc(results: TestSuiteResult) -> None:\n failed_test_dict = {}\n for module in results:\n if module.status is Status.FAILED:\n if module.failed_count == module.total_count:\n failed_test_dict[module.file_name] = \"All Failed\"\n else:\n test_list = []\n for test in module:\n if test.status is Status.FAILED:\n test_list.append(test.name)\n failed_test_dict[f'{module.file_name}::{\",\".join(test_list)}'] = \"Some failed\"\n with open(LAST_RUN_PATH, 'w') as configfile:\n rc = ConfigParser()\n rc.read_dict({\n 'failures': failed_test_dict\n })\n rc.write(configfile)\n\n\ndef get_last_run_rc() -> ConfigParser:\n rc = ConfigParser()\n if not rc.read(LAST_RUN_PATH):\n raise FileNotFoundError(LAST_RUN_PATH)\n return rc\n"
},
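The rc-file handling in the record above boils down to a write-defaults-then-read round trip with ConfigParser. A runnable sketch of that core idea (the file name and keys here are illustrative, not the repo's):

from configparser import ConfigParser
import os

RC_PATH = 'demo.rc'

def load_rc() -> ConfigParser:
    rc = ConfigParser()
    if not rc.read(RC_PATH):  # read() returns the list of files it parsed
        rc['settings'] = {'max-workers': '20', 'stop-on-fail': 'False'}
        with open(RC_PATH, 'w') as f:
            rc.write(f)       # first run: persist the defaults
    return rc

rc = load_rc()
assert rc['settings'].getint('max-workers') == 20
assert rc['settings'].getboolean('stop-on-fail') is False
os.remove(RC_PATH)  # clean up the demo file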
{
"alpha_fraction": 0.6814516186714172,
"alphanum_fraction": 0.6854838728904724,
"avg_line_length": 14.5,
"blob_id": "6024353e49f7ca0f9d4f0dd285be530af1238614",
"content_id": "76ef928fe392fe334681bf2f524245b3eb614164",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 248,
"license_type": "permissive",
"max_line_length": 65,
"num_lines": 16,
"path": "/examples/simple/regression/sample_skipped.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from end2 import (\n RunMode,\n setup\n)\n\n\n__run_mode__ = RunMode.PARALLEL\n\n\n@setup\ndef my_setup(logger):\n assert False, \"FAILING SETUP ON PURPOSE\"\n\n\ndef test_skipped(logger):\n assert False, \"THIS TEST SHOULD NOT RUN BECAUSE SETUP FAILED\"\n"
},
{
"alpha_fraction": 0.6984572410583496,
"alphanum_fraction": 0.6998597383499146,
"avg_line_length": 30,
"blob_id": "9f3ac575593952fc6c2db838e6572a7988bdbbd4",
"content_id": "d18a68907d10625b657fac933c332b07c723f58e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 713,
"license_type": "permissive",
"max_line_length": 72,
"num_lines": 23,
"path": "/end2/pattern_matchers/glob_.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from glob import glob\nimport re\n\nfrom end2.pattern_matchers.default import (\n DefaultModulePatternMatcher,\n DefaultTestCasePatternMatcher\n)\n\n\nclass GlobModulePatternMatcher(DefaultModulePatternMatcher):\n @classmethod\n def parse_str(cls, pattern: str, include: bool = True):\n return cls(glob(pattern, recursive=True), pattern, include)\n\n\nclass GlobTestCasePatternMatcher(DefaultTestCasePatternMatcher):\n @classmethod\n def parse_str(cls, pattern: str, include: bool = True):\n pattern_ = pattern.replace('?', '.').replace('*', '.*')\n return cls([], pattern_, True)\n\n def included(self, func) -> bool:\n return True if re.match(self._pattern, func.__name__) else False\n"
},
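GlobTestCasePatternMatcher above translates a glob into a regex by mapping '?' to '.' and '*' to '.*'. A small standalone demonstration of exactly that translation:

import re

def glob_to_regex(pattern: str) -> str:
    # '?' matches one character, '*' matches any run of characters
    return pattern.replace('?', '.').replace('*', '.*')

assert re.match(glob_to_regex('test_?'), 'test_1')
assert re.match(glob_to_regex('test_*'), 'test_anything')
assert not re.match(glob_to_regex('test_?x'), 'test_1y')
# Like the matcher above, re.match anchors only at the start of the name.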
{
"alpha_fraction": 0.7268292903900146,
"alphanum_fraction": 0.7463414669036865,
"avg_line_length": 17.545454025268555,
"blob_id": "05eba7b50fdd1d8608f0a190fe54ea5537b4e603",
"content_id": "ad24d7c002cec15fa13c552626134f4baa2e7d4a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 205,
"license_type": "permissive",
"max_line_length": 41,
"num_lines": 11,
"path": "/examples/package_objects/package1/package2a/__init__.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "\nfrom end2 import setup, teardown\n\n\n@setup\ndef my_smoke_setup(global_object):\n global_object.package2a = \"package2a\"\n\n\n@teardown\ndef my_smoke_teardown(global_object):\n print(global_object.package2a)\n"
},
{
"alpha_fraction": 0.758400022983551,
"alphanum_fraction": 0.758400022983551,
"avg_line_length": 28.761905670166016,
"blob_id": "d214af01f02fdbc2e113ba60a6e5ec9c4ea2fcc1",
"content_id": "ca0bfc5cd567fcae23d9fabbb65899bd897d8ffb",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 625,
"license_type": "permissive",
"max_line_length": 74,
"num_lines": 21,
"path": "/end2/pattern_matchers/__init__.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from .default import (\n DefaultModulePatternMatcher,\n DefaultTestCasePatternMatcher\n)\nfrom .glob_ import (\n GlobModulePatternMatcher,\n GlobTestCasePatternMatcher\n)\nfrom .regex import (\n RegexModulePatternMatcher,\n RegexTestCasePatternMatcher\n)\nfrom .tag import (\n TagModulePatternMatcher,\n TagTestCasePatternMatcher\n)\n\n__all__ = ['DefaultModulePatternMatcher', 'DefaultTestCasePatternMatcher',\n 'GlobModulePatternMatcher', 'GlobTestCasePatternMatcher',\n 'RegexModulePatternMatcher', 'RegexTestCasePatternMatcher',\n 'TagModulePatternMatcher', 'TagTestCasePatternMatcher']\n"
},
{
"alpha_fraction": 0.5714017748832703,
"alphanum_fraction": 0.5728073716163635,
"avg_line_length": 35.79999923706055,
"blob_id": "8c8751badeeae0d3c72cf93307e423d2e46947a6",
"content_id": "a9e05820fbb3153af6d569f2473819bc72cc9cce",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 10672,
"license_type": "permissive",
"max_line_length": 148,
"num_lines": 290,
"path": "/end2/models/testing_containers.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from inspect import getmro\nimport os\nimport pathlib\nfrom typing import (\n Dict,\n Iterator,\n List,\n Set\n)\n\nfrom end2.constants import RunMode\nfrom end2.fixtures import (\n empty_func,\n get_fixture,\n on_failures_in_module,\n package_test_parameters,\n setup,\n setup_module,\n teardown,\n teardown_module\n)\nfrom end2.pattern_matchers import (\n DefaultModulePatternMatcher,\n DefaultTestCasePatternMatcher\n)\n\n\ndef build_full_name(module_name: str, test_name: str) -> str:\n return f'{module_name}::{test_name}'\n\n\nclass Importable:\n def __init__(self, path: str, module_pattern_matcher: DefaultModulePatternMatcher, test_pattern_matcher: DefaultTestCasePatternMatcher) -> None:\n self.path = os.path.normpath(path)\n self.module_matcher = module_pattern_matcher\n self.test_matcher = test_pattern_matcher\n\n def __repr__(self):\n return self.path\n\n\nclass TestMethod:\n def __init__(self, func, setup_func=empty_func, teardown_func=empty_func\n , parameterized_tuple: tuple = None) -> None:\n self.name = func.__name__\n self.full_name = build_full_name(func.__module__, self.name)\n self.func = func\n self.description = func.__doc__\n self.setup_func = setup_func\n self.teardown_func = teardown_func\n self.parameterized_tuple = parameterized_tuple or tuple()\n self.metadata = getattr(func, 'metadata', {})\n\n def __eq__(self, rhs: 'TestMethod') -> bool:\n return self.full_name == rhs.full_name\n\n def __hash__(self) -> int:\n return id(self.full_name)\n\n\nclass TestGroups:\n def __init__(self, name: str, tests: Dict[str, TestMethod]\n , setup_func=empty_func, teardown_func=empty_func) -> None:\n self.name = name\n self.setup_func = setup_func\n self.tests = tests\n self.teardown_func = teardown_func\n self.children: List[TestGroups] = []\n\n def append(self, group: 'TestGroups') -> None:\n self.children.append(group)\n\n def update(self, same_group: 'TestGroups') -> None:\n for ignored in same_group.ignored_tests:\n self.tests.pop(ignored, None)\n self.tests.update(same_group.tests)\n self.ignored_tests.update(same_group.ignored_tests)\n\n def has_tests(self) -> bool:\n has_tests = bool(self.tests)\n if not has_tests:\n for child in self.children:\n has_tests = bool(child.tests)\n if has_tests:\n break\n return has_tests\n\n\nclass DynamicMroMixin:\n @classmethod\n def _get_mros(cls):\n return getmro(cls)[::-1]\n\n @classmethod\n def __getattrcls__(cls, name):\n for a in cls._get_mros():\n if hasattr(a, name):\n return getattr(a, name)\n raise AttributeError(f\"No atrribute named {name}\")\n \n @classmethod\n def __setattrcls__(cls, name: str, value) -> None:\n for a in cls._get_mros():\n if hasattr(a, name):\n setattr(a, name, value)\n return\n setattr(cls, name, value)\n\n def __getattr__(self, name: str):\n return self.__getattrcls__(name)\n \n def __setattr__(self, name: str, value) -> None:\n return self.__setattrcls__(name, value)\n\n @staticmethod\n def add_mixin(name: str, current_mixin):\n return type(\n f\"{name.replace('.', 'Dot')}Dot{DynamicMroMixin.__name__}\",\n (current_mixin.__class__,),\n {}\n )()\n\n\nclass TestModule:\n def __init__(self, module, groups: TestGroups, ignored_tests: set = None) -> None:\n self.module = module\n self.name: str = module.__name__\n self.file_name = os.path.relpath(module.__file__)\n self.last_modified = pathlib.Path(self.file_name).stat().st_mtime\n self.run_mode: RunMode = module.__run_mode__\n self.is_parallel = self.run_mode is RunMode.PARALLEL\n self.description = module.__doc__\n self.groups = groups\n self.ignored_tests 
= ignored_tests or set()\n self.on_failures_in_module = get_fixture(self.module, on_failures_in_module.__name__)\n\n def __eq__(self, rhs: 'TestModule') -> bool:\n return self.name == rhs.name\n\n def __hash__(self) -> int:\n return id(self.module)\n\n def update(self, same_module: 'TestModule') -> None:\n for ignored in same_module.ignored_tests:\n for child in self.groups.children:\n child.tests.pop(ignored, None)\n for same_child in same_module.groups.children:\n for self_child in self.groups.children:\n if child.name == self_child.name:\n self_child.tests.update(same_child.tests)\n self.ignored_tests.update(same_module.ignored_tests)\n\n\nclass TestPackage:\n def __init__(self, package, sequential_modules: set = None, parallel_modules: set = None\n , package_object: DynamicMroMixin = None) -> None:\n self.package = package\n self.setup_func = get_fixture(self.package, setup.__name__)\n self.teardown_func = get_fixture(self.package, teardown.__name__)\n self.setup_module_func = get_fixture(self.package, setup_module.__name__)\n self.teardown_module_func = get_fixture(self.package, teardown_module.__name__)\n self.package_test_parameters_func = get_fixture(self.package, package_test_parameters.__name__, default=None)\n self.name: str = self.package.__name__\n self.description = self.package.__doc__\n self.package_object = package_object or DynamicMroMixin()\n self.sequential_modules: Set[TestModule] = sequential_modules or set()\n self.parallel_modules: Set[TestModule] = parallel_modules or set()\n self.sub_packages: List[TestPackage] = []\n\n def __eq__(self, o: 'TestPackage') -> bool:\n return self.name == o.name\n\n def setup(self) -> None:\n self.setup_func(self.package_object)\n\n def teardown(self) -> None:\n self.teardown_func(self.package_object)\n\n def append(self, package) -> None:\n package_object = DynamicMroMixin.add_mixin(package.__name__, self.package_object)\n self.sub_packages.append(TestPackage(package, package_object=package_object))\n if self.sub_packages[-1].package_test_parameters_func is None:\n self.sub_packages[-1].package_test_parameters_func = self.package_test_parameters_func\n\n def append_module(self, module: TestModule) -> None:\n if module.is_parallel:\n self.parallel_modules.add(module)\n else:\n self.sequential_modules.add(module)\n\n def tail(self, package, index: int = -1) -> None:\n package_object = DynamicMroMixin.add_mixin(package.__name__, self.package_object)\n self._tail(TestPackage(package, package_object=package_object), index)\n\n def _tail(self, package, index: int = -1) -> None:\n sub_packages = self.sub_packages\n if sub_packages:\n sub_package = sub_packages[index]\n sub_package._tail(package, -1)\n else:\n self.sub_packages.append(package)\n\n def last(self, index: int = -1) -> 'TestPackage':\n if not self.sub_packages:\n return self\n sub_package = self.sub_packages[index]\n while sub_package.sub_packages:\n sub_package = sub_package.sub_packages[-1]\n return sub_package\n\n def find(self, rhs: str, index: int = -1) -> 'TestPackage':\n if self.name == rhs:\n return self\n elif self.sub_packages:\n return self.sub_packages[index].find(rhs)\n\n\nclass TestPackageTree:\n def __init__(self, package = None, modules = None) -> None:\n self.packages = [TestPackage(package, modules)] if package else []\n\n def __iter__(self) -> Iterator[TestPackage]:\n def _recurse_sub_packages(sub_package_: TestPackage):\n for sub_package in sub_package_.sub_packages:\n yield sub_package\n yield from _recurse_sub_packages(sub_package)\n\n for package in 
self.packages:\n yield package\n for sub_package in package.sub_packages:\n yield sub_package\n yield from _recurse_sub_packages(sub_package)\n\n def find(self, rhs: TestPackage) -> TestPackage:\n for package in self.packages:\n if package == rhs:\n return package\n elif package.sub_packages:\n for sub_package in package.sub_packages:\n if sub_package == rhs:\n return sub_package\n\n def find_by_str(self, rhs: str) -> TestPackage:\n for package in self.packages:\n if package.name == rhs:\n return package\n elif package.sub_packages:\n for sub_package in package.sub_packages:\n if sub_package.name == rhs:\n return sub_package\n\n def append(self, package: TestPackage) -> None:\n found_package = self.find(package)\n if found_package:\n self.merge(found_package, package)\n else:\n found_parent = self.find_by_str(\".\".join(package.name.split('.'))[:-1])\n if found_parent:\n found_parent.sub_packages.append(package)\n else:\n self.packages.append(package)\n\n def merge(self, lhs: TestPackage, rhs: TestPackage) -> None:\n for rm in rhs.sequential_modules:\n updated = False\n for lm in lhs.sequential_modules:\n updated = False\n if lm == rm:\n updated = True\n lm.update(rm)\n break\n if not updated:\n lhs.sequential_modules.add(rm)\n for i, lhs_sp in enumerate(lhs.sub_packages):\n if len(rhs.sub_packages) - 1 > i:\n self.merge(lhs_sp, rhs.sub_packages[i])\n\n for rm in rhs.parallel_modules:\n updated = False\n for lm in lhs.parallel_modules:\n updated = False\n if lm == rm:\n updated = True\n lm.update(rm)\n break\n if not updated:\n lhs.parallel_modules.add(rm)\n for i, lhs_sp in enumerate(lhs.sub_packages):\n if len(rhs.sub_packages) - 1 > i:\n self.merge(lhs_sp, rhs.sub_packages[i])\n"
},
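The DynamicMroMixin in the record above gives every sub-package a fresh class derived from its parent's, so attribute lookup walks the MRO: children see ancestor attributes, while parents never see child attributes. A stripped-down sketch of the idea (the names here are illustrative):

class Root:
    pass

def child_of(parent_obj, name: str):
    # type(name, bases, namespace) builds a subclass of the parent's class
    return type(f'{name.capitalize()}Object', (type(parent_obj),), {})()

root = Root()
type(root).shared = 'set-by-root'          # class attribute, inherited below
pkg1 = child_of(root, 'package1')
pkg2 = child_of(pkg1, 'package2')
assert pkg2.shared == 'set-by-root'        # found via the MRO
type(pkg1).own = 'set-by-package1'
assert pkg2.own == 'set-by-package1'       # children inherit from package1
assert not hasattr(root, 'own')            # the parent is unaffected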
{
"alpha_fraction": 0.6161380410194397,
"alphanum_fraction": 0.6217350959777832,
"avg_line_length": 26.139240264892578,
"blob_id": "a1409b0caae21e8f9af18d6cfec6ee95c0c12019",
"content_id": "894b2a50d29adc5ab953fab3056e30f25de5c63c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2144,
"license_type": "permissive",
"max_line_length": 90,
"num_lines": 79,
"path": "/tests/unit/fixtures.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "import unittest\n\nimport end2\nfrom end2.fixtures import package_test_parameters\n\n\nclass TestFixtures(unittest.TestCase):\n @end2.setup\n def setup_(self):\n pass\n\n @end2.setup_test\n def setup_test_(self):\n pass\n\n @end2.teardown\n def teardown_(self):\n pass\n\n @end2.teardown_test\n def teardown_test_(self):\n pass\n\n @end2.metadata(tags=['a', 'b', 'c'])\n def metadata_(self):\n pass\n\n @end2.parameterize([\n ('a', 'b', 'c'),\n ('a', 'b', 'c')\n ])\n def parameterize_(self):\n pass\n\n @end2.on_failures_in_module\n def on_failures_in_module_(self):\n pass\n\n @package_test_parameters\n def package_test_parameters_(self, logger, package_object):\n return (logger,), {}\n\n def my_failure_step(self, *args, **kwargs):\n self.abc = True\n\n def test_setup(self):\n self.assertTrue(hasattr(self.setup_, 'setup'))\n\n def test_setup_test(self):\n self.assertTrue(hasattr(self.setup_test_, 'setup_test'))\n\n def test_teardown(self):\n self.assertTrue(hasattr(self.teardown_, 'teardown'))\n\n def test_teardown_test(self):\n self.assertTrue(hasattr(self.teardown_test_, 'teardown_test'))\n\n def test_metadata(self):\n self.assertTrue(hasattr(self.metadata_, 'metadata'))\n self.assertEqual(self.metadata_.metadata['tags'], ['a', 'b', 'c'])\n\n def test_parameterize(self):\n self.assertTrue(hasattr(self.parameterize_, 'names'))\n self.assertTrue(hasattr(self.parameterize_, 'parameterized_list'))\n self.assertEqual(len(self.parameterize_.names), 2)\n self.assertEqual(len(self.parameterize_.parameterized_list), 2)\n\n def test_on_failures_in_module(self):\n self.assertTrue(hasattr(self.on_failures_in_module_, 'on_failures_in_module'))\n\n def test_on_test_failure(self):\n @end2.on_test_failure(self.my_failure_step)\n def dd():\n pass\n dd.on_test_failure()\n self.assertTrue(hasattr(self, 'abc'))\n\n def test_on_test_failure(self):\n self.assertTrue(hasattr(self.package_test_parameters_, 'package_test_parameters'))\n"
},
{
"alpha_fraction": 0.6144737005233765,
"alphanum_fraction": 0.6151315569877625,
"avg_line_length": 51.41379165649414,
"blob_id": "2837d0f6e70a55d037e0cd9962edcc077212814e",
"content_id": "11b9bf8f79aa0a1f406d5afbba012b2cef7f58d4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1520,
"license_type": "permissive",
"max_line_length": 150,
"num_lines": 29,
"path": "/tests/unit/arg_parser.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "import unittest\n\nfrom end2.arg_parser import SuiteArg\n\n\nclass TestSuiteArg(unittest.TestCase):\n def test_b_path_found(self):\n self.assertEqual(SuiteArg._resolve_paths(paths={'a'}, suite_aliases={'a': 'b'}, disabled_suites=[]), {'b'})\n\n def test_b_path_still_found(self):\n self.assertEqual(SuiteArg._resolve_paths(paths={'a', 'b'}, suite_aliases={'a': 'b'}, disabled_suites=[]), {'b'})\n\n def test_b_and_c_paths_found(self):\n self.assertEqual(SuiteArg._resolve_paths(paths={'a', 'b'}, suite_aliases={'a': 'c'}, disabled_suites=[]) ^ {'b', 'c'}, set())\n\n def test_a_b_c_are_all_paths(self):\n self.assertEqual(SuiteArg._resolve_paths(paths={'a', 'b', 'c'}, suite_aliases={}, disabled_suites=[]) ^ {'a', 'b', 'c'}, set())\n\n def test_nested__a_resolves_to_b_and_c_which_resolves_to_d_and_e(self):\n self.assertEqual(SuiteArg._resolve_paths(paths={'a'}, suite_aliases={'a': 'b c', 'b': 'd', 'c': 'e'}, disabled_suites=[]) ^ {'d', 'e'}, set())\n\n def test_disabled_suites_resolve_to_empty_path(self):\n self.assertEqual(SuiteArg._resolve_paths(paths={'a'},suite_aliases= {'a': 'b'}, disabled_suites=['b']), set())\n\n def test_disabled_alias_but_path_still_provided_so_b_exists(self):\n self.assertEqual(SuiteArg._resolve_paths(paths={'a', 'b'}, suite_aliases={'a': 'b'}, disabled_suites=['a']), {'b'})\n\n def test_c_path_found(self):\n self.assertEqual(SuiteArg._resolve_paths(paths={'a'}, suite_aliases={'a': 'c'}, disabled_suites=['b']) ^ {'c'}, set())\n"
},
{
"alpha_fraction": 0.5980519652366638,
"alphanum_fraction": 0.5987012982368469,
"avg_line_length": 26.01754379272461,
"blob_id": "827ab52433ddd18f4d38968c65cb7c91c2ac5ac7",
"content_id": "150e1e0e0de1d8c9057b081015dc17813643cb1a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1540,
"license_type": "permissive",
"max_line_length": 93,
"num_lines": 57,
"path": "/end2/pattern_matchers/default.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from typing import (\n Callable,\n List\n)\n\n\nclass PatternMatcherBase:\n excluder = '!'\n delimiter = ','\n\n def __init__(self, items: List[str], pattern: str, include: bool):\n self._items = items\n self._pattern = pattern\n self._include = include\n\n @classmethod\n def parse_str(cls, pattern: str, include: bool = True):\n index, include = None, True\n if pattern.startswith(cls.excluder):\n index = 1\n include = False\n return cls(pattern[index:].split(cls.delimiter) if pattern else [], pattern, include)\n\n def __str__(self) -> str:\n return f\"{'include' if self._include else 'exclude'}: {self._items}\"\n\n @property\n def included_items(self) -> List[str]:\n return self._items if self._include else []\n\n @property\n def excluded_items(self) -> List[str]:\n return self._items if not self._include else []\n\n def included(self, item: str) -> bool:\n if item in self._items:\n value = self._include\n else:\n value = not self._include\n if not self._items:\n value = True\n return value\n\n def excluded(self, item: str) -> bool:\n return not self.included(item)\n\n\nclass DefaultModulePatternMatcher(PatternMatcherBase):\n delimiter = ';'\n\n def module_included(self, module) -> bool:\n return True\n\n\nclass DefaultTestCasePatternMatcher(PatternMatcherBase):\n def func_included(self, func: Callable) -> bool:\n return self.included(func.__name__)\n"
},
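Usage sketch for the matcher above (assuming the end2 package shown in these records is importable): a leading '!' flips the item list from an include-list to an exclude-list, and an empty pattern includes everything.

from end2.pattern_matchers import DefaultTestCasePatternMatcher

includes = DefaultTestCasePatternMatcher.parse_str('test_a,test_b')
assert includes.included('test_a') and not includes.included('test_c')

excludes = DefaultTestCasePatternMatcher.parse_str('!test_a,test_b')
assert not excludes.included('test_a') and excludes.included('test_c')

everything = DefaultTestCasePatternMatcher.parse_str('')
assert everything.included('anything')  # empty pattern matches all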
{
"alpha_fraction": 0.6920473575592041,
"alphanum_fraction": 0.6920473575592041,
"avg_line_length": 22.639999389648438,
"blob_id": "0a9a7ad09e8e41a94fcb7045bb1874589209d918",
"content_id": "43c749528d9634e6cc6cbfc9775aeff408a6f5ca",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 591,
"license_type": "permissive",
"max_line_length": 70,
"num_lines": 25,
"path": "/end2/__init__.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "# Only exporting stuff commonly used in test modules.\nfrom .constants import RunMode\nfrom .exceptions import (\n IgnoreTestException,\n SkipTestException\n)\nfrom .fixtures import (\n on_failures_in_module,\n on_test_failure,\n metadata,\n parameterize,\n setup,\n setup_test,\n teardown,\n teardown_test\n)\n\n__all__ = [\n 'IgnoreTestException', 'on_failures_in_module', 'on_test_failure',\n 'metadata', 'parameterize', 'RunMode', 'setup', 'setup_test',\n 'SkipTestException', 'teardown', 'teardown_test'\n]\n\nPARALLEL = RunMode.PARALLEL\nSEQUENTIAL = RunMode.SEQUENTIAL\n"
},
{
"alpha_fraction": 0.7227723002433777,
"alphanum_fraction": 0.7425742745399475,
"avg_line_length": 17.272727966308594,
"blob_id": "980efd90ba2bd1ecfd477d534079bb679fd13ed4",
"content_id": "6f58486b9c47ae689bb65039b41e3b976de08e4d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 202,
"license_type": "permissive",
"max_line_length": 39,
"num_lines": 11,
"path": "/examples/package_objects/package1/package2a/package3/__init__.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "\nfrom end2 import setup, teardown\n\n\n@setup\ndef my_smoke_setup(global_object):\n global_object.package3 = \"package3\"\n\n\n@teardown\ndef my_smoke_teardown(global_object):\n print(global_object.package3)\n"
},
{
"alpha_fraction": 0.6268733739852905,
"alphanum_fraction": 0.629801869392395,
"avg_line_length": 44.3515625,
"blob_id": "ae88963e433200783c5b6d1205c36292e764cfe3",
"content_id": "2c62abda4bc1369b4352e5f35e00e2e7e55a2498",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5805,
"license_type": "permissive",
"max_line_length": 135,
"num_lines": 128,
"path": "/end2/arg_parser.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "import argparse\nfrom typing import (\n Dict,\n List,\n Set\n)\nfrom end2.models.testing_containers import Importable\n\nfrom end2.resource_profile import (\n get_last_run_rc,\n get_rc\n)\nfrom end2.pattern_matchers import (\n DefaultModulePatternMatcher,\n DefaultTestCasePatternMatcher,\n GlobModulePatternMatcher,\n GlobTestCasePatternMatcher,\n RegexModulePatternMatcher,\n RegexTestCasePatternMatcher,\n TagModulePatternMatcher,\n TagTestCasePatternMatcher\n)\n\n\ndef default_parser() -> argparse.ArgumentParser:\n rc = get_rc()\n parent_parser = argparse.ArgumentParser()\n parent_parser.add_argument('--suite', nargs='*', action=SuiteFactoryAction,\n help=\"\"\"works by specifying a file path examples:\nfolder:\n--suite folder\nfile:\n --suite path/to/file1.py path/to/file2.py\nfile-delimited:\n--suite path/to/file1.py;file2.py\ntest-case:\n--suite path/to/file1.py::test_1\ntest-case-delimited:\n--suite path/to/file.py::test_1,test_2\nexcluding - anything on the right side of a '\\!' will be excluded:\n--suite path/to/\\!file.py # will run everything under path/to except path/to/file.py\n--suite path/to/file.py::\\!test_1,test_2 # will run everything under path/to/file.py except test_1 and test_2\"\"\")\n parent_parser.add_argument('--suite-glob', nargs='*', action=SuiteFactoryAction,\n help=\"List of glob expression to search for tests\")\n parent_parser.add_argument('--suite-regex', nargs='*', action=SuiteFactoryAction,\n help=\"List of regex expression to search for tests\")\n parent_parser.add_argument('--suite-tag', nargs='*', action=SuiteFactoryAction,\n help=\"List of path-tags to search for tests\")\n parent_parser.add_argument('--suite-last-failed', nargs=0, action=SuiteFactoryAction,\n help=\"List of regex expression to search for tests\")\n parent_parser.add_argument('--max-workers', type=int, default=rc['settings'].getint('max-workers'),\n help='Total number of workers allowed to run concurrently')\n parent_parser.add_argument('--max-log-folders', type=int, default=rc['settings'].getint('max-log-folders'),\n help='Total number of max log folders')\n parent_parser.add_argument('--no-concurrency', action='store_true', default=rc['settings'].getboolean('no-concurrency'),\n help='Make all tests run sequentially')\n parent_parser.add_argument('--stop-on-fail', action='store_true', default=rc['settings'].getboolean('stop-on-fail'),\n help='Make all tests run sequentially')\n parent_parser.add_argument('--event-timeout', type=float, default=rc['settings'].getfloat('event-timeout'),\n help='Timeout value in seconds used if end() is not called in time')\n parent_parser.add_argument('--watch', action='store_true', help='Watches files matched in suite arg')\n return parent_parser\n\n\nclass SuiteArg:\n rc_alias = 'suite-alias'\n rc_disabled = 'suite-disabled'\n\n def __init__(self, paths: List[str], module_class: DefaultModulePatternMatcher, test_class: DefaultTestCasePatternMatcher) -> None:\n self.paths = []\n self.excluded_paths = []\n rc = get_rc()\n disabled_suites = list(rc[self.rc_disabled].keys())\n for path in self._resolve_paths(set(paths), rc[self.rc_alias], disabled_suites):\n paths_str, tests_str = path, ''\n if '::' in path:\n paths_str, tests_str = path.split('::')\n module_matcher = module_class.parse_str(paths_str)\n if module_matcher.included_items:\n for path in module_matcher.included_items:\n self.paths.append(\n Importable(path, module_matcher, test_class.parse_str(tests_str))\n )\n else:\n self.excluded_paths.extend(module_matcher.excluded_items)\n 
self.excluded_paths.extend(disabled_suites)\n\n @staticmethod\n def _resolve_paths(paths: Set[str], suite_aliases: Dict[str, str], disabled_suites: List[str]) -> Set[str]:\n paths_ = set()\n for path in paths:\n if path not in disabled_suites:\n if path in suite_aliases:\n paths_ |= SuiteArg._resolve_paths(\n suite_aliases[path].split(' '), suite_aliases, disabled_suites\n )\n else:\n paths_.add(path)\n return paths_\n\n def __str__(self) -> str:\n temp_ = {\n \"included_modules\": self.paths,\n \"excluded_modules\": self.excluded_paths\n }\n return str(temp_)\n\n\nclass SuiteFactoryAction(argparse.Action):\n def __call__(self, parser, namespace, values, option_string=None) -> None:\n if values or option_string == '--suite-last-failed':\n arg_to_name = f\"_parse_{option_string[2:].replace('-', '_')}\"\n setattr(namespace, 'suite', getattr(self, arg_to_name)(values))\n\n def _parse_suite(self, suite: list) -> SuiteArg:\n return SuiteArg(suite, DefaultModulePatternMatcher, DefaultTestCasePatternMatcher)\n\n def _parse_suite_glob(self, suite: list) -> SuiteArg:\n return SuiteArg(suite, GlobModulePatternMatcher, GlobTestCasePatternMatcher)\n\n def _parse_suite_regex(self, suite: list) -> SuiteArg:\n return SuiteArg(suite, RegexModulePatternMatcher, RegexTestCasePatternMatcher)\n\n def _parse_suite_tag(self, suite: list) -> SuiteArg:\n return SuiteArg(suite, TagModulePatternMatcher, TagTestCasePatternMatcher)\n\n def _parse_suite_last_failed(self, _: list) -> SuiteArg:\n return self._parse_suite(get_last_run_rc()['failures'])\n"
},
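SuiteFactoryAction above dispatches on which --suite-* flag was used and stores the parsed result under the single canonical 'suite' attribute. A self-contained sketch of that custom-Action pattern (the flag names here are made up for illustration):

import argparse

class PathsAction(argparse.Action):
    def __call__(self, parser, namespace, values, option_string=None):
        # option_string identifies the flag that triggered this action
        if option_string == '--paths-upper':
            values = [v.upper() for v in values]
        setattr(namespace, 'paths', values)  # canonical destination

p = argparse.ArgumentParser()
p.add_argument('--paths', nargs='*', action=PathsAction)
p.add_argument('--paths-upper', nargs='*', action=PathsAction)
assert p.parse_args(['--paths-upper', 'a', 'b']).paths == ['A', 'B']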
{
"alpha_fraction": 0.5367021560668945,
"alphanum_fraction": 0.5718085169792175,
"avg_line_length": 26.246376037597656,
"blob_id": "f0da318b922df54cd53c942da194df9b8a289754",
"content_id": "55189c3d58968965d32e089f841272c52a7ba502",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1880,
"license_type": "permissive",
"max_line_length": 94,
"num_lines": 69,
"path": "/examples/simple/regression/sample3.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from end2.fixtures import setup\nfrom end2 import (\n RunMode,\n setup,\n setup_test,\n teardown,\n teardown_test\n)\n\n\n__run_mode__ = RunMode.PARALLEL\n\n\ndef test_python_gotcha_3(logger):\n assert round(1.5) == 2\n assert round(2.5) == 2\n assert round(3.5) == 4\n assert round(4.5) == 4\n logger.info(\"Odds round up while evens round down\")\n\n\nclass Group1:\n @staticmethod\n @setup\n def my_setup(logger):\n logger.info(f'{Group1.__name__}.{Group1.my_setup.__name__}')\n\n @staticmethod\n def test_1(logger):\n logger.info(f'{Group1.__name__}.{Group1.test_1.__name__}')\n\n class Group2:\n @staticmethod\n @setup\n def my_setup(logger):\n logger.info(f'{Group1.Group2.__name__}.{Group1.Group2.my_setup.__name__}')\n\n @staticmethod\n @setup_test\n def my_setup_test(logger):\n logger.info(f'{Group1.Group2.__name__}.{Group1.Group2.my_setup_test.__name__}')\n\n @staticmethod\n def test_22(logger):\n logger.info(f'{Group1.Group2.__name__}.{Group1.Group2.test_22.__name__}')\n\n class Group3:\n @staticmethod\n @setup\n def my_setup(logger):\n logger.info(f'{Group1.Group3.__name__}.{Group1.Group3.my_setup.__name__}')\n\n @staticmethod\n @teardown\n def my_teardown(logger):\n logger.info(f'{Group1.Group2.__name__}.{Group1.Group3.my_teardown.__name__}')\n\n @staticmethod\n @teardown_test\n def my_teardown_test(logger):\n logger.info(f'{Group1.Group2.__name__}.{Group1.Group3.my_teardown_test.__name__}')\n\n @staticmethod\n def test_3(logger):\n logger.info(f'{Group1.Group3.__name__}.{Group1.Group3.test_3.__name__}')\n\n @staticmethod\n def test_33(logger):\n logger.info(f'{Group1.Group3.__name__}.{Group1.Group3.test_33.__name__}')\n"
},
{
"alpha_fraction": 0.6684492230415344,
"alphanum_fraction": 0.6773618459701538,
"avg_line_length": 25.714284896850586,
"blob_id": "b77daa7bdfcb6c0867ee2e3ad0d99b1c2b663a6b",
"content_id": "804944ebd7b470376fbca5b0eb2adf4974c185f6",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 561,
"license_type": "permissive",
"max_line_length": 77,
"num_lines": 21,
"path": "/examples/package_objects/run.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\nimport os\nimport sys\n\nsys.path.insert(0, os.path.join('..', '..'))\nfrom end2.runner import start_test_run\nfrom end2.arg_parser import default_parser\n\n\n\nif __name__ == '__main__':\n # Run from inside examples\\simple\n ## --suite package1\n args = default_parser().parse_args()\n\n def test_parameters(logger, package_object):\n return (logger, package_object), {}\n\n test_suite_result, failed_imports = start_test_run(args, test_parameters)\n print(test_suite_result, failed_imports)\n exit(test_suite_result.exit_code)\n"
},
{
"alpha_fraction": 0.7070063948631287,
"alphanum_fraction": 0.7197452187538147,
"avg_line_length": 16.44444465637207,
"blob_id": "f0fc3a402c7cc769eafa7b848e11741744cb040e",
"content_id": "411767900b0d6658f997f60f10f9ec7a6ede1fab",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 157,
"license_type": "permissive",
"max_line_length": 74,
"num_lines": 9,
"path": "/examples/simple/smoke/ignored_module.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from end2 import RunMode\n\n\n__run_mode__ = RunMode.PARALLEL\n\n\n\ndef test_1(logger):\n assert False, \"I SHOULD NOT RUN BECAUSE THIS MODULE SHOULD BE IGNORED\"\n"
},
{
"alpha_fraction": 0.5662847757339478,
"alphanum_fraction": 0.5973813533782959,
"avg_line_length": 20.068965911865234,
"blob_id": "8753b0cfe8738b2f8cd02d467fc79359046d9612",
"content_id": "97facbfafe32a86a95a9d3fc0c5d37c64b8eb448",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 611,
"license_type": "permissive",
"max_line_length": 73,
"num_lines": 29,
"path": "/examples/simple/regression/sample5.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from end2 import RunMode\n\n\n__run_mode__ = RunMode.SEQUENTIAL\n\n\ndef test_python_gotcha_1(logger):\n def append_to(element, to=[]):\n to.append(element)\n return to\n\n my_list = append_to(12)\n assert my_list == [12]\n my_other_list = append_to(42)\n assert my_other_list == [12, 42]\n logger.info(\"Don't put a mutable object as a parameter\")\n\n\ndef test_python_gotcha_2(logger):\n class A:\n x = 1\n\n class C(A):\n pass\n\n assert A.x == 1 and C.x == 1\n A.x = 3\n assert A.x == 3 and C.x == 3\n logger.info('Class variables are handled as dictionaries internally')\n"
},
{
"alpha_fraction": 0.6189336180686951,
"alphanum_fraction": 0.6265506148338318,
"avg_line_length": 44.04901885986328,
"blob_id": "74faa69e6c66f2890d789235674899420443b5bb",
"content_id": "25303dc9ffa3ec07013446729f76a4d73778d456",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4595,
"license_type": "permissive",
"max_line_length": 140,
"num_lines": 102,
"path": "/tests/integration/runner.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "import os\nfrom time import sleep\nimport unittest\n\nfrom end2 import (\n arg_parser,\n runner\n)\nfrom end2.constants import Status\nfrom examples.fake_clients import run as clients\n\n\nclass TestStartRun(unittest.TestCase):\n def setUp(self):\n # Sleeping because I want a different timestamp folder name for each integration test\n sleep(1)\n\n def test_integration_simple(self):\n arg_list=['--suite', os.path.join('examples', 'simple', 'regression')]\n args = arg_parser.default_parser().parse_args(arg_list)\n\n def test_parameters(logger_, package_object):\n return (logger_,), {}\n\n results, _ = runner.start_test_run(args, test_parameters)\n self.assertTrue(all(result.status is not None\n and result.end_time is not None\n and result.duration is not None\n for result in results))\n\n def test_integration_module(self):\n arg_list=['--suite', os.path.join('examples', 'simple', 'smoke', 'sample1.py'), os.path.join('examples', 'simple', 'regression')]\n args = arg_parser.default_parser().parse_args(arg_list)\n\n def test_parameters(logger_, package_object):\n return (logger_,), {}\n\n results, _ = runner.start_test_run(args, test_parameters)\n self.assertTrue(all(result.status is not None\n and result.end_time is not None\n and result.duration is not None\n for result in results))\n\n def test_integration_package_object(self):\n arg_list=['--suite', os.path.join('examples', 'package_objects', 'package1')]\n args = arg_parser.default_parser().parse_args(arg_list)\n\n def test_parameters(logger_, package_object):\n return (logger_, package_object), {}\n\n results, _ = runner.start_test_run(args, test_parameters)\n self.assertTrue(all(result.status is not None\n and result.end_time is not None\n and result.duration is not None\n for result in results))\n\n def test_integration_end(self):\n timeout = 2.0\n arg_list=['--suite', os.path.join('examples', 'fake_clients', 'regression', 'sample3.py::test_32'), '--event-timeout', str(timeout)]\n args = arg_parser.default_parser().parse_args(arg_list)\n\n def test_parameters(logger, package_object):\n return (clients.Client(logger), clients.AsyncClient(logger)), {}\n\n results, _ = runner.start_test_run(args, test_parameters)\n self.assertTrue(results.test_modules[0].test_results[0].status is Status.PASSED)\n\n def test_integration_end_timeout_async(self):\n timeout = 2.0\n arg_list=['--suite', os.path.join('examples', 'fake_clients', 'regression', 'sample3.py::test_33'), '--event-timeout', str(timeout)]\n args = arg_parser.default_parser().parse_args(arg_list)\n\n def test_parameters(logger, package_object):\n return (clients.Client(logger), clients.AsyncClient(logger)), {}\n\n results, _ = runner.start_test_run(args, test_parameters)\n self.assertIn(f'time out reached: {timeout}s', results.test_modules[0].test_results[0].record)\n\n def test_integration_step(self):\n arg_list=['--suite', os.path.join('examples', 'fake_clients', 'regression', 'sample2.py::test_21,test_22')]\n args = arg_parser.default_parser().parse_args(arg_list)\n\n def test_parameters(logger, package_object):\n return (clients.Client(logger), clients.AsyncClient(logger)), {}\n\n results, _ = runner.start_test_run(args, test_parameters)\n self.assertGreater(len(results.test_modules[0].test_results[0].steps), 0)\n self.assertGreater(len(results.test_modules[0].test_results[1].steps), 0)\n\n def test_tag_pattern_matcher(self):\n timeout = 2.0\n arg_list=['--suite-tag', os.path.join('examples', 'fake_clients', 'regression', 'product,'),'--event-timeout', str(timeout)]\n args = 
arg_parser.default_parser().parse_args(arg_list)\n\n def test_parameters(logger, package_object):\n return (clients.Client(logger), clients.AsyncClient(logger)), {}\n\n results, _ = runner.start_test_run(args, test_parameters)\n test_module_names = [x.name for x in results.test_modules]\n self.assertNotIn('examples.fake_clients.regression.sample2', test_module_names)\n self.assertIn('examples.fake_clients.regression.sample3', test_module_names)\n self.assertEqual(results.total_count, 6)\n"
},
{
"alpha_fraction": 0.728723406791687,
"alphanum_fraction": 0.7393617033958435,
"avg_line_length": 16.090909957885742,
"blob_id": "e844fab24ae923220dd63a146d089cdce8de456c",
"content_id": "36b8710a52faad7b78eabee752c118f418324873",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 188,
"license_type": "permissive",
"max_line_length": 37,
"num_lines": 11,
"path": "/examples/simple/__init__.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "\nfrom end2 import setup, teardown\n\n\n@setup\ndef my_smoke_setup(global_object):\n global_object.cheesy = 4\n\n\n@teardown\ndef my_smoke_teardown(global_object):\n print(global_object.cheesy)"
},
{
"alpha_fraction": 0.6389776468276978,
"alphanum_fraction": 0.6400426030158997,
"avg_line_length": 30.299999237060547,
"blob_id": "30e056760e352b1b2b4bbb22356deda47adec003",
"content_id": "754cded0c62380c3e47b2265dd0509ff09a9694e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 939,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 30,
"path": "/end2/pattern_matchers/regex.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from glob import glob\nimport os\nimport re\n\nfrom end2.pattern_matchers.default import (\n DefaultModulePatternMatcher,\n DefaultTestCasePatternMatcher\n)\n\n\nclass RegexModulePatternMatcher(DefaultModulePatternMatcher):\n @classmethod\n def parse_str(cls, pattern: str, include: bool = True):\n items = []\n include = False\n for module in filter(lambda x: not x.endswith('__init__.py'),\n glob(f'.{os.sep}**{os.sep}*.py', recursive=True)):\n if re.match(pattern, module):\n items.append(module)\n include = True\n return cls(items, pattern, include)\n\n\nclass RegexTestCasePatternMatcher(DefaultTestCasePatternMatcher):\n @classmethod\n def parse_str(cls, pattern: str, include: bool = True):\n return cls([], pattern, True)\n\n def included(self, func) -> bool:\n return True if re.match(self._pattern, func.__name__) else False\n"
},
{
"alpha_fraction": 0.6387959718704224,
"alphanum_fraction": 0.6677814722061157,
"avg_line_length": 24.628570556640625,
"blob_id": "5d326afa2853c5b96a9f047783bc9570c190a434",
"content_id": "1a8032a30a10bfb867958f08cd87d65ec0ebbaa7",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 897,
"license_type": "permissive",
"max_line_length": 53,
"num_lines": 35,
"path": "/examples/fake_clients/regression/sample1.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "import end2\n\n__run_mode__ = end2.PARALLEL\n\n\[email protected]_test\ndef my_setup_test(client, async_client):\n client.logger.info('running setup test')\n assert client.get() == {}\n\n\[email protected]\ndef my_teardown(client, async_client):\n client.logger.info('running teardown')\n assert client.delete() is None\n\n\[email protected](tags=['product', 'business'])\nasync def test_11(client, async_client):\n client.logger.info('hi')\n assert await async_client.put({'hi': 11}) is None\n assert await async_client.put({'hi': 12}) is None\n assert await async_client.put({'hi': 13}) is None\n\n\[email protected](tags=['product'])\ndef test_12(client, async_client):\n client.logger.info('hi12')\n assert client.post({'hi': 11}) is None\n assert client.post({'hi': 12}) is None\n assert client.post({'hi': 13}) is None\n\n\ndef test_13(client, async_client):\n assert client.get() == client.get()\n"
},
{
"alpha_fraction": 0.5760171413421631,
"alphanum_fraction": 0.5952890515327454,
"avg_line_length": 36.36000061035156,
"blob_id": "00c7ac1df4f823dcb9e8c584c3764b7bd35ebcd1",
"content_id": "750a3a7f9572577bc14f4cc1cc6f3b3e633d92e1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 934,
"license_type": "permissive",
"max_line_length": 112,
"num_lines": 25,
"path": "/setup.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from distutils.core import setup\n\n\nsetup(\n name = 'end2',\n packages = ['end2'],\n version = '1.3',\n license='MIT', # https://help.github.com/articles/licensing-a-repository\n description = 'A Minimal E2E Test Automation Framework',\n author = 'Jon Wesneski',\n author_email = '[email protected]',\n url = 'https://github.com/jonwesneski/end2',\n keywords = ['end-2-end', 'end2end', 'end-to-end', 'endtoend', 'e2e', 'end2', 'testing', 'qa', 'automation'],\n install_requires=[],\n classifiers=[\n 'Development Status :: 3 - Alpha',\n 'Topic :: Software Development :: Quality Assurance',\n 'Topic :: Software Development :: Testing',\n 'Topic :: Software Development :: Testing :: Acceptance',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n 'Programming Language :: Python :: 3.10',\n ],\n)\n"
},
{
"alpha_fraction": 0.631243884563446,
"alphanum_fraction": 0.6341821551322937,
"avg_line_length": 21.688888549804688,
"blob_id": "7992ef168f04ffa0677c36dfb955ca68ef645848",
"content_id": "5d78efa459e07bdd9c119373dd49530204af136b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2042,
"license_type": "permissive",
"max_line_length": 102,
"num_lines": 90,
"path": "/end2/fixtures.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "import functools\n\nfrom end2.constants import FUNCTION_TYPE\nfrom end2.exceptions import MoreThan1SameFixtureException\n\n\ndef setup_module(func):\n func.setup_module = None\n return func\n\n\ndef teardown_module(func):\n func.teardown_module = None\n return func\n\n\ndef on_failures_in_module(func):\n func.on_failures_in_module = None\n return func\n\n\ndef on_test_failure(func):\n def inner(func_):\n @functools.wraps(func_)\n def wrapper(*args, **kwargs):\n return func_(*args, **kwargs)\n wrapper.on_test_failure = func\n return wrapper\n return inner\n\n\ndef setup(func):\n func.setup = None\n return func\n\n\ndef setup_test(func):\n func.setup_test = None\n return func\n\n\ndef teardown_test(func):\n func.teardown_test = None\n return func\n\n\ndef teardown(func):\n func.teardown = None\n return func\n\n\ndef package_test_parameters(func):\n func.package_test_parameters = None\n return func\n\n\ndef metadata(**kwargs):\n def inner(func):\n func.metadata = kwargs\n return func\n return inner\n\n\ndef parameterize(parameters_list: list, first_arg_is_name: bool = False):\n def wrapper(func):\n if first_arg_is_name:\n func.names = [f'{func.__name__}[{i}] {args[0]}' for i, args in enumerate(parameters_list)]\n func.parameterized_list = tuple(p[1:] for p in parameters_list)\n else:\n func.names = [f'{func.__name__}[{i}]' for i in range(len(parameters_list))]\n func.parameterized_list = tuple(parameters_list)\n return func\n return wrapper\n\n\ndef empty_func(*args, **kwargs) -> None:\n return\n\n\ndef get_fixture(module, name: str, default=empty_func):\n fixture = default\n found = False\n for key in dir(module):\n attribute = getattr(module, key)\n if type(attribute) is FUNCTION_TYPE and hasattr(attribute, name):\n if found:\n raise MoreThan1SameFixtureException(name, module.__name__)\n fixture = attribute\n found = True\n return fixture\n"
},
{
"alpha_fraction": 0.7094594836235046,
"alphanum_fraction": 0.7297297120094299,
"avg_line_length": 25.909090042114258,
"blob_id": "89bbacb02f8763dae55c044a16de6c19d21ca6d1",
"content_id": "0a6319d9cebefe71ae5ee57396ad9e0dea1765fb",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 296,
"license_type": "permissive",
"max_line_length": 52,
"num_lines": 11,
"path": "/examples/package_objects/package1/package2a/package3/test_package3.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from end2 import RunMode\n\n\n__run_mode__ = RunMode.PARALLEL\n\n\ndef test_1(logger, package_objects):\n assert hasattr(package_objects, 'package1')\n assert hasattr(package_objects, 'package2a')\n assert not hasattr(package_objects, 'package2b')\n assert hasattr(package_objects, 'package3')\n"
},
{
"alpha_fraction": 0.6557971239089966,
"alphanum_fraction": 0.6557971239089966,
"avg_line_length": 18.034482955932617,
"blob_id": "bf06d82e1dd73f433b21af9f20c0f1687fc521af",
"content_id": "452335380aa3ac9155a494234acd59f1d047d464",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 552,
"license_type": "permissive",
"max_line_length": 55,
"num_lines": 29,
"path": "/end2/constants.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "\"\"\"\n Constants/Enums for test framework library features\n * Please keep class names alphabetical\n * Please keep variables in classes alphabetical\n\"\"\"\nfrom enum import Enum\n\n\nFUNCTION_TYPE = type(lambda: None)\nTAGS = '__tags__'\n\n\nclass RunMode(Enum):\n PARALLEL = 'parallel'\n SEQUENTIAL = 'sequential'\n\n\nclass Status(Enum):\n FAILED = 'Failed'\n IGNORED = 'Ignored'\n PASSED = 'Passed'\n SKIPPED = 'Skipped'\n\n\nclass ReservedWords(Enum):\n END = 'end'\n LOGGER = 'logger'\n PACKAGE_OBJECT = 'package_object'\n STEP = 'step'\n"
},
{
"alpha_fraction": 0.5716575384140015,
"alphanum_fraction": 0.5762775540351868,
"avg_line_length": 39.326995849609375,
"blob_id": "30b8962242078c820771f248356ebb8ed1a03265",
"content_id": "2ef1f0502c3c9031b449bb14c5c26d819ed97120",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 10606,
"license_type": "permissive",
"max_line_length": 188,
"num_lines": 263,
"path": "/end2/discovery.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "import importlib\nimport inspect\nimport os\nfrom random import shuffle\nfrom typing import (\n List,\n Tuple,\n Callable\n)\n\nfrom end2.fixtures import (\n empty_func,\n get_fixture,\n setup,\n setup_test,\n teardown,\n teardown_test\n)\nfrom end2.constants import (\n FUNCTION_TYPE,\n RunMode\n)\nfrom end2.exceptions import MoreThan1SameFixtureException\nfrom end2.models.testing_containers import (\n Importable,\n TestGroups,\n TestMethod,\n TestModule,\n TestPackage,\n TestPackageTree\n)\nfrom end2.pattern_matchers import (\n DefaultModulePatternMatcher,\n DefaultTestCasePatternMatcher\n)\n\n\ndef _shuffle_dict(dict_: dict) -> dict:\n list_ = list(dict_.items())\n shuffle(list_)\n return dict(list_)\n\n\ndef discover_suite(importables: List[Importable]) -> Tuple[TestPackageTree, set]: \n shuffle(importables)\n failed_imports = set()\n package_tree = TestPackageTree()\n for importable in importables:\n package_name = importable.path.replace(os.sep, '.')\n package = package_tree.find_by_str(package_name)\n if os.path.isdir(importable.path):\n p, f = discover_packages(importable.path, importable.module_matcher, importable.test_matcher, package)\n if p:\n package_tree.append(p)\n failed_imports |= f\n else:\n package_names = []\n if not package:\n names = package_name.split('.')\n if package_name.endswith('.py'):\n names = names[:-2]\n for i in range(len(names)):\n package_names.append(\".\".join(names[:i+1]))\n new_package = importlib.import_module(package_names[0])\n package = TestPackage(new_package)\n for package_name in package_names[1:-1]:\n new_package = importlib.import_module(package_name)\n package.tail(new_package)\n m, f = discover_module(importable.path, importable.module_matcher, importable.test_matcher)\n if m:\n package.append_module(m)\n elif f:\n failed_imports.add(f)\n package_tree.append(package)\n return package_tree, failed_imports\n\n\ndef discover_packages(importable: str, module_pattern_matcher: DefaultModulePatternMatcher, test_pattern_matcher: DefaultTestCasePatternMatcher, test_package: TestPackage = None) -> tuple:\n names = importable.replace(os.sep, '.').split('.')\n package_names = []\n package_ = None\n failed_imports = set()\n if test_package:\n package_names = [f'{test_package.name}.{names[-1]}']\n else:\n for i in range(len(names)):\n package_names.append(\".\".join(names[:i+1]))\n if test_package and test_package.name == \".\".join(names):\n # Handling same package conflict\n new_package = None\n end_package = test_package\n else:\n new_package = importlib.import_module(package_names[0])\n if new_package:\n if test_package:\n package_ = test_package\n package_.tail(new_package)\n else:\n package_ = TestPackage(new_package)\n for package_name in package_names[1:]:\n new_package = importlib.import_module(package_name)\n package_.tail(new_package)\n end_package = package_.find(package_names[-1])\n items = list(filter(lambda x: '__pycache__' not in x and x != '__init__.py', os.listdir(importable)))\n shuffle(items)\n for item in items:\n full_path = os.path.join(importable, item)\n if os.path.isdir(full_path):\n _, f = discover_packages(full_path, module_pattern_matcher, test_pattern_matcher, end_package)\n failed_imports |= f\n else:\n m, f = discover_module(full_path, module_pattern_matcher, test_pattern_matcher)\n if m:\n end_package.append_module(m)\n elif f:\n failed_imports.add(f)\n return package_, failed_imports\n\n\ndef discover_module(importable: str, module_pattern_matcher: DefaultModulePatternMatcher, test_pattern_matcher: 
DefaultTestCasePatternMatcher) -> Tuple[TestModule, str]:\n test_module, error_str = None, ''\n module_str = importable.replace('.py', '').replace(os.sep, '.')\n try:\n module = importlib.import_module(module_str)\n if module_pattern_matcher.module_included(module):\n groups = discover_groups(module, test_pattern_matcher)\n if groups.has_tests():\n test_module = TestModule(module, groups, ignored_tests=set(test_pattern_matcher.excluded_items))\n if test_module.run_mode not in RunMode:\n error = f'{test_module.run_mode} is not a valid RunMode'\n raise Exception(error)\n except ModuleNotFoundError as me:\n if me.name == module_str:\n error_str = f\"Module doesn't exist - {module_str}\"\n else:\n error_str = f\"Failed to load {importable} - {me}\"\n except MoreThan1SameFixtureException as mt1fe:\n error_str = mt1fe.message\n except Exception as e:\n error_str = f'Failed to load {importable} - {e}'\n return test_module, error_str\n\n\ndef discover_groups(test_group, test_pattern_matcher: DefaultTestCasePatternMatcher) -> TestGroups:\n setup_fixture, _, _, teardown_fixture = discover_group_fixtures(test_group)\n group = TestGroups(test_group.__name__, discover_tests(test_group, test_pattern_matcher), setup_fixture, teardown_fixture)\n for name in dir(test_group):\n attribute = getattr(test_group, name)\n if inspect.isclass(attribute) and name.startswith('Group'):\n group.append(discover_groups(attribute, test_pattern_matcher))\n return group\n\n\ndef discover_group_fixtures(group) -> Tuple[Callable]:\n setup_fixture = empty_func\n setup_test_fixture = empty_func\n teardown_fixture = empty_func\n teardown_test_fixture = empty_func\n found_map = {}\n\n def check_if_found_already(fixture: Callable):\n nonlocal found_map\n if found_map.get(fixture.__name__):\n try:\n raise MoreThan1SameFixtureException(fixture.__name__, group.__name__)\n except:\n print(dir(group), group.__name__)\n raise\n if fixture != empty_func:\n found_map[fixture.__name__] = True\n \n for key in dir(group):\n attribute = getattr(group, key)\n if type(attribute) is FUNCTION_TYPE:\n setup_fixture, setup_test_fixture, \\\n teardown_test_fixture, teardown_fixture = \\\n discover_func_fixtures(attribute, setup, setup_test, teardown_test, teardown, default=empty_func)\n check_if_found_already(setup_fixture)\n check_if_found_already(setup_test_fixture)\n check_if_found_already(teardown_test_fixture)\n check_if_found_already(teardown_fixture)\n return setup_fixture, setup_test_fixture, teardown_fixture, teardown_test_fixture\n\n\ndef discover_func_fixtures(func, *fixtures, default=empty_func) -> List[Callable]:\n fixture_names = [x.__name__ for x in fixtures]\n discovered_fixtures = [default] * len(fixtures)\n found_map = {}\n for i, fixture_name in enumerate(fixture_names):\n if hasattr(func, fixture_name):\n if found_map.get(fixture_name):\n raise MoreThan1SameFixtureException(fixture_name, func.__name__)\n discovered_fixtures[i] = fixtures[i]\n found_map[fixture_name] = True\n return discovered_fixtures\n\n\ndef discover_tests(module, test_pattern_matcher: DefaultTestCasePatternMatcher) -> dict:\n tests = {}\n setup_test_ = get_fixture(module, setup_test.__name__)\n teardown_test_ = get_fixture(module, teardown_test.__name__)\n for name in dir(module):\n attribute = getattr(module, name)\n if type(attribute) is FUNCTION_TYPE and name.startswith('test_'):\n if test_pattern_matcher.func_included(attribute):\n if hasattr(attribute, 'parameterized_list'):\n range_ = discover_parameterized_test_range(name, 
attribute.parameterized_list)\n for i in range_:\n attribute.range = range_\n tests[f'{name}[{i}]'] = TestMethod(attribute, setup_test_, teardown_test_, attribute.parameterized_list[i])\n else:\n tests[name] = TestMethod(attribute, setup_test_, teardown_test_)\n return _shuffle_dict(tests)\n\n\ndef discover_parameterized_test_range(test_name: str, parameterized_list: list) -> range:\n open_bracket_index = test_name.find('[') + 1\n close_bracket_index = -1\n range_ = range(0)\n access_token = test_name[open_bracket_index:close_bracket_index]\n if open_bracket_index and test_name[close_bracket_index] == ']' and access_token:\n range_args = [None, None, None]\n if ':' in access_token:\n segments = access_token.split(':')\n if len(segments) <= 3:\n try:\n for i, segment in enumerate(segments):\n if segment == '':\n if i == 0:\n range_args[0] = 0\n elif i == 1:\n range_args[1] = len(parameterized_list)\n else:\n int_ = int(segment)\n if i == 0:\n range_args[0] = int_\n elif i == 1:\n if int_ < 0:\n range_args[1] = len(parameterized_list) + int_\n else:\n range_args[1] = int_\n elif i == 2:\n range_args[2] = int_\n if range_args[2] is not None:\n range_ = range(*range_args)\n else:\n range_ = range(range_args[0], range_args[1])\n except:\n pass\n else:\n try:\n int_ = int(access_token)\n if int_ < 0:\n range_args[0] = len(parameterized_list) - int_\n else:\n range_args[0] = int_\n range_args[1] = range_args[0] + 1\n range_ = range(range_args[0], range_args[1])\n except:\n pass\n elif '[' not in test_name and ']' not in test_name:\n range_ = range(len(parameterized_list))\n return range_\n"
},
{
"alpha_fraction": 0.7303921580314636,
"alphanum_fraction": 0.75,
"avg_line_length": 17.545454025268555,
"blob_id": "e21d593575622f90085aadd1e603d4212b126533",
"content_id": "ce649f2261d204bb454426b24690fa86ac8d0f82",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 204,
"license_type": "permissive",
"max_line_length": 41,
"num_lines": 11,
"path": "/examples/package_objects/package1/package2b/__init__.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from end2 import setup, teardown\n\n\n@setup\ndef my_smoke_setup(global_object):\n global_object.package2b = \"package2b\"\n\n\n@teardown\ndef my_smoke_teardown(global_object):\n print(global_object.package2b)\n"
},
{
"alpha_fraction": 0.6618911027908325,
"alphanum_fraction": 0.6626074314117432,
"avg_line_length": 38.88571548461914,
"blob_id": "4e142a87710abc7d848238e25cd8003975503055",
"content_id": "af163bccd7784550c0d085b218d4913e2baccc15",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1396,
"license_type": "permissive",
"max_line_length": 63,
"num_lines": 35,
"path": "/tests/unit/pattern_matchers.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "import unittest\n\nfrom end2.pattern_matchers.default import PatternMatcherBase\n\n\nclass TestPatternMatcherBase(unittest.TestCase):\n def test_include_item(self):\n matcher_include = PatternMatcherBase(['a'], 'a', True)\n self.assertTrue(matcher_include.included('a'))\n self.assertFalse(matcher_include.excluded('a'))\n \n def test_include_item_no(self):\n matcher_include = PatternMatcherBase(['a'], 'a', True)\n self.assertFalse(matcher_include.included('b'))\n self.assertTrue(matcher_include.excluded('b'))\n\n def test_exclude_item(self):\n matcher_exclude = PatternMatcherBase(['a'], 'a', False)\n self.assertFalse(matcher_exclude.included('a'))\n self.assertTrue(matcher_exclude.excluded('a'))\n\n def test_exclude_item_no(self):\n matcher_exclude = PatternMatcherBase(['a'], 'a', False)\n self.assertTrue(matcher_exclude.included('b'))\n self.assertFalse(matcher_exclude.excluded('b'))\n \n def test_include_all(self):\n matcher_include = PatternMatcherBase([], '', True)\n self.assertTrue(matcher_include.included('a'))\n self.assertFalse(matcher_include.excluded('a'))\n\n def test_exclude_none(self):\n matcher_exclude = PatternMatcherBase(['a'], 'a', False)\n self.assertTrue(matcher_exclude.included('b'))\n self.assertFalse(matcher_exclude.excluded('b'))\n"
},
{
"alpha_fraction": 0.6201395988464355,
"alphanum_fraction": 0.6241276264190674,
"avg_line_length": 33.58620834350586,
"blob_id": "4b8766ed0df35975399bb43d7264829eb09ee338",
"content_id": "0bd0c0a07033c9586fb3187e68d9672a053e6462",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2006,
"license_type": "permissive",
"max_line_length": 99,
"num_lines": 58,
"path": "/end2/pattern_matchers/tag.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "import os\nfrom typing import Callable\n\nfrom end2 import constants\nfrom end2.pattern_matchers.default import (\n PatternMatcherBase,\n DefaultModulePatternMatcher,\n DefaultTestCasePatternMatcher\n)\n\nmodule_tags_matcher = None\nmodule_tags_dict = {}\n\n\nclass TagModulePatternMatcher(DefaultModulePatternMatcher):\n tag_delimiter = ','\n\n @classmethod\n def parse_str(cls, pattern: str, include: bool = True):\n global module_tags_matcher\n pattern_list = pattern.split(os.sep)\n if cls.tag_delimiter in pattern_list[-1]:\n if pattern_list[-1].endswith(cls.tag_delimiter):\n module_tags_matcher = PatternMatcherBase.parse_str(pattern_list[-1][:-1], include)\n else:\n module_tags_matcher = PatternMatcherBase.parse_str(pattern_list[-1], include)\n pattern_list = pattern_list[:-1]\n matcher = super(TagModulePatternMatcher, cls).parse_str(os.sep.join(pattern_list), include)\n return matcher\n\n def module_included(self, module) -> bool:\n include = True\n if module_tags_matcher and hasattr(module, constants.TAGS):\n for tag in module.__tags__:\n include = module_tags_matcher.included(tag)\n if include:\n break\n if include:\n module_tags_dict[module.__name__] = getattr(module, constants.TAGS, [])\n return include\n\n\nclass TagTestCasePatternMatcher(DefaultTestCasePatternMatcher):\n delimiter = ','\n\n def func_included(self, func: Callable) -> bool:\n include = not module_tags_matcher._include if module_tags_matcher else self._include\n matcher = module_tags_matcher or self\n tags = set(module_tags_dict[func.__module__])\n try:\n tags |= set(func.metadata.get('tags', []))\n except AttributeError:\n pass\n for tag in tags:\n include = matcher.included(tag)\n if include:\n break\n return include\n"
},
{
"alpha_fraction": 0.6624549031257629,
"alphanum_fraction": 0.673285186290741,
"avg_line_length": 16.3125,
"blob_id": "a944f786db25b106d47a62c0f504b9a6427ff270",
"content_id": "8fe3bccc02417b7de1202f12ad2089b1bf3a3a08",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 554,
"license_type": "permissive",
"max_line_length": 60,
"num_lines": 32,
"path": "/end2/exceptions.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "\"\"\"\n * Please keep class names alphabetical\n\"\"\"\n\n\nclass IgnoreTestException(Exception):\n pass\n\n\nclass MoreThan1SameFixtureException(Exception):\n def __init__(self, *args):\n # args[0] is fixture name args[1] is module name\n self.message = f'More than 1 {args[0]} in {args[1]}'\n\n def __str__(self) -> str:\n return self.message\n\n\nclass OnEventFailedException(Exception):\n pass\n\n\nclass SkipTestException(Exception):\n pass\n\n\nclass StopTestRunException(Exception):\n pass\n\n\nclass TestCodeException(Exception):\n pass\n"
},
{
"alpha_fraction": 0.6692594885826111,
"alphanum_fraction": 0.6863721013069153,
"avg_line_length": 41.85333251953125,
"blob_id": "b6e47ef93d84fa2787a50a95e565d07959386eda",
"content_id": "3f4acdb44b86b1dbe8f08ed5490f825200a2bc18",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3214,
"license_type": "permissive",
"max_line_length": 139,
"num_lines": 75,
"path": "/tests/unit/discovery.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "import os\nimport unittest\n\nfrom end2.pattern_matchers import (\n DefaultModulePatternMatcher,\n DefaultTestCasePatternMatcher\n)\nfrom end2.models.testing_containers import TestPackage\nfrom end2 import discovery\n\nfrom examples.simple.smoke import sample1\n\n\nclass TestDiscoverModule(unittest.TestCase):\n def test_module_found_and_no_error(self):\n module_matcher = DefaultModulePatternMatcher([], '', True)\n test_matcher = DefaultTestCasePatternMatcher([], '', True)\n module, error_str = discovery.discover_module(os.path.join('examples', 'simple', 'smoke', 'sample1'), module_matcher, test_matcher)\n self.assertIsNotNone(module)\n self.assertEqual(error_str, '')\n\n def test_module_not_found_and_error_str(self):\n module_matcher = DefaultModulePatternMatcher([], '', True)\n test_matcher = DefaultTestCasePatternMatcher([], '', True)\n module, error_str = discovery.discover_module(os.path.join('examples', 'dont_exist'), module_matcher, test_matcher)\n self.assertIsNone(module)\n self.assertNotEqual(error_str, '')\n\n\nclass TestDiscoverTests(unittest.TestCase):\n def test_discovered(self):\n matcher = DefaultTestCasePatternMatcher([], '', True)\n tests = discovery.discover_tests(sample1, matcher)\n self.assertIsNotNone(tests)\n\n def test_partially_discovered(self):\n matcher = DefaultTestCasePatternMatcher(['test_1', 'test_2'], '', True)\n tests = discovery.discover_tests(sample1, matcher)\n assert len(tests) == 2\n\n\nclass TestDiscoverParameterizedTestRange(unittest.TestCase):\n def setUp(self) -> None:\n self.x = [1, 2, 3, 4, 5, 6, 7, 8]\n return super().setUp()\n\n def test_no_range_defaults_to_all(self):\n self.assertEqual(discovery.discover_parameterized_test_range('test_1', self.x), range(len(self.x)))\n\n def test_single_element(self):\n self.assertEqual(discovery.discover_parameterized_test_range('test_1[0]', self.x), range(0, 1))\n\n def test_last_to_first(self):\n self.assertEqual(discovery.discover_parameterized_test_range('test_1[-1:]', self.x), range(-1, len(self.x)))\n\n def test_all_except_last(self):\n self.assertEqual(discovery.discover_parameterized_test_range('test_1[:-1]', self.x), range(0, len(self.x)-1))\n \n def test_reverse_without_last(self):\n self.assertEqual(discovery.discover_parameterized_test_range('test_1[::-1]', self.x), range(0, len(self.x), -1))\n \n def test_start_at_1_skip_by_1_stop_after_1(self):\n self.assertEqual(discovery.discover_parameterized_test_range('test_1[1:1:1]', self.x), range(1, 1))\n\n def test_no_accessor(self):\n self.assertEqual(discovery.discover_parameterized_test_range('test_1[]', self.x), range(0, 0))\n\n def test_no_accessor_or_closing_bracket(self):\n self.assertEqual(discovery.discover_parameterized_test_range('test_1[', self.x), range(0, 0))\n\n def test_no_opening_bracket(self):\n self.assertEqual(discovery.discover_parameterized_test_range('test_1]', self.x), range(0, 0))\n\n def test_brackets_in_wrong_order(self):\n self.assertEqual(discovery.discover_parameterized_test_range('test_1][', self.x), range(0, 0))\n"
},
{
"alpha_fraction": 0.5973645448684692,
"alphanum_fraction": 0.6017569303512573,
"avg_line_length": 35.80838394165039,
"blob_id": "0ead9f75ff9f0a1462ef1fd9c61e82b6895d88a5",
"content_id": "c9af7922c957284a2d54a787aae5361feb27b4f9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6147,
"license_type": "permissive",
"max_line_length": 188,
"num_lines": 167,
"path": "/end2/models/result.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from datetime import datetime\nfrom typing import (\n Any,\n Generator,\n List\n)\n\nfrom end2.constants import Status\nfrom end2.models.testing_containers import TestModule\n\n\nclass Result:\n def __init__(self, name: str, status: Status = None, record: str = \"\") -> None:\n self.name = name\n self._end_time = None\n self.duration = None\n self.status = status\n self.record = record\n self.start()\n\n def __str__(self) -> str:\n return f'{self.name} Result: {{{self.status} | Duration: {self.duration}}}'\n\n @property\n def start_time(self) -> datetime:\n return self._start_time\n\n @property\n def end_time(self) -> datetime:\n return self._end_time\n\n @end_time.setter\n def end_time(self, value: datetime):\n self._end_time = value\n self.duration = self._end_time - self._start_time\n\n @property\n def total_seconds(self) -> float:\n return 0.0 if not self.duration else self.duration.total_seconds()\n\n def _now(self) -> datetime:\n return datetime.now()\n\n def start(self) -> None:\n if self._end_time is None:\n self._start_time = self._now()\n\n def end(self, status: Status = None):\n self.end_time = datetime.now()\n if status:\n self.status = status\n return self\n\n\nclass TestStepResult(Result):\n def __init__(self, record: str) -> None:\n self.record = record\n self._end_time = None\n self.start()\n\n def __str__(self) -> str:\n return f'{self.record} | Duration: {self.duration}'\n\n\nclass TestMethodResult(Result):\n def __init__(self, name: str, setup: Result = None, teardown: Result = None\n , status: Status = None, record: str = \"\", description: str = \"\"\n , metadata: dict = None) -> None:\n super().__init__(name, status, record)\n self.setup_result = setup\n self.teardown_result = teardown\n self.metadata = metadata or {}\n self.description = description\n self.steps = []\n\n def to_base(self) -> Result:\n result = Result(self.name, self.status, self.record)\n result._start_time = self._start_time\n result._end_time = self._end_time\n return result\n\n\nclass TestModuleResult(Result):\n def __init__(self, module: TestModule, setups: List[Result] = None, teardowns: List[Result] = None\n , test_results: List[TestMethodResult] = None, status: Status = None\n , record: str = \"\") -> None:\n super().__init__(module.name, status, record)\n self.file_name = module.file_name\n self.setups = setups or []\n self.teardowns = teardowns or []\n self.description = module.description\n self.test_results = test_results if test_results else []\n self.passed_count, self.failed_count, self.skipped_count = 0, 0, 0\n\n def __str__(self) -> str:\n return f'{self.name} Results: {{Total: {self.total_count} | Passed: {self.passed_count} | Failed: {self.failed_count} | Skipped: {self.skipped_count} | Duration: {self.duration}}}'\n\n def __iter__(self) -> Generator[TestMethodResult, Any, None]:\n for result in self.test_results:\n yield result\n\n @property\n def total_count(self) -> int:\n return self.passed_count + self.failed_count + self.skipped_count\n\n def append(self, test_result) -> None:\n if test_result:\n self.test_results.append(test_result)\n\n def extend(self, test_results: list) -> None:\n self.test_results.extend(test_results)\n\n def end(self, status: Status = None):\n super().end(status)\n self.passed_count, self.failed_count, self.skipped_count = 0, 0, 0\n if self.test_results:\n if all(x.status is Status.SKIPPED for x in self.test_results):\n self.status = Status.SKIPPED\n self.skipped_count = len(self.test_results)\n else:\n for result in self.test_results:\n if 
result.status is Status.PASSED:\n self.passed_count += 1\n elif result.status is Status.FAILED:\n self.failed_count += 1\n elif result.status is Status.SKIPPED:\n self.skipped_count += 1\n self.status = Status.PASSED if self.passed_count > 0 and self.failed_count == 0 and self.skipped_count == 0 else Status.FAILED\n return self\n\n\nclass TestSuiteResult(Result):\n def __init__(self, name: str, test_modules: List[TestModuleResult] = None, status: Status = None, record: str = \"\") -> None:\n super().__init__(name, status, record)\n self.test_modules = test_modules if test_modules else []\n self.passed_count, self.failed_count, self.skipped_count = 0, 0, 0\n\n def __str__(self) -> str:\n return f'{self.name} Results: {{Total: {self.total_count} | Passed: {self.passed_count} | Failed: {self.failed_count} | Skipped: {self.skipped_count} | Duration: {self.duration}}}'\n\n def __iter__(self) -> Generator[TestModuleResult, Any, None]:\n for result in self.test_modules:\n yield result\n\n @property\n def exit_code(self) -> int:\n return 0 if self.status is Status.PASSED else 1\n\n @property\n def total_count(self) -> int:\n return self.passed_count + self.failed_count + self.skipped_count\n\n def append(self, test_module_result: TestModuleResult) -> None:\n self.test_modules.append(test_module_result)\n\n def extend(self, test_module_results: List[TestMethodResult]) -> None:\n self.test_modules.extend(test_module_results)\n\n def end(self, status: Status = None):\n super().end(status)\n self.passed_count, self.failed_count, self.skipped_count = 0, 0, 0\n for result in self.test_modules:\n self.passed_count += result.passed_count\n self.failed_count += result.failed_count\n self.skipped_count += result.skipped_count\n self.status = Status.PASSED if self.passed_count > 0 and self.failed_count == 0 and self.skipped_count == 0 else Status.FAILED\n return self\n"
},
{
"alpha_fraction": 0.6241387128829956,
"alphanum_fraction": 0.624962568283081,
"avg_line_length": 44.337860107421875,
"blob_id": "9ee1e599cbee861c83dce68cbeed898fbd932f51",
"content_id": "5094def3007b46ff9bd12ad95b97a43d40bd58f9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 26704,
"license_type": "permissive",
"max_line_length": 163,
"num_lines": 589,
"path": "/end2/runner.py",
"repo_name": "jonwesneski/test_framework",
"src_encoding": "UTF-8",
"text": "from argparse import Namespace\nimport asyncio\nfrom cmd import Cmd\nimport concurrent.futures\nimport inspect\nfrom logging import Logger\nimport pathlib\nimport threading\nfrom time import sleep\nimport traceback\nimport sys\nfrom typing import (\n Callable,\n List,\n Tuple\n)\n\nfrom end2 import exceptions\nfrom end2.discovery import discover_suite\nfrom end2.constants import ReservedWords, Status\nfrom end2.logger import SuiteLogManager\nfrom end2.models.result import (\n Result,\n TestMethodResult,\n TestModuleResult,\n TestStepResult,\n TestSuiteResult,\n)\nfrom end2.models.testing_containers import (\n DynamicMroMixin,\n TestGroups,\n TestMethod,\n TestModule,\n TestPackage,\n TestPackageTree\n)\nfrom end2.resource_profile import create_last_run_rc\n\n\ndef default_test_parameters(logger, package_object) -> Tuple[tuple, dict]:\n return (logger,), {}\n\n\ndef create_test_run(parsed_args: Namespace, test_parameters_func=default_test_parameters\n , log_manager: SuiteLogManager = None) -> Tuple['SuiteRun', Tuple[str]]:\n test_packages, failed_imports = discover_suite(parsed_args.suite.paths)\n suite_run = SuiteRun(parsed_args, test_parameters_func, test_packages, log_manager)\n return suite_run, failed_imports\n\n\ndef start_test_run(parsed_args: Namespace, test_parameters_func=default_test_parameters\n , log_manager: SuiteLogManager = None) -> Tuple[TestSuiteResult, Tuple[str]]:\n suite_run, failed_imports = create_test_run(parsed_args, test_parameters_func, log_manager)\n results = suite_run.run()\n suite_run.log_manager.close()\n return results, failed_imports\n\n\nclass SuiteRun:\n def __init__(self, parsed_args: Namespace, test_parameters_func: Callable, test_packages: Tuple[TestPackageTree], log_manager: SuiteLogManager = None) -> None:\n self.parsed_args = parsed_args\n self.test_parameters_func = test_parameters_func\n self.test_packages = test_packages\n self.allow_concurrency = not self.parsed_args.no_concurrency\n self.name = 'suite_run' if not self.parsed_args.watch else 'suite_watch'\n self.results = None\n self.log_manager = log_manager or SuiteLogManager(logger_name=self.name, max_folders=self.parsed_args.max_log_folders)\n\n @property\n def logger(self):\n return self.log_manager.logger\n\n def run(self) -> TestSuiteResult:\n self.log_manager.on_suite_start(self.name)\n self.results = TestSuiteResult(self.name)\n try:\n if self.parsed_args.watch:\n self.run_watched()\n else:\n for package in self.test_packages:\n self.results.extend(self.run_modules(package))\n except exceptions.StopTestRunException as stre:\n self.logger.critical(stre)\n self.results.end()\n self.log_manager.on_suite_stop(self.results)\n create_last_run_rc(self.results)\n return self.results\n\n def run_modules(self, package: TestPackage) -> List[TestModuleResult]:\n test_parameters_func = package.package_test_parameters_func or self.test_parameters_func \n package.setup()\n test_module_results = []\n if self.allow_concurrency:\n sequential_modules = package.sequential_modules\n parallel_modules = package.parallel_modules\n else:\n sequential_modules = sequential_modules + parallel_modules\n parallel_modules = tuple()\n for test_module in sequential_modules:\n module_run = TestModuleRun(test_parameters_func, test_module, self.log_manager, package.package_object, self.parsed_args)\n test_module_results.append(module_run.run())\n\n with concurrent.futures.ThreadPoolExecutor(max_workers=self.parsed_args.max_workers) as executor:\n futures = [\n executor.submit(\n 
TestModuleRun(test_parameters_func, test_module, self.log_manager, package.package_object, self.parsed_args, executor).run)\n for test_module in parallel_modules\n ]\n for future in futures:\n test_module_results.append(future.result())\n package.teardown()\n return test_module_results\n\n def run_watched(self) -> None:\n try:\n suite_cmd = _SuiteWatchCmd(self)\n suite_cmd.cmdloop()\n except KeyboardInterrupt:\n pass\n \n\nclass _SuiteWatchCmd(Cmd):\n prompt = ''\n\n def __init__(self, suite_run: SuiteRun) -> None:\n super().__init__()\n self.intro = '\\nWatch Mode: Press Ctrl+C to exit...\\n'\n self.suite_run = suite_run\n self.ran_at_least_once = False\n\n def cmdloop(self, intro: str = None) -> None:\n self._run_watch()\n return super().cmdloop(self.intro)\n\n def postcmd(self, stop: bool, line: str) -> bool:\n return super().postcmd(stop, line)\n\n def _run_watch(self) -> None:\n self.cmdqueue.append(self.do_watch_modules.__name__.replace('do_', ''))\n\n def do_watch_modules(self, line: str) -> None:\n for package in self.suite_run.test_packages:\n package_ = TestPackage(package.package, package_object=package.package_object)\n for sequential_module in package.sequential_modules:\n if self._has_changed(sequential_module):\n package_.sequential_modules.add(sequential_module)\n for parallel_module in package.parallel_modules:\n if self._has_changed(parallel_module):\n package_.parallel_modules.add(parallel_module)\n if package_.parallel_modules or package_.parallel_modules:\n if self.ran_at_least_once:\n self.suite_run.log_manager = self.suite_run.log_manager.new_instance()\n self.suite_run.run_modules(package_)\n self.suite_run.log_manager.on_suite_stop(TestSuiteResult(self.suite_run.name))\n self.suite_run.log_manager.close()\n self.stdout.write(self.intro)\n self.ran_at_least_once = True\n sleep(7)\n self._run_watch()\n \n @staticmethod\n def _has_changed(module: TestModule) -> bool:\n last_modified = pathlib.Path(module.file_name).stat().st_mtime\n changed = False\n if last_modified > module.last_modified:\n module.last_modified = last_modified\n changed = True\n return changed\n\n\nclass TestModuleRun:\n def __init__(self, test_parameters_func, module: TestModule, log_manager: SuiteLogManager\n , package_object: DynamicMroMixin, parsed_args: Namespace\n , concurrent_executor: concurrent.futures.ThreadPoolExecutor = None) -> None:\n self.test_parameters_func = test_parameters_func\n self.module = module\n self.log_manager = log_manager\n self.package_object = package_object\n self.parsed_args = parsed_args\n self.stop_on_fail = parsed_args.stop_on_fail\n self.concurrent_executor = concurrent_executor\n self.parameters_resolver = ParametersResolver(test_parameters_func, self.package_object, self.parsed_args.event_timeout)\n\n def run(self) -> TestModuleResult:\n result = TestModuleResult(self.module)\n setup_results, test_results, teardown_results = self.run_group(self.module.groups)\n result.setups = setup_results\n result.test_results = test_results\n result.teardowns = teardown_results\n result.end()\n self.log_manager.on_module_done(result)\n return result\n\n def run_group(self, group: TestGroups) -> Tuple[List[Result], List[TestMethodResult], List[Result]]:\n setup_results = [self.setup(group.setup_func)]\n teardown_results = []\n if setup_results[0].status is Status.FAILED:\n test_results = self._create_skipped_results(group, setup_results[0].record)\n else:\n test_results = self.run_tests(group)\n for group_ in group.children:\n sr, tr, trr = self.run_group(group_)\n 
setup_results.extend(sr)\n test_results.extend(tr)\n teardown_results.extend(trr)\n teardown_results.append(self.teardown(group.teardown_func))\n if any(x.status is Status.FAILED for x in test_results):\n self._run_on_failures_in_module()\n return setup_results, test_results, teardown_results\n\n def _run_on_failures_in_module(self):\n teardown_logger = self.log_manager.get_teardown_logger(self.module.name)\n args, kwargs, ender = self.parameters_resolver.resolve(self.module.on_failures_in_module, teardown_logger)\n if inspect.iscoroutinefunction(self.module.on_failures_in_module):\n loop = asyncio.new_event_loop()\n asyncio.set_event_loop(loop)\n loop.run_until_complete(run_async_test_func(teardown_logger, ender, self.module.on_failures_in_module, *args, **kwargs))\n loop.close()\n else:\n run_test_func(teardown_logger, ender, self.module.on_failures_in_module, *args, **kwargs)\n\n def _create_skipped_results(self, group: TestGroups, record: str) -> List[TestMethodResult]:\n test_results = [\n TestMethodResult(v.name, status=Status.SKIPPED, record=record, description=v.__doc__, metadata=v.metadata)\n for _, v in group.tests.items()\n ]\n for g in group.children:\n test_results.extend(self._create_skipped_results(g, record))\n return test_results\n\n def setup(self, setup_func: Callable) -> Result:\n setup_logger = self.log_manager.get_setup_logger(self.module.name)\n args, kwargs, ender = self.parameters_resolver.resolve(setup_func, setup_logger)\n if inspect.iscoroutinefunction(setup_func):\n loop = asyncio.new_event_loop()\n asyncio.set_event_loop(loop)\n result = loop.run_until_complete(run_async_test_func(setup_logger, ender, setup_func, *args, **kwargs))\n loop.close()\n else:\n result = run_test_func(setup_logger, ender, setup_func, *args, **kwargs)\n self.log_manager.on_setup_module_done(self.module.name, result.to_base())\n return result\n\n def run_tests(self, group: TestGroups) -> List[TestMethodResult]: \n async def as_completed(coroutines_, results_, stop_on_first_fail_):\n for fs in coroutines_:\n try:\n result = await fs.run_async()\n results_.append(result)\n if result.status is Status.FAILED and stop_on_first_fail_:\n [f.cancel() for f in coroutines_]\n except exceptions.IgnoreTestException:\n pass\n \n routines, coroutines = [], []\n for k, test in group.tests.items():\n test_run = TestMethodRun(test, self.parameters_resolver, self.log_manager, self.module.name)\n if inspect.iscoroutinefunction(test.func):\n coroutines.append(test_run)\n else:\n routines.append(test_run)\n results = []\n loop = None\n try:\n if self.concurrent_executor:\n future_results = [\n self.concurrent_executor.submit(test.run)\n for test in routines\n ]\n try:\n for future_result in concurrent.futures.as_completed(future_results):\n try:\n result = future_result.result()\n results.append(result)\n if self.stop_on_fail and result.status is Status.FAILED:\n raise exceptions.StopTestRunException(result.record)\n except exceptions.IgnoreTestException:\n pass\n except exceptions.StopTestRunException as stre:\n raise\n except:\n self.log_manager.logger.error(traceback.format_exc())\n loop = asyncio.new_event_loop()\n asyncio.set_event_loop(loop)\n loop.run_until_complete(as_completed(coroutines, results, self.stop_on_fail))\n loop.close()\n else:\n try:\n for test in routines:\n try:\n results.append(test.run())\n if self.stop_on_fail and results[-1].status is Status.FAILED:\n raise exceptions.StopTestRunException(results[-1].record)\n except exceptions.IgnoreTestException:\n pass\n loop = 
asyncio.new_event_loop()\n asyncio.set_event_loop(loop)\n for test in coroutines:\n try:\n results.append(loop.run_until_complete(test.run_async()))\n if self.stop_on_fail and results[-1].status is Status.FAILED:\n raise exceptions.StopTestRunException(results[-1].record)\n except exceptions.IgnoreTestException:\n pass\n loop.close()\n except exceptions.StopTestRunException as stre:\n raise\n except:\n self.log_manager.logger.error(traceback.format_exc())\n return results\n finally:\n if loop is not None and loop.is_running():\n loop.close()\n\n def teardown(self, teardown_func: Callable) -> Result:\n teardown_logger = self.log_manager.get_teardown_logger(self.module.name)\n args, kwargs = self.test_parameters_func(teardown_logger, self.package_object)\n args, kwargs, ender = self.parameters_resolver.resolve(teardown_func, teardown_logger)\n if inspect.iscoroutinefunction(teardown_func):\n loop = asyncio.new_event_loop()\n asyncio.set_event_loop(loop)\n result = loop.run_until_complete(run_async_test_func(teardown_logger, ender, teardown_func, *args, **kwargs))\n loop.close()\n else:\n result = run_test_func(teardown_logger, ender, teardown_func, *args, **kwargs)\n self.log_manager.on_teardown_module_done(self.module.name, result.to_base())\n return result\n\n\nclass Ender:\n def __init__(self, time_out: float = 15.0) -> None:\n self.time_out = time_out\n self.event = threading.Event()\n \n def create(self) -> Callable:\n self.event = threading.Event()\n return Ender.end_wrapper(self.event)\n\n @staticmethod\n def end_wrapper(event: threading.Event) -> Callable:\n def end() -> None:\n event.set()\n def fail(x: str) -> None:\n event.set()\n raise exceptions.OnEventFailedException(x)\n end.fail = fail\n return end\n\n def wait(self) -> None:\n in_time = self.event.wait(self.time_out)\n if not in_time:\n raise TimeoutError(f\"end() time out reached: {self.time_out}s\")\n\n\nclass ParametersResolver:\n def __init__(self, test_parameters_func: Callable, package_object, time_out: float = 15.0) -> None:\n self._package_object = package_object\n self._test_parameters_func = test_parameters_func\n self.time_out = time_out\n\n def resolve(self, method: Callable, logger: Logger, extra_args: tuple = None) -> tuple:\n args, kwargs = self._test_parameters_func(logger, self._package_object)\n if extra_args:\n args += extra_args\n kwonlyargs = dict.fromkeys(inspect.getfullargspec(method).kwonlyargs, True)\n ender = None\n if kwonlyargs:\n if kwonlyargs.pop(ReservedWords.END.value, False):\n ender = Ender(self.time_out)\n kwargs[ReservedWords.END.value] = ender.create()\n if kwonlyargs.pop(ReservedWords.LOGGER.value, False):\n kwargs[ReservedWords.LOGGER.value] = logger\n if kwonlyargs.pop(ReservedWords.PACKAGE_OBJECT.value, False):\n kwargs[ReservedWords.PACKAGE_OBJECT.value] = self._package_object\n if kwonlyargs.pop(ReservedWords.STEP.value, False):\n kwargs[ReservedWords.STEP.value] = True\n if kwonlyargs:\n raise exceptions.TestCodeException(f\"Unknown reserved words found or possibly typos: {list(kwonlyargs.keys())}\"\n f\"\\npossible reserved keywords: {[[x.name for x in ReservedWords]]}\")\n return args, kwargs, ender\n\n\nclass TestMethodRun:\n def __init__(self, test_method: TestMethod, parameters_resolver: ParametersResolver\n , log_manager: SuiteLogManager, module_name: str) -> None:\n self.test_method = test_method\n self.parameters_resolver = parameters_resolver\n self.log_manager = log_manager\n self.module_name = module_name\n\n def run(self) -> TestMethodResult:\n loop = 
asyncio.new_event_loop()\n asyncio.set_event_loop(loop)\n if inspect.iscoroutinefunction(self.test_method.setup_func):\n setup_result = loop.run_until_complete(\n self._intialize_args_and_setup_async()\n )\n else:\n setup_result = self._intialize_args_and_setup()\n\n result = self._intialize_args_and_run()\n if result.status is Status.FAILED and hasattr(self.test_method.func, 'on_test_failure'):\n logger = self.log_manager.get_test_logger(self.module_name, self.test_method.name)\n args, kwargs, ender = self.parameters_resolver.resolve(self.test_method.func.on_test_failure, logger)\n if inspect.iscoroutinefunction(self.test_method.func.on_test_failure):\n loop.run_until_complete(\n run_async_test_func(self.log_manager.logger, ender, self.test_method.func.on_test_failure, *args, **kwargs)\n )\n else:\n run_test_func(self.log_manager.logger, ender, self.test_method.func.on_test_failure, *args, **kwargs)\n\n if inspect.iscoroutinefunction(self.test_method.teardown_func):\n teardown_result = loop.run_until_complete(\n self._intialize_args_and_teardown_async()\n )\n else:\n teardown_result = self._intialize_args_and_teardown()\n result.setup_result = setup_result\n result.teardown_result = teardown_result\n loop.close()\n return result\n\n async def run_async(self) -> TestMethodResult:\n if inspect.iscoroutinefunction(self.test_method.setup_func):\n setup_result = await self._intialize_args_and_setup_async()\n else:\n setup_result = self._intialize_args_and_setup()\n\n result = await self._intialize_args_and_run_async()\n if result.status is Status.FAILED and hasattr(self.test_method.func, 'on_test_failure'):\n logger = self.log_manager.get_test_logger(self.module_name, self.test_method.name)\n args, kwargs, ender = self.parameters_resolver.resolve(self.test_method.func.on_test_failure, logger)\n if inspect.iscoroutinefunction(self.test_method.func.on_test_failure):\n await run_async_test_func(self.log_manager.logger, ender, self.test_method.func.on_test_failure, *args, **kwargs)\n else:\n run_test_func(self.log_manager.logger, ender, self.test_method.func.on_test_failure, *args, **kwargs)\n\n if inspect.iscoroutinefunction(self.test_method.teardown_func):\n teardown_result = await self._intialize_args_and_teardown_async()\n else:\n teardown_result = self._intialize_args_and_teardown()\n result.setup_result = setup_result\n result.teardown_result = teardown_result\n return result\n\n def _intialize_args_and_setup(self) -> Result:\n logger = self.log_manager.get_setup_test_logger(self.module_name, self.test_method.name)\n args, kwargs, ender = self.parameters_resolver.resolve(self.test_method.setup_func, logger)\n result = run_test_func(logger, ender, self.test_method.setup_func, *args, **kwargs)\n self.log_manager.on_setup_test_done(self.module_name, self.test_method.name, result.to_base())\n return result\n\n async def _intialize_args_and_setup_async(self) -> Result:\n logger = self.log_manager.get_setup_test_logger(self.module_name, self.test_method.name)\n args, kwargs, ender = self.parameters_resolver.resolve(self.test_method.setup_func, logger)\n result = await run_async_test_func(logger, ender, self.test_method.setup_func, *args, **kwargs)\n self.log_manager.on_setup_test_done(self.module_name, self.test_method.name, result.to_base())\n return result\n\n def _intialize_args_and_teardown(self) -> Result:\n logger = self.log_manager.get_teardown_test_logger(self.module_name, self.test_method.name)\n args, kwargs, ender = self.parameters_resolver.resolve(self.test_method.teardown_func, 
logger)\n result = run_test_func(logger, ender, self.test_method.teardown_func, *args, **kwargs)\n self.log_manager.on_teardown_test_done(self.module_name, self.test_method.name, result.to_base())\n return result\n\n async def _intialize_args_and_teardown_async(self) -> Result:\n logger = self.log_manager.get_teardown_test_logger(self.module_name, self.test_method.name)\n args, kwargs, ender = self.parameters_resolver.resolve(self.test_method.teardown_func, logger)\n result = await run_async_test_func(logger, ender, self.test_method.teardown_func, *args, **kwargs)\n self.log_manager.on_teardown_test_done(self.module_name, self.test_method.name, result.to_base())\n return result\n\n def _intialize_args_and_run(self) -> TestMethodResult:\n logger = self.log_manager.get_test_logger(self.module_name, self.test_method.name)\n args, kwargs, ender = self.parameters_resolver.resolve(self.test_method.func, logger, self.test_method.parameterized_tuple)\n result = run_test_func(logger, ender, self.test_method.func, *args, **kwargs)\n result.metadata = self.test_method.metadata\n self.log_manager.on_test_done(self.module_name, result)\n return result\n\n async def _intialize_args_and_run_async(self) -> TestMethodResult:\n logger = self.log_manager.get_test_logger(self.module_name, self.test_method.name)\n args, kwargs, ender = self.parameters_resolver.resolve(self.test_method.func, logger, self.test_method.parameterized_tuple)\n result = await run_async_test_func(logger, ender, self.test_method.func, *args, **kwargs)\n result.metadata = self.test_method.metadata\n self.log_manager.on_test_done(self.module_name, result)\n return result\n\n\nclass TestStepsRun:\n def __init__(self, logger: Logger) -> None:\n self.logger = logger\n self.steps: TestStepResult = []\n\n def __str__(self) -> str:\n return f'Number of steps: {len(self.steps)} | Duration: {self.duration}'\n\n def step(self, record: str, assert_lambda: Callable, func: Callable, *args, **kwargs):\n self.logger.info(record)\n step_ = TestStepResult(record)\n try:\n return_value = func(*args, **kwargs)\n finally:\n self.steps.append(step_.end())\n if assert_lambda:\n assert assert_lambda(return_value)\n return return_value\n\n async def step_async(self, record: str, assert_lambda: Callable, func: Callable, *args, **kwargs):\n self.logger.info(record)\n step_ = TestStepResult(record)\n try:\n return_value = await func(*args, **kwargs)\n finally:\n self.steps.append(step_.end())\n if assert_lambda:\n assert assert_lambda(return_value)\n return return_value\n\n\ndef run_test_func(logger: Logger, ender: Ender, func: Callable, *args, **kwargs) -> TestMethodResult:\n result = TestMethodResult(func.__name__, status=Status.FAILED)\n steps = TestStepsRun(logger)\n if kwargs.get(ReservedWords.STEP.value):\n kwargs[ReservedWords.STEP.value] = steps.step\n try:\n func(*args, **kwargs)\n if ender:\n ender.wait()\n result.status = Status.PASSED\n result.steps = steps.steps\n except AssertionError as ae:\n _, _, tb = sys.exc_info()\n tb_info = traceback.extract_tb(tb)\n filename, line, func, error_text = tb_info[-1]\n result.record = str(ae) if str(ae) else error_text\n logger.error(result.record)\n except exceptions.SkipTestException as ste:\n result.status = Status.SKIPPED\n result.record = str(ste)\n logger.info(result.record)\n except exceptions.IgnoreTestException:\n raise\n except (TimeoutError, exceptions.OnEventFailedException) as other:\n result.record = f'{other.__class__.__name__}: {other}'\n logger.error(result.record)\n except Exception as e:\n 
logger.debug(traceback.format_exc())\n result.record = f'Encountered an exception: {e}'\n logger.error(result.record)\n return result.end()\n\n\nasync def run_async_test_func(logger: Logger, ender: Ender, func: Callable, *args, **kwargs) -> TestMethodResult:\n result = TestMethodResult(func.__name__, status=Status.FAILED)\n steps = TestStepsRun(logger)\n if kwargs.get(ReservedWords.STEP.value):\n kwargs[ReservedWords.STEP.value] = steps.step_async\n try:\n await func(*args, **kwargs)\n if ender:\n ender.wait()\n result.status = Status.PASSED\n result.steps = steps.steps\n except AssertionError as ae:\n _, _, tb = sys.exc_info()\n tb_info = traceback.extract_tb(tb)\n filename, line, func, error_text = tb_info[-1]\n result.record = str(ae) if str(ae) else error_text\n logger.error(result.record)\n except exceptions.SkipTestException as ste:\n result.status = Status.SKIPPED\n result.record = str(ste)\n logger.info(result.record)\n except exceptions.IgnoreTestException:\n raise\n except (TimeoutError, exceptions.OnEventFailedException) as other:\n result.record = f'{other.__class__.__name__}: {other}'\n logger.error(result.record)\n except asyncio.CancelledError:\n result.status = Status.SKIPPED\n result.record = 'I got cancelled'\n logger.info(result.record)\n except Exception as e:\n logger.debug(traceback.format_exc())\n result.record = f'Encountered an exception: {e}'\n logger.error(result.record)\n return result.end()\n"
}
] | 46 |
Vivianliao009204/lesson6
|
https://github.com/Vivianliao009204/lesson6
|
a48d6bb5829e4ff59202d767413f888c26498841
|
9be1c4192dada7c0d75e905fa8af00dd4019a7f7
|
4ee430e4ce37cd5173c08d0b3c4bfa3633cf5008
|
refs/heads/main
| 2023-06-15T15:30:19.712938 | 2021-07-13T02:52:28 | 2021-07-13T02:52:28 | 385,453,535 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.4658823609352112,
"alphanum_fraction": 0.5011764764785767,
"avg_line_length": 16.217391967773438,
"blob_id": "aff2d6ca517df81cb743f16fdd40c128d8375e4d",
"content_id": "e59b710202e4368c95acc0ab0fb62814e3ad3c88",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 451,
"license_type": "no_license",
"max_line_length": 37,
"num_lines": 23,
"path": "/lesson6 hw way 2.py",
"repo_name": "Vivianliao009204/lesson6",
"src_encoding": "UTF-8",
"text": "x=input('多少小朋友?')\r\nx=int(x)\r\nlist=[]\r\nfor i in range(x):\r\n score=int(input('成績?'))\r\n name=input('名子?')\r\n list.append(score)\r\n list.append(name)\r\nprint (list)\r\ni=0\r\nmax=-1\r\nmin=101\r\nmax_i=0\r\nmin_i=0\r\nwhile i < x :\r\n if list [i*2]:\r\n max_i=i*2\r\n if list[i*2]:\r\n min=list[i*2]\r\n min_i=i*2\r\n i=i+1\r\nprint('高分',list[max_i+1],list[max_i])\r\nprint('低分',list[min_i+1],list[min_i])\r\n "
}
] | 1 |
joerg-rechinger/Data_Engineering
|
https://github.com/joerg-rechinger/Data_Engineering
|
2015b0e4f2cf21e23a981582bc952b146b865767
|
aaf7479e7092e64d316772cc1c1d2046fd57ae2f
|
4b36814553fc4df31cf30b77fd4e528bb9c9d498
|
refs/heads/main
| 2023-01-24T19:47:12.833392 | 2020-11-29T17:31:54 | 2020-11-29T17:31:54 | 317,005,547 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7683653235435486,
"alphanum_fraction": 0.7736598253250122,
"avg_line_length": 115.23076629638672,
"blob_id": "6e7d62befb01c0b4fb4946f101f68da63030d226",
"content_id": "dd852e21c5a12338da02be27142d647bd14d5d2a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 3022,
"license_type": "no_license",
"max_line_length": 525,
"num_lines": 26,
"path": "/udacity-dend-data-lake/README.md",
"repo_name": "joerg-rechinger/Data_Engineering",
"src_encoding": "UTF-8",
"text": "## Data Lake Project\n### Udacity Data Engineering Nanodegree\n\n#### Starting point\nThe fictitous music streaming startup **Sparkify** has grown their userbase and songplays. So far the data was kept in a Data Warehouse and should now be moved to a Data Lake. Currently the data resides in S3 in json format. The data covers usage data of the streaming app as well as data about the songs and artists. The goal is build a data pipeline, loading the data from S3, processing it within Spark and then saving it back to S3 in the format of parquet files that can be queried by the Analytics team at **Sparkify**.\n\n#### How to run the script\nThe repository consits of three files (in addition to README.md):\n- dl.cfg: If you want to run **etl.py** locally on your machine, fill in your AWS credentials into this file (for being able to access udacity's s3 bucket with the input data).\n- etl.py: This script contains the ETL pipeline. Read more about how it works below. If you want to run the script locally, uncomment the lines as suggested in the script to run configParser with your AWS credentials from dl.cfg. **Important**: Also fill in your S3 bucket for the path of output_data so that the output gets written to your S3 bucket.\n- bootstramp_emr.sh: If you want to run the script on an EMR cluster, use this fill when creating the cluster to have a Spark version up and running within the cluster.\n\n#### EMR cluster setup\nInstead of running the script on your local machine, you can also run it on an EMR cluster from AWS. To do so, follow the steps below:\n1. Upload **etl.py** and **bootstrap_emr.sh** to a private S3 bucket owned by you\n2. Create a new cluster in EMR (via the advanced options). Configure it with Spark, Hadoop, Hive and Livy, using the latest release of EMR available.\n3. Make sure to include the bootstrap_emr.sh from your S3 bucket as a bootstrapping action when launching the cluster. Also include your private pem file so that you can SSH into the cluster.\n4. Once the cluster is launched, SSH into the cluster. Hit the command \"spark-submit + path/to/your/S3/etl.py/file\" (e.g. \"spark-submit S3://my-bucket/etl.py\")\n\n#### Schema design and ETL pipeline\nThe schema is designed in the well-known star schema, consisting of fact and dimension tables. This reduces redundancy in the saved data as well as reduces number of joins necessary to answer analytics question. The fact table contains the primary keys of the dimension tables so that they can be joined.\n\nWithin **etl.py** first a Spark Session is established. After that the script consists of two parts: first dealing with the song data and after that with the log data. The steps are the same:\n- First the data is loaded from Udacity's S3 bucket (to reduce runtime, for both cases the smaller buckets are chosen instead of the full-blown versions).\n- After that it is transformed and only the relevant columns are chosen for the respective tables.\n- Then the tables are saved back to S3 in the format of parquet files (columnar storage).\n"
},
{
"alpha_fraction": 0.660563588142395,
"alphanum_fraction": 0.6654991507530212,
"avg_line_length": 38.256248474121094,
"blob_id": "6a3cfdf878ebf669fdadafd907d85faaf32121d4",
"content_id": "a16a46a614ea0178ba771dbd930226c63610a659",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6281,
"license_type": "no_license",
"max_line_length": 134,
"num_lines": 160,
"path": "/udacity-dend-data-lake/etl.py",
"repo_name": "joerg-rechinger/Data_Engineering",
"src_encoding": "UTF-8",
"text": "#import configparser\nfrom datetime import datetime, date\nimport os\nfrom pyspark.sql.types import StringType, DateType, StructType, StructField, DoubleType, IntegerType, LongType\nfrom pyspark.sql import SparkSession\nfrom pyspark.sql.functions import udf, col, expr, to_timestamp, to_date\nfrom pyspark.sql.functions import year, month, dayofmonth, hour, weekofyear, date_format, desc, from_unixtime, dayofweek\n\n# for running locally, uncomment line 1, 10, 11, 13 and 14 and fill in dl.cfg file with your AWS credentials\n#config = configparser.ConfigParser()\n#config.read('dl.cfg')\n\n#os.environ['AWS_ACCESS_KEY_ID']=config['default']['AWS_ACCESS_KEY_ID']\n#os.environ['AWS_SECRET_ACCESS_KEY']=config['default']['AWS_SECRET_ACCESS_KEY']\n\n\ndef create_spark_session():\n '''Initiates the SparkSession.\n Parameters:\n None\n\n Returns:\n SparkSession object, configured according to the specifications in .config method\n '''\n spark = SparkSession \\\n .builder \\\n .config('spark.jars.packages\", \"org.apache.hadoop:hadoop-aws:2.7.0') \\\n .getOrCreate()\n return spark\n\n\ndef process_song_data(spark, input_data, output_data):\n '''Reads the song data from S3, transforms it into various tables and writes those back to S3.\n Parameters:\n spark: the SparkSession object\n input_data: path to S3 bucket where the song input data is stored\n output_data: path to your private S3 bucket where the output data should be stored.\n\n Returns:\n None\n '''\n # get filepath to song data file\n # using the reduced song_data input, song-data is the full input from udacity's S3\n song_data = str(input_data)+'/song_data/*/*/*/*.json'\n\n # read song data file\n df = spark.read.json(song_data)\n\n # extract columns to create songs table\n songs_table = df.select(['song_id', 'title', 'artist_id', 'year', 'duration']).dropDuplicates()\n\n # write songs table to parquet files partitioned by year and artist\n songs_table.write.format('parquet')\\\n .partitionBy(['year', 'artist_id'])\\\n .option('path', str(output_data)+'/songs')\\\n .saveAsTable('songs', mode = 'overwrite')\n\n # extract columns to create artists table\n artists_table = df.select(['artist_id', 'artist_name', 'artist_location', 'artist_latitude', 'artist_longitude']).dropDuplicates()\n\n # write artists table to parquet files\n artists_table.write.format('parquet')\\\n .option('path', str(output_data)+'/artists')\\\n .saveAsTable('artists', mode = 'overwrite')\n\n\ndef process_log_data(spark, input_data, output_data):\n '''Reads the log data from S3, transforms it into various tables and writes those back to S3.\n Parameters:\n spark: the SparkSession object\n input_data: path to S3 bucket where the log input data is stored\n output_data: path to your private S3 bucket where the output data should be stored.\n\n Returns:\n None\n '''\n # get filepath to log data file\n # using the 'log-data' file for reduced input, log_data is the full data file on udacity's S3\n log_data = str(input_data)+'/log-data/*/*/*.json'\n\n # read log data file\n df = spark.read.json(log_data)\n\n # filter by actions for song plays\n df = df.where(df.page=='NextSong')\n\n # extract columns for users table\n user_table = df.select(['userId', 'firstName', 'lastName', 'gender', 'level'])\\\n .dropDuplicates()\\\n .orderBy('userId')\n\n # write users table to parquet files\n user_table.write.format('parquet')\\\n .option('path', str(output_data)+'/users')\\\n .saveAsTable('users', mode = 'overwrite')\n\n # create timestamp column from original timestamp column\n 
get_timestamp = udf(lambda x: datetime.fromtimestamp(x / 1000.0).strftime('%Y-%m-%d %H:%M:%S'))\n df = df.withColumn('ts_2', get_timestamp('ts')).withColumn('timestamp', to_timestamp('ts_2', 'yyyy-MM-dd HH:mm:ss'))\n\n # create datetime column from original timestamp column\n df = df.withColumn('date', to_date('timestamp'))\n\n # extract columns to create time table\n time_table = df.select(['userId', 'sessionId', 'timestamp'])\\\n .withColumn('hour', hour('timestamp'))\\\n .withColumn('day', dayofmonth('timestamp'))\\\n .withColumn('month', month('timestamp'))\\\n .withColumn('year', year('timestamp'))\\\n .withColumn('weekday', dayofweek('timestamp'))\\\n .dropDuplicates()\n\n # write time table to parquet files partitioned by year and month\n time_table.write.format('parquet')\\\n .partitionBy(['year', 'month'])\\\n .option('path', str(output_data)+'/time')\\\n .saveAsTable('time', mode = 'overwrite')\n\n # read in song data to use for songplays table\n song_df = spark.read.json(str(input_data)+'/song_data/*/*/*/*.json')\n\n # extract columns from joined song and log datasets to create songplays table\n # creating the conditions for the join below\n cond=[df.artist==song_df.artist_name, df.song==song_df.title]\n songplays_table = df.join(song_df, cond, 'left')\n # create a unique songplay id from session id+timestamp\n songplays_table = songplays_table.withColumn('songplay_id', expr('\"sessionId\"+\"ts\"'))\\\n .select(['songplay_id', 'timestamp', 'userId', 'level', 'song_id', 'artist_id', 'sessionId', 'location', 'userAgent'])\\\n .withColumn('month', month('timestamp'))\\\n .withColumn('year', year('timestamp'))\\\n .withColumnRenamed('timestamp','start_time')\\\n .dropDuplicates()\n\n # write songplays table to parquet files partitioned by year and month\n songplays_table.write.format('parquet')\\\n .partitionBy(['year', 'month'])\\\n .option('path', str(output_data)+'/songplays')\\\n .saveAsTable('songplays', mode = 'overwrite')\n\n\ndef main():\n '''Main function that first declares the three inputs to the process_song_data and process_log_data functions and then runs them.\n Parameters:\n None\n Returns:\n None\n '''\n spark = create_spark_session()\n input_data = 's3a://udacity-dend/'\n #fill in the name of your private S3 bucket\n output_data = ''\n\n process_song_data(spark, input_data, output_data)\n process_log_data(spark, input_data, output_data)\n\n\nif __name__ == '__main__':\n '''Executes the main function.\n '''\n main()\n"
}
] | 2 |
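
The etl.py record above converts the epoch-millisecond `ts` column with a Python UDF before deriving date parts. A minimal sketch, assuming a local SparkSession and the same `ts`/`timestamp` column names, of the equivalent conversion done entirely with Spark built-ins (avoiding the Python UDF round-trip):

# Minimal sketch, assuming a local SparkSession; sample ts values are illustrative.
from pyspark.sql import SparkSession
from pyspark.sql.functions import from_unixtime, to_timestamp, year, month

spark = SparkSession.builder.master("local[1]").appName("ts_sketch").getOrCreate()
df = spark.createDataFrame([(1541105830796,), (1541106106796,)], ["ts"])

# divide by 1000 because from_unixtime expects seconds, while ts is in milliseconds
df = df.withColumn("timestamp", to_timestamp(from_unixtime(df.ts / 1000)))
df = df.withColumn("year", year("timestamp")).withColumn("month", month("timestamp"))
df.show(truncate=False)
spark.stop()
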
AntoineGuillard/CryptoProject
|
https://github.com/AntoineGuillard/CryptoProject
|
af1109890e62c7d992e01c138ab7665cb8a0777b
|
e36857187149122529529afa272f04d90ac0b13d
|
fd8a188177793e3f2cec13dde5feb5bd305d05e1
|
refs/heads/master
| 2021-01-07T23:37:53.602636 | 2020-03-02T12:42:06 | 2020-03-02T12:42:06 | 241,852,336 | 1 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6596385836601257,
"alphanum_fraction": 0.6777108311653137,
"avg_line_length": 19.75,
"blob_id": "ca187f09e22d4b0cf0f625c4ec7fca704b4d2104",
"content_id": "d2527ab826df7d96bccfe57659b751f670b1930f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 332,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 16,
"path": "/Crypto_main.py",
"repo_name": "AntoineGuillard/CryptoProject",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\nimport sys\n\nfrom FileManager import create_zip, array_files, check_inputs\n\ninputs = sys.argv\n\n\ndef main(arguments):\n encrypt = check_inputs(arguments)\n create_zip(arguments[-1], array_files(arguments[3], arguments[5: len(arguments) - 2], encrypt))\n return 0\n\n\nif __name__ == \"__main__\":\n main(inputs)\n"
},
{
"alpha_fraction": 0.5975547432899475,
"alphanum_fraction": 0.6077432632446289,
"avg_line_length": 49.33333206176758,
"blob_id": "61ee4f99892b3f54586af17c81f6d562344a4be8",
"content_id": "65a8bffce43588343558a16c670a6bbcfa2031b0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7894,
"license_type": "no_license",
"max_line_length": 185,
"num_lines": 156,
"path": "/FileManager.py",
"repo_name": "AntoineGuillard/CryptoProject",
"src_encoding": "UTF-8",
"text": "import json\nimport os\nimport shutil\n\nfrom CryptoFunctions import cipher_file, cmac, decipher_file\nfrom SecondaryFunctions import get_dir_name, get_file_name, is_not_hex, file_or_directory_exist\n\n\ndef check_inputs(arguments):\n size_of_command = len(arguments)\n # Check if the command as the minimum arguments needed\n if size_of_command < 8:\n print(\n \"Please enter the right number of arguments.\\nThe Command as to be like this: python3 Crypto_main.py –enc|-dec –key \"\n \"F...F(128bits) –in <input file(s)> -out <output file>\\nAnd there is no need to put a '.zip' at the end of the output file\")\n exit()\n # Verify if the first argument is -enc of -dec\n if arguments[1] != \"-enc\" and arguments[1] != \"-dec\":\n print(arguments[1])\n print(\n \"You must choose if you want to encrypt or decrytp file(s).\\nThe Command as to be like this: python3 Crypto_main.py \"\n \"–enc|-dec –key F...F(128bits) –in <input file(s)> -out <output file>\\nAnd there is no need to put a '.zip' at the end of the output file\")\n exit()\n # Verify the option -key is present\n if arguments[2] != \"-key\":\n print(\n \"You must enter the -key option followed by a password of 128 bits in hexadecimal.\\nThe Command as to be \"\n \"like this: python3 Crypto_main.py –enc|-dec –key F...F(128bits) –in <input file(s)> -out <output file>\\nAnd there is no need to put a '.zip' at the end of the output file\")\n exit()\n # Check if the key is hexadecimal and his size is 128 bits\n if len(arguments[3]) != 32 or is_not_hex(arguments[3]):\n print(\n \"You must enter a key of 32 charater in hexadecimal.\\nThe Command as to be like this: python3 Crypto_main.py –enc|-dec \"\n \"–key F...F(128bits) –in <input file(s)> -out <output file>\\nAnd there is no need to put a '.zip' at the end of the output file\")\n exit()\n # Check for the option -in\n if arguments[4] != \"-in\":\n print(\n \"You must enter the -in option to take files as input.\\nThe Command as to be like this: python3 Crypto_main.py \"\n \"–enc|-dec –key F...F(128bits) –in <input file(s)> -out <output file>\\nAnd there is no need to put a '.zip' at the end of the output file\")\n exit()\n # Check for the option -out\n if arguments[size_of_command - 2] != \"-out\":\n print(\n \"You must enter the -out option to take files as output.\\nThe Command as to be like this: python3 Crypto_main.py \"\n \"–enc|-dec –key F...F(128bits) –in <input file(s)> -out <output file>\\nAnd there is no need to put a '.zip' at the end of the output file\")\n exit()\n # Check if zipped file already exist and ask the user if he want to remove it\n if file_or_directory_exist(arguments[size_of_command - 1] + \".zip\"):\n answer = \"\"\n while answer != \"yes\" and answer != \"no\":\n answer = input(\"The file you put as output file already exist do you want to overwrite it ?(yes/no)\")\n if answer == \"yes\":\n os.remove(arguments[- 1] + \".zip\")\n else:\n print(\"If you don't want to delete the existing file please enter a different name\")\n exit()\n # We'll need the name to create a temporary directory so we check if it exist first\n if os.path.isdir(arguments[- 1]):\n print(\"Please enter another name for the output file\")\n # Verify if the file(s) in input exists\n for i in range(5, size_of_command - 2):\n if not file_or_directory_exist(arguments[i]):\n print(\n \"The files you enter to cipher or decipher must exists.\\nThe Command as to be like this: python3 Crypto_main.py \"\n \"–enc|-dec –key F...F(128bits) –in <input file(s)> -out <output file>\\nAnd there is no need 
to put a '.zip' at the end of the output file\")\n print(\"There is at least one error for the \" + str(i - 4) + \"st/nd/th file\")\n exit()\n else:\n print(\"The file already exist\")\n if arguments[1] == \"-enc\":\n return True\n else:\n return False\n\n\ndef create_zip(zip_file_name, files):\n # Try to Create Directory to zip all the file together\n if file_or_directory_exist(zip_file_name):\n answer = \"\"\n while answer != \"yes\" and answer != \"no\":\n answer = input(\"This name is needed to create a temporary directory, do you want to delete it anyway (yes/no)\")\n if answer == \"yes\":\n shutil.rmtree(zip_file_name)\n else:\n print(\"You must enter a different name of file for your zip file \")\n try:\n os.mkdir(zip_file_name, 0o755)\n except OSError:\n print(\"Creation of the directory %s failed\" % zip_file_name)\n exit()\n\n if len(files[0]) == 2:\n # Iterate in an array of type {[filePath1, bytes of ciphered File1,iv],...}\n for file_resources in files:\n # Create file inside the newly created directory\n with open(zip_file_name + \"/\" + get_file_name(file_resources[0][0:-4]), 'wb') as file_deciphered:\n file_deciphered.write(file_resources[1])\n file_deciphered.close()\n\n elif len(files[0]) == 3:\n dict_for_json = files.pop()\n if file_or_directory_exist(get_dir_name(zip_file_name) + \"/iv.json\"):\n with open(get_dir_name(zip_file_name) + \"/iv.json\", 'rb') as jsonFile:\n dict_for_json = json.loads(jsonFile.read())\n jsonFile.close()\n\n for file_resources in files:\n # Create file inside the newly created directory\n with open(zip_file_name + \"/\" + get_file_name(file_resources[0]) + \".enc\", 'wb') as fileCiphered:\n fileCiphered.write(file_resources[1])\n fileCiphered.close()\n\n dict_for_json[get_file_name(file_resources[0])] = str(list(file_resources[2]))\n # Write the new content of IVs in the json file\n with open(zip_file_name + \"/iv.json\", 'w') as json_file:\n json_file.write(json.dumps(dict_for_json))\n json_file.close()\n # ZIP the directory\n shutil.make_archive(zip_file_name, 'zip', zip_file_name)\n # Remove the directory\n shutil.rmtree(zip_file_name)\n return 0\n\n\n# Create an array of lists, the last file is filled with name of input file, byte_array ciphered or decipher file, and iv\ndef array_files(key, input_files, encrypt):\n list_of_ciphered = []\n # Encrypt all files in input\n if encrypt:\n json_dict = {}\n if file_or_directory_exist(get_dir_name(input_files[0]) + \"/iv.json\"):\n with open(get_dir_name(input_files[0]) + \"/iv.json\", 'r') as jsonFile:\n json_dict = json.loads(jsonFile.read())\n\n for i, fileToCipher in enumerate(input_files):\n list_of_ciphered.append(cipher_file(key, fileToCipher))\n json_dict[get_file_name(fileToCipher) + \".mac\"] = str(list(cmac(key, list_of_ciphered[i][1])))\n list_of_ciphered.append(json_dict)\n return list_of_ciphered\n # Decrypt all files in input thanks to the iv stored in a json file\n else:\n # Open json of IV if it exist\n json_dict = {}\n if file_or_directory_exist(get_dir_name(input_files[0]) + \"/iv.json\"):\n with open(get_dir_name(input_files[0]) + \"/iv.json\", 'r') as jsonFile:\n json_dict = json.loads(jsonFile.read())\n else:\n print(\"The files to decrypt must be in the same directory as the iv.json file\")\n exit()\n list_of_deciphered = []\n for fileToDecipher in input_files:\n list_of_deciphered.append(decipher_file(key, fileToDecipher, bytearray(\n [int(i) for i in json_dict[get_file_name(fileToDecipher[0:-4])].strip('][').split(', ')]), json_dict))\n\n return list_of_deciphered\n"
},
{
"alpha_fraction": 0.5671342611312866,
"alphanum_fraction": 0.5771543383598328,
"avg_line_length": 18.959999084472656,
"blob_id": "febe556f97f30bc1c33e09d86673e5dee24f7f04",
"content_id": "4574100a34f280379356bd8fad6fe9b7c80c7b88",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 998,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 50,
"path": "/SecondaryFunctions.py",
"repo_name": "AntoineGuillard/CryptoProject",
"src_encoding": "UTF-8",
"text": "import os\nfrom pathlib import Path\n\n\n# Check if it is hexadecimal for the key\ndef is_not_hex(s):\n try:\n int(s, 16)\n return False\n except ValueError:\n return True\n\n\n# Check if the file exist\ndef file_or_directory_exist(file_path):\n if Path(file_path).exists():\n return True\n else:\n return False\n\n\ndef directory_exist(path):\n if Path(path).is_dir():\n return True\n else:\n return False\n\n\ndef get_file_name(absolute_path):\n if not os.path.isabs(absolute_path):\n return absolute_path\n # Allow to get the filename without the absolute path\n absolute_path = absolute_path[::-1]\n file_name = \"\"\n i = 0\n while absolute_path[i] != \"/\":\n file_name += absolute_path[i]\n i += 1\n return file_name[::-1]\n\n\ndef get_dir_name(path):\n if not os.path.isabs(path):\n return path\n path = path[::-1]\n i = 0\n while path[i] != \"/\":\n i += 1\n dir_name = path[i:]\n return dir_name[::-1]\n"
},
{
"alpha_fraction": 0.6515921354293823,
"alphanum_fraction": 0.6626192331314087,
"avg_line_length": 37.41904830932617,
"blob_id": "f1b0fcefbeb039e9fb8648172e99733e6ceb74b5",
"content_id": "8cd766f2263cff396cde689d15c6b057629a188a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8071,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 210,
"path": "/CryptoFunctions.py",
"repo_name": "AntoineGuillard/CryptoProject",
"src_encoding": "UTF-8",
"text": "from Crypto import Random\nfrom Crypto.Cipher import AES\nfrom Crypto.Hash import SHA512\nfrom Crypto.Protocol.KDF import PBKDF2\nfrom cryptography.hazmat.backends import default_backend\nfrom cryptography.hazmat.primitives import cmac\nfrom cryptography.hazmat.primitives.ciphers import algorithms\n\n# noinspection PyTypeChecker\nfrom SecondaryFunctions import get_file_name\n\n\ndef kdf(key):\n salt = b'e\\xd6e\\xfcY\\x0f|\\t\\xa3\\xd2\\x15\\xbe\\x8a\\xa9x\\x8c'\n keys = PBKDF2(key, salt, 64, count=1000000, hmac_hash_module=SHA512)\n return keys[19:35]\n\n\ndef padding(bytes_file):\n if len(bytes_file) % AES.block_size == 0:\n return bytes_file\n size = (len(bytes_file) // AES.block_size) + 1\n bytes_file = bytes_file.ljust(AES.block_size * size, b'\\0')\n return bytes_file\n\n\ndef cipher_file(key, input_file):\n # Open file in reading binary mode and store it inside a variable\n with open(input_file, 'rb') as fileToCipher:\n new_byte_file = bytearray(fileToCipher.read())\n fileToCipher.close()\n\n size_of_file = len(new_byte_file)\n\n # Convert the key to the right format\n bytes_key = bytes.fromhex(key)\n # Create Cipher function\n cipher = AES.new(bytes_key, AES.MODE_ECB)\n\n if size_of_file % AES.block_size == 0:\n return [input_file] + cipher_func(cipher, new_byte_file)\n return [input_file] + cts_cipher(cipher, new_byte_file)\n\n\ndef cipher_func(cipher_, byte_file):\n # Generate IV\n iv = bytearray(Random.new().read(AES.block_size))\n # Select first block of 16 bytes\n block = byte_file[0:AES.block_size]\n # XOR block with iv\n block_xored = bytearray([_a ^ _b for _a, _b in zip(iv, block)])\n # Cipher the result of the precedent XOR\n block_ciphered = bytearray(cipher_.encrypt(block_xored))\n # Store the result in a variable\n bytes_ciphered = block_ciphered\n\n # Repeat the process to create CBC operation mode instead of ECB\n for i in range(1, int(len(byte_file) / AES.block_size)):\n block = byte_file[i * AES.block_size:(i + 1) * AES.block_size]\n block_xor = bytearray([_a ^ _b for _a, _b in zip(block_ciphered, block)])\n block_ciphered_next = cipher_.encrypt(block_xor)\n block_ciphered = block_ciphered_next\n bytes_ciphered += block_ciphered\n return [bytes_ciphered, iv]\n\n\ndef cts_cipher(cipher_method, byte_file):\n # CTS Cipher\n # Store the size of the file\n size_of_file = len(byte_file)\n # First part for cipher in cts is the same so we call the function for file which are multiple of 16 bytes\n classic_cipher = cipher_func(cipher_method, padding(byte_file))\n bytes_ciphered = classic_cipher[0]\n\n # 1 We store the \"n-1\" block in a variable\n block_before_cut = bytes_ciphered[-(AES.block_size * 2):-AES.block_size]\n # 2 Cut to the required size\n block_after_cut = block_before_cut[0:-(AES.block_size - size_of_file % AES.block_size)]\n # 3 This one is the new n-1 block\n before_last = bytes_ciphered[-AES.block_size:]\n\n # The n and n-1 ciphered bytes are removed to be replaced\n bytes_ciphered = bytes_ciphered[0:-AES.block_size * 2]\n\n # Adding the new n-1 block\n bytes_ciphered += before_last\n # Adding the new n block\n bytes_ciphered += block_after_cut\n return_value = [bytes_ciphered, classic_cipher[1]]\n return return_value\n\n\ndef decipher_file(key, input_file, iv, json_dict):\n # Generate IV\n with open(input_file, 'rb') as file_to_decipher:\n byte_file_ciphered = bytearray(file_to_decipher.read())\n file_to_decipher.close()\n if cmac(key, byte_file_ciphered) != bytearray(\n [int(i) for i in json_dict[get_file_name(input_file[0:-4]) + 
\".mac\"].strip('][').split(', ')]):\n print(\"The file has been altered\\nNow Exiting\")\n exit()\n else:\n print(\"The integrity of the file has been verified\")\n\n size_of_file = len(byte_file_ciphered)\n\n bytes_key = bytes.fromhex(key)\n decipher = AES.new(bytes_key, AES.MODE_ECB)\n # Ciphered Block\n block = bytearray(byte_file_ciphered[0:AES.block_size])\n # Deciphered Before XOR\n block_ciphered_next = bytearray(decipher.decrypt(block))\n # Block Completely Deciphered\n block_xor = bytearray([_a ^ _b for _a, _b in zip(iv, block_ciphered_next)])\n\n bytes_deciphered = block_xor\n\n if size_of_file % AES.block_size == 0:\n # Iterative to decipher the whole file\n bytes_deciphered += decipher_simple(decipher, byte_file_ciphered, block)\n return [input_file] + [bytes_deciphered]\n\n else:\n bytes_deciphered += cts_decipher(decipher, byte_file_ciphered, block)\n # Remove the unnecessary padding at the end of the file\n bytes_deciphered = bytes_deciphered[0:size_of_file]\n return [input_file] + [bytes_deciphered]\n\n\ndef decipher_simple(decipher, byte_file_ciphered, block):\n bytes_deciphered = bytearray()\n for i in range(1, int(len(byte_file_ciphered) / AES.block_size) - 1):\n # Current Ciphered Block\n current_block = byte_file_ciphered[i * AES.block_size:(i + 1) * AES.block_size]\n # Decipher current block\n block_deciphered = decipher.decrypt(current_block)\n block_xor = bytearray([_a ^ _b for _a, _b in zip(block_deciphered, block)])\n block = current_block\n bytes_deciphered += block_xor\n\n return bytes_deciphered\n\n\ndef cts_decipher(decipher, byte_file_ciphered, block_):\n # Initialize variable to later store the n-2 block cipher text to xor with the n-1 block\n to_xor_later = 0\n size_of_file = len(byte_file_ciphered)\n bytes_deciphered = bytearray()\n\n for i in range(1, len(byte_file_ciphered) // AES.block_size - 1):\n # Current Ciphered Block\n current_block = byte_file_ciphered[i * AES.block_size:(i + 1) * AES.block_size]\n # Decipher current block\n\n # Store the cipher text for later\n if i == int(len(byte_file_ciphered) / AES.block_size) - 2:\n to_xor_later = current_block\n\n if i < int(len(byte_file_ciphered) / AES.block_size) - 1:\n # Decipher block\n block_deciphered = decipher.decrypt(current_block)\n # XOR with previous ciphered block\n block_xor = bytearray([_a ^ _b for _a, _b in zip(block_deciphered, block_)])\n # Store current block to XOR with it at the next iteration\n block_ = current_block\n # Add to the global deciphered\n bytes_deciphered += block_xor\n\n # Get the last part of the ciphered file\n byte_file_ciphered = byte_file_ciphered[-(size_of_file % AES.block_size + AES.block_size):]\n\n # 1 Create the block of 16 bytes n-1\n block_ = byte_file_ciphered[0:AES.block_size]\n\n # 2 Decipher the block\n block_ = decipher.decrypt(block_)\n\n # 3 Get last block of the ciphered file\n last_block = byte_file_ciphered[AES.block_size:]\n\n # 4 Store the end of the block\n end_of_block = block_[-(AES.block_size - len(last_block)):]\n\n # 5 Create n-1 block with last bytes of the ciphered file and the end of the n-1 block to get a 16 bytes block\n before_last = last_block + bytearray(end_of_block)\n\n # 6 Decipher the newly created n-1 block\n before_last_deciphered = decipher.decrypt(before_last)\n\n # 7 XOR with the variable we stored earlier to get the \"plain bytes\" of the file\n before_last_plain = bytearray([_a ^ _b for _a, _b in zip(to_xor_later, before_last_deciphered)])\n\n # 8 XOR with n-1 ciphered bytes to get the last clear bytes\n last = 
bytearray([_a ^ _b for _a, _b in zip(before_last, block_)])\n\n # Add n-1 block to deciphered bytearray\n bytes_deciphered += before_last_plain\n # Add n block to deciphered bytearray\n bytes_deciphered += last\n return bytes_deciphered\n\n\ndef cmac(key, ciphered_bytes):\n # Transform key into binary format\n bytes_key = bytes.fromhex(key)\n # Derive the key to not use the same for encryption and integrity\n derived_key = kdf(bytes_key)\n c = cmac.CMAC(algorithms.AES(derived_key), backend=default_backend())\n c.update(bytes(ciphered_bytes))\n return c.finalize()\n\n\n\n"
}
] | 4 |
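
The CryptoFunctions.py record above authenticates ciphertext with AES-CMAC under a PBKDF2-derived key. A minimal, self-contained sketch of tagging and verifying bytes with the cryptography package; the key and message below are illustrative placeholders, not the project's values:

# Minimal AES-CMAC sketch with the `cryptography` package; key and message are
# placeholders (the project above derives its integrity key via PBKDF2).
import os
from cryptography.hazmat.primitives.cmac import CMAC
from cryptography.hazmat.primitives.ciphers import algorithms

key = os.urandom(16)                      # 128-bit AES key
ciphertext = b"example ciphered bytes"

c = CMAC(algorithms.AES(key))
c.update(ciphertext)
tag = c.finalize()                        # 16-byte authentication tag

# verification recomputes the tag; verify() raises InvalidSignature on mismatch
v = CMAC(algorithms.AES(key))
v.update(ciphertext)
v.verify(tag)
print("integrity check passed")
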
Nstelt/Animation-Generator
|
https://github.com/Nstelt/Animation-Generator
|
3479fc53bcbb280b7d571842d434bb5aa894c28a
|
638d09cfee37ab914aacf268d1c321945f85ffa0
|
3f9313758f1f2b5b9712e84386362d0a4f8844ea
|
refs/heads/master
| 2020-04-13T03:26:31.552109 | 2019-03-12T16:55:22 | 2019-03-12T16:55:22 | 162,931,926 | 2 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6725333333015442,
"alphanum_fraction": 0.692799985408783,
"avg_line_length": 28.682538986206055,
"blob_id": "bfaf33e481846a45975eb498a3204308d0605068",
"content_id": "7e24dcf5da647bdbad67feefc1b64ac89f1fa46f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1875,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 63,
"path": "/blender_test.py",
"repo_name": "Nstelt/Animation-Generator",
"src_encoding": "UTF-8",
"text": "import bpy \nimport os\nfrom mathutils import Vector\n\n#delete cube\nbpy.ops.object.delete() \n\n\nnamed_model_folder = os.listdir(\"/home/nolan/Desktop/projects/Animation-Generator/model\")[0]\t\n\nfile_loc = os.getcwd() + \"/model/\" + named_model_folder\n\nfor file in os.listdir(file_loc):\n\tif file.endswith(\".obj\"):\n\t\tbreak\n\nfile_loc_obj = file_loc + \"/\" + file\n\nimported_object = bpy.ops.import_scene.obj(filepath=file_loc_obj)\nobj_object = bpy.context.selected_objects[0] ####<--Fix\n\n\nfor area in bpy.context.screen.areas: # iterate through areas in current screen\n if area.type == 'VIEW_3D':\n for space in area.spaces: # iterate through spaces in current VIEW_3D area\n if space.type == 'VIEW_3D': # check if space is a 3D view\n space.viewport_shade = 'MATERIAL' # set the viewport shading to rendered\n \n\nscene = bpy.context.scene\nscene.frame_end = 50\n\nbpy.context.scene.objects.active = obj_object\nkule = bpy.context.object\nkostka = scene.objects[0]\n\nbpy.context.scene.render.image_settings.file_format = 'AVI_JPEG'\n\n# start with frame 0\nnumber_of_frame = 0 \npositions = (2,0,0.5),(2,0,1),(2,0,1.5),(2,0,2),(2,0,2.5)\n\nfor pozice in positions:\n\n # now we will describe frame with number $number_of_frame\n scene.frame_set(number_of_frame)\n\n # set new location for sphere $kule and new rotation for cube $kostka\n kule.location = (0,0,0)#pozice\n kule.keyframe_insert(data_path=\"location\", index=-1)\n\n kostka.rotation_euler = pozice\n kostka.keyframe_insert(data_path=\"rotation_euler\", index=-1)\n\n # move next 10 frames forward - Blender will figure out what to do between this time\n number_of_frame += 10\n \n \nbpy.context.scene.render.filepath = \"/home/nolan/Desktop/projects/Animation-Generator/render/\"\n\nbpy.ops.render.render(animation = True, use_viewport = True)\n \n#bpy.ops.render.play_rendered_anim() \n\n\n\n\n"
},
{
"alpha_fraction": 0.719671368598938,
"alphanum_fraction": 0.7216046452522278,
"avg_line_length": 29.41176414489746,
"blob_id": "2f430531679ae1693516d3ea6b7dcde7bf44a19f",
"content_id": "db3f38667c9f2e14841228847670f90fc82d53a5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2069,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 68,
"path": "/themodelresouce_downloader.py",
"repo_name": "Nstelt/Animation-Generator",
"src_encoding": "UTF-8",
"text": "import time\nimport urllib\nimport zipfile \nimport os\nimport subprocess\nimport sys\n\nfrom selenium import webdriver\nfrom selenium.webdriver.common.keys import Keys\nfrom selenium.webdriver.firefox.firefox_profile import FirefoxProfile\n\n#the following stops the loading of un-needed webpage content \n\n#get the Firefox profile object\nfirefoxProfile = FirefoxProfile()\n#disable CSS\nfirefoxProfile.set_preference('permissions.default.stylesheet', 2)\n#disable images\nfirefoxProfile.set_preference('permissions.default.image', 2)\n#disable Flash\nfirefoxProfile.set_preference('dom.ipc.plugins.enabled.libflashplayer.so',\n 'false')\n \n#create broswer instance and open model site \nbrowser = webdriver.Firefox(firefoxProfile)\nbrowser.get('https://www.models-resource.com/')\n\n#get the model search term from program args as a string\nsearch_term = ' '.join(sys.argv[1:])\n\nsearch = browser.find_element_by_name('q')\nsearch.send_keys(search_term)\nsearch.send_keys(Keys.RETURN)\n\n#wait for search results to load\ntime.sleep(5)\n\n#now we are at the search result page \n#for now, just grab the first model link\nelems = browser.find_elements_by_xpath(\"//a[@href]\")\nmodel_link = \"\"\nfor elem in elems:\n curr_link = elem.get_attribute(\"href\")\n if \"/model/\" in curr_link: \n \tmodel_link = curr_link \n \tbreak\n \t\n#print \"link to model: \" + model_link\n\nbrowser.get(model_link)\n\ndownload = browser.find_element_by_link_text(\"Download this Model\")\n\ndownload_url = download.get_attribute(\"href\")\n\n#donwnload model zip\nurllib.urlretrieve(download_url, '/home/nolan/Desktop/projects/Animation-Generator/model.zip') \n\n#extract zip into model folder\nwith zipfile.ZipFile('/home/nolan/Desktop/projects/Animation-Generator/model.zip', 'r') as zip_ref:\n zip_ref.extractall('/home/nolan/Desktop/projects/Animation-Generator/model')\n\n#remove zip\nos.remove('/home/nolan/Desktop/projects/Animation-Generator/model.zip')\n\n#subprocess.call([\"blender\", \"--python_test.py\"])\n\nbrowser.quit()\n\n"
},
{
"alpha_fraction": 0.7622377872467041,
"alphanum_fraction": 0.7692307829856873,
"avg_line_length": 34.75,
"blob_id": "a928c045d621bf14e05895aaf00daab5e2dda9f1",
"content_id": "ca7a0cde2b6575065deba43b68aed34abc5c8191",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 143,
"license_type": "no_license",
"max_line_length": 102,
"num_lines": 4,
"path": "/README.md",
"repo_name": "Nstelt/Animation-Generator",
"src_encoding": "UTF-8",
"text": "# Animation-Generator\n\n** Unfinished **\nProgram to scrape random 3d models from the web, load them into blender, then animate and render them.\n"
}
] | 3 |
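
The downloader in the record above uses the Python 2 urllib API (urllib.urlretrieve). A minimal Python 3 sketch of the same download-and-extract step; the URL and paths are placeholders, since the real script scrapes the link with Selenium:

# Python 3 sketch of the download-and-unzip step; download_url is a placeholder.
import os
import zipfile
import urllib.request

download_url = "https://example.com/model.zip"   # placeholder for the scraped link
zip_path = "model.zip"

urllib.request.urlretrieve(download_url, zip_path)
with zipfile.ZipFile(zip_path) as zip_ref:
    zip_ref.extractall("model")
os.remove(zip_path)
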
kira-youshikage/zhengqi
|
https://github.com/kira-youshikage/zhengqi
|
fcdd8bc03b06ae4cbf0aa2933df42e606b184be0
|
f46e2f2ca878365788ff393884d5b0ec3d259a2b
|
34767a8686aa08afe875ef4e399ccfd2ee319794
|
refs/heads/master
| 2020-05-24T10:32:27.205046 | 2019-05-18T06:53:58 | 2019-05-18T06:53:58 | 187,229,288 | 1 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6536412239074707,
"alphanum_fraction": 0.6678507924079895,
"avg_line_length": 31,
"blob_id": "d91af6507e90e3b87fa71df368db76be19cf7920",
"content_id": "82d9f64c55993aa1921c4af595e308c3376d3c63",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 563,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 17,
"path": "/工业蒸汽量预测/utils/plot.py",
"repo_name": "kira-youshikage/zhengqi",
"src_encoding": "UTF-8",
"text": "import numpy as np\r\nimport matplotlib.pyplot as plt\r\nimport seaborn as sns\r\nfrom config import DefaultConfig\r\n\r\nopt = DefaultConfig()\r\n\r\ndef distribution_view(train_data, validation_data, test_data):\r\n\ttrain = np.row_stack((train_data, validation_data))\r\n\tn = train_data.shape[1]\r\n\tplt.figure(1)\r\n\tplt_number = [5,8,0]\r\n\tfor i in range(n):\r\n\t\tplt.subplot(plt_number[0], plt_number[1], i + 1)\r\n\t\tsns.distplot(train[:, i], hist = False, rug = False,label = 'train' + str(i))\r\n\t\tsns.distplot(test_data[:,i], hist = False, rug = False,label = 'test')\r\n\tplt.show()\r\n\r\n"
},
{
"alpha_fraction": 0.7923076748847961,
"alphanum_fraction": 0.7923076748847961,
"avg_line_length": 30.5,
"blob_id": "36bc683bab6205a5a2e52035708494f35bb86aec",
"content_id": "7c0f5e7e4d9a4a46d6435705a436f28ab079058d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 260,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 8,
"path": "/工业蒸汽量预测/model/__init__.py",
"repo_name": "kira-youshikage/zhengqi",
"src_encoding": "UTF-8",
"text": "from .sklearn_mlp import sklearn_mlp\r\n\r\nfrom .torch_mlp import torch_mlp\r\nfrom .torch_mlp import torch_train\r\nfrom .torch_mlp import torch_predict\r\n\r\nfrom .regression_analysis import Linear_model_build\r\nfrom .regression_analysis import polynomial_model_build\r\n"
},
{
"alpha_fraction": 0.6055470108985901,
"alphanum_fraction": 0.6178736686706543,
"avg_line_length": 20.379310607910156,
"blob_id": "c7e89ccc82e2e2b5356174dddea0943dd2d4ae79",
"content_id": "873c40f2cd06626a2f17448137be5338093437da",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1512,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 58,
"path": "/工业蒸汽量预测/ANOVA.py",
"repo_name": "kira-youshikage/zhengqi",
"src_encoding": "UTF-8",
"text": "#方差分析\r\n\"\"\"\r\n第一次修改内容:\r\n\t代码规范化\r\n第二次修改内容:\r\n\t数据名字通化\r\n\"\"\"\r\nfrom statsmodels.formula.api import ols\r\nfrom statsmodels.stats.anova import anova_lm\r\nfrom statsmodels.stats.multicomp import pairwise_tukeyhsd\r\nfrom scipy import stats\r\nimport pandas as pd\r\nimport numpy as np\r\n'''\r\ndef ANOVA(x):\r\n\t#获得数据矩阵的行数和列数\r\n\tsize_r,size_c = x.shape\r\n\t#给每一列命名,从A开始\r\n\tname = []\r\n\tfor i in range(size_c):\r\n\t\ta = 'A' + str(i)\r\n\t\tname.append(a)\r\n\t#原数据矩阵x转DataFrame,列索引是A,B,C……\r\n\tdf = pd.DataFrame(x,columns = name)\r\n\tformula = name[size_c-1] + '~'\r\n\tfor i in range(size_c - 2):\r\n\t\tformula =formula +'C('+ name[i] + ')+'\r\n\tformula = formula+'C(' + name[size_c - 2]+')'\r\n\t#print(formula)\r\n\tanova_results = anova_lm(ols(formula,df).fit())\r\n\treturn anova_results\r\n'''\r\ndef ANOVA(x):\r\n\t#获得数据矩阵的行数和列数\r\n\tsize_r,size_c = x.shape\r\n\t#给每一列命名,从A开始\r\n\tname = []\r\n\tfor i in range(size_c):\r\n\t\ta = 'A' + str(i)\r\n\t\tname.append(a)\r\n\t#原数据矩阵x转DataFrame,列索引是A,B,C……\r\n\tdf = pd.DataFrame(x,columns = name)\r\n\tformula = name[size_c-1] + '~'\r\n\tfor i in range(size_c - 2):\r\n\t\tformula =formula + name[i] + '+'\r\n\tformula = formula + name[size_c - 2]\r\n\t#print(formula)\r\n\tanova_results = anova_lm(ols(formula,df).fit())\r\n\treturn anova_results\r\n\r\n'''\r\n#以下为测试用\r\ndef test():\r\n\tx=np.mat([[1,2],[2,3],[2,2],[1,3],[1,5]])\r\n\tanova_results=ANOVA(x)\r\n\r\ntest()\r\n'''\r\n"
},
{
"alpha_fraction": 0.717322826385498,
"alphanum_fraction": 0.7204724550247192,
"avg_line_length": 37.75,
"blob_id": "8b6e61e4b69fd5aa205af839c264786157f758c2",
"content_id": "20212eccf2cf893960f8c802ea4fbd532a984200",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1270,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 32,
"path": "/工业蒸汽量预测/model/sklearn_mlp.py",
"repo_name": "kira-youshikage/zhengqi",
"src_encoding": "UTF-8",
"text": "import numpy as np\r\nfrom sklearn.neural_network import MLPClassifier\r\nfrom sklearn.neural_network import MLPRegressor\r\nfrom sklearn.metrics import mean_squared_error\r\nfrom config import DefaultConfig\r\n\r\nopt = DefaultConfig()\r\n\r\nclass sklearn_mlp():\r\n\tdef __init__(self, solver = opt.sklearn_solver, alpha = opt.sklearn_alpha, \r\n\t\t\t\thidden_layer_sizes = opt.sklearn_hidden_layer_sizes, \r\n\t\t\t\trandom_state = opt.sklearn_random_state, activation = opt.sklearn_activation):\r\n\t\tself.mlp = MLPRegressor(solver = solver, alpha = alpha, \r\n\t\t\thidden_layer_sizes = hidden_layer_sizes, random_state = random_state, \r\n\t\t\tactivation=activation)\r\n\r\n\tdef train(self, train_data, train_label, validation_data, validation_label):\r\n\t\tself.mlp.fit(train_data, train_label)\r\n\t\tloss = self.get_loss(validation_data, validation_label)\r\n\t\tprint(\"loss = \",end = '\\t')\r\n\t\tprint(loss)\r\n\r\n\tdef get_loss(self, validation_data, validation_label):\r\n\t\tpredict_y = self.mlp.predict(validation_data)\r\n\t\tpredict_y = predict_y.reshape([validation_data.shape[0], 1])\r\n\t\tloss = mean_squared_error(predict_y, validation_label)\r\n\t\treturn loss\r\n\r\n\tdef predict(self, test_x):\r\n\t\tpredict_np = np.array(self.mlp.predict(test_x))\r\n\t\tpredict_np = np.reshape(predict_np, [predict_np.shape[0],1])\r\n\t\treturn predict_np"
},
{
"alpha_fraction": 0.6221122145652771,
"alphanum_fraction": 0.632013201713562,
"avg_line_length": 17.483871459960938,
"blob_id": "072129bafa9204854d869844a7920017334fce55",
"content_id": "fff012fd95e95b01b64f05ac9ed2adb4d8f99a81",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 946,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 31,
"path": "/工业蒸汽量预测/txt.py",
"repo_name": "kira-youshikage/zhengqi",
"src_encoding": "UTF-8",
"text": "'''\r\n包含了对txt的读取和写入功能\r\n例如read_data('读取测试.txt')\r\nwrite_data中的参数是矩阵\r\n'''\r\n\"\"\"\r\n第一次修改内容:\r\n 代码规范化\r\n\"\"\"\r\nimport numpy as np\r\n\r\ndef read_data(ID):\r\n\tf = open(ID,\"r\") #设置文件对象\r\n\tstr_str = f.read()\r\n\tf.close() #关闭文件\r\n\treturn str_str\r\n\r\n#写入数据\r\n#如果在写之前先读取一下文件,再进行写入,则写入的数据会添加到文件末尾而不会替换掉原先的文件。\r\n#这是因为指针引起的,r+ 模式的指针默认是在文件的开头,如果直接写入,则会覆盖源文件,\r\n#通过read() 读取文件后,指针会移到文件的末尾,再写入数据就不会有问题了。\r\ndef write_data(ID,data_mat):\r\n\tf2 = open(ID,'wt+')\r\n\tsize_r,size_c = data_mat.shape\r\n\tfor i in range(size_r):\r\n\t\tfor j in range(size_c):\r\n\t\t\tf2.read()\r\n\t\t\tf2.write(str(data_mat[i,j]))\r\n\t\tf2.read()\r\n\t\tf2.write('\\n')\r\n\tf2.close()\r\n\r\n"
},
{
"alpha_fraction": 0.6618704795837402,
"alphanum_fraction": 0.6759712100028992,
"avg_line_length": 26.260162353515625,
"blob_id": "4e800701f610ea11ea7702fe22e66a68697e0586",
"content_id": "18df410c0f9771b686d0a9fc9d6647a070f21491",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3821,
"license_type": "no_license",
"max_line_length": 89,
"num_lines": 123,
"path": "/工业蒸汽量预测/model/regression_analysis.py",
"repo_name": "kira-youshikage/zhengqi",
"src_encoding": "UTF-8",
"text": "#回归分析\r\n\"\"\"\r\n第一次修改内容:\r\n 代码规范化\r\n\"\"\"\r\nfrom sklearn.linear_model import LinearRegression\r\nfrom sklearn.metrics import mean_squared_error\r\nimport matplotlib.pyplot as plt\r\nfrom sklearn.preprocessing import PolynomialFeatures\r\nimport numpy as np\r\nimport math\r\nfrom config import DefaultConfig\r\n\r\nopt = DefaultConfig()\r\n\r\n#模型评估指标(均方差)\r\ndef MSE(test_target, predict_target):\r\n\tmes = mean_squared_error(test_target, predict_target)\r\n\tprint(\"mse = %0.2f\"%(mes))\r\n\treturn mes\r\n\r\n#模型评估指标(残差图)\r\ndef plot_residual(real_y,predicted_y):\r\n\tplt.cla()\r\n\tplt.xlabel(\"Predicted Y\")\r\n\tplt.ylabel(\"Residual\")\r\n\tplt.title(\"Residual Plot\")\r\n\tplt.figure(1)\r\n\tdiff = real_y - predicted_y\r\n\tplt.plot(predicted_y,diff,'go')\r\n\tplt.show()\r\n\r\n#查看模型系数\r\ndef model_coef_view(model):\r\n\tmodel_coef = model.coef_\r\n\tprint(model_coef)\r\n\treturn model_coef\r\n\r\n#模型测试函数\r\ndef model_test(test_x, test_y,model):\r\n\tpredict_y = model.predict(test_x)\r\n\t#输出均方差\r\n\tmse=MSE(test_y, predict_y)\r\n\t#作残差图\r\n\tplot_residual(test_y, predict_y)\r\n\t#模型的R方\r\n\tprint(\"模型的R方是:%0.3f\" %model.score(test_x, test_y))\r\n\r\n#线性回归模型建立函数\r\ndef Linear_model_build(train_x, train_y):\r\n\tmodel = LinearRegression(normalize=opt.linear_normalize, fit_intercept=True)\r\n\tmodel.fit(train_x, train_y)\r\n\treturn model\r\n\r\n#多项式回归模型建立函数,二次函数拟合的时候:features=2\r\ndef polynomial_model_build(train_x, test_x, predict_x, train_y, features = opt.features):\r\n\tpoly_features = PolynomialFeatures(features)\r\n\tpoly_features.fit(train_x)\r\n\t#训练集的特征变换\r\n\tpoly_train_x = poly_features.transform(train_x)\r\n\t#测试集的特征变换\r\n\tpoly_test_x = poly_features.transform(test_x)\r\n\t#预测集的特征变换\r\n\tpoly_predict_x = poly_features.transform(predict_x)\r\n\tpoly_model = Linear_model_build(poly_train_x, train_y)\r\n\treturn poly_model, poly_train_x, poly_test_x, poly_predict_x\r\n\r\n#指数回归模型建立函数\r\ndef exp_model_build(train_x, test_x, predict_x, train_y):\r\n\t#训练集的特征变换\r\n\tsize_r,size_c = train_x.shape\r\n\texp_train_x = np.zeros((size_r, size_c))\r\n\tfor i in range(size_r):\r\n\t\tfor j in range(size_c):\r\n\t\t\texp_train_x[i,j] = math.exp(train_x[i,j])\r\n\t#测试集的特征变换\r\n\tsize_r,size_c = test_x.shape\r\n\texp_test_x = np.zeros((size_r,size_c))\r\n\tfor i in range(size_r):\r\n\t\tfor j in range(size_c):\r\n\t\t\texp_test_x[i,j] = math.exp(test_x[i,j])\r\n\t#预测集的特征变换\r\n\tsize_r,size_c = predict_x.shape\r\n\texp_predict_x = np.zeros((size_r,size_c))\r\n\tfor i in range(size_r):\r\n\t\tfor j in range(size_c):\r\n\t\t\texp_predict_x[i,j] = math.exp(predict_x[i,j])\r\n\texp_model=Linear_model_build(exp_train_x,train_y)\r\n\treturn exp_model,exp_train_x,exp_test_x,exp_predict_x\r\n\r\n'''\r\n#多项式回归测试函数\r\nif __name__ == '__main__':\r\n\ttrain_x = np.random.rand(10,3)\r\n\ttrain_y = np.random.rand(10,1)\r\n\ttest_x = np.random.rand(5,3)\r\n\ttest_y = np.random.rand(5,1)\r\n\tpredict_x = np.random.rand(5,3)\r\n\tpoly_model, poly_train_x, poly_test_x, poly_predict_x = \\ \r\n\t\tpolynomial_model_build(train_x,test_x,predict_x,train_y,2)\r\n\tmodel_test(poly_test_x,test_y,poly_model)\r\n\tprint(poly_train_x)\r\n\tprint(train_x)\r\n\tmodel_coef_view(poly_model)\r\n若train_x=np.mat([[x1,x2,x3]]),features=2\r\n则poly_train_x=np.mat([[1,x1,x2,x3,x1*x1,x1*x2,x1*x3,x2*x3,x3*x3]])\r\n'''\r\n\r\n'''\r\n#指数回归测试函数\r\nif __name__ == '__main__':\r\n\ttrain_x = np.random.rand(10,3)\r\n\ttrain_y = np.random.rand(10,1)\r\n\ttest_x = np.random.rand(5,3)\r\n\ttest_y = 
np.random.rand(5,1)\r\n\tpredict_x = np.random.rand(5,3)\r\n\texp_model, exp_train_x, exp_test_x, exp_predict_x = \\ \r\n\t\texp_model_build(train_x,test_x,predict_x,train_y)\r\n\tmodel_test(exp_test_x,test_y,exp_model)\r\n\tprint(exp_train_x)\r\n\tprint(train_x)\r\n\tmodel_coef_view(exp_model)\r\n'''\t"
},
{
"alpha_fraction": 0.6597346663475037,
"alphanum_fraction": 0.6694155335426331,
"avg_line_length": 36.15068435668945,
"blob_id": "4f92a8ab03b9dcd7b2896ff021cd503e68fc789e",
"content_id": "e7b36104e119d1a19bab850ffbe87bb680e2bb53",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2813,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 73,
"path": "/工业蒸汽量预测/model/torch_mlp.py",
"repo_name": "kira-youshikage/zhengqi",
"src_encoding": "UTF-8",
"text": "import torch as pt\r\nimport numpy as np\r\nimport torch.utils.data\r\nfrom config import DefaultConfig\r\n\r\nopt = DefaultConfig()\r\n\r\nclass torch_mlp(pt.nn.Module):\r\n\tdef __init__(self, train_data_nc, train_label_nc):\r\n\t\tsuper(torch_mlp, self).__init__()\r\n\t\tself.fc1 = pt.nn.Linear(train_data_nc, 47)\r\n\t\tself.fc2 = pt.nn.Linear(47, 11)\r\n\t\tself.fc3 = pt.nn.Linear(11, 10)\r\n\t\tself.fc4 = pt.nn.Linear(10, train_label_nc)\r\n\r\n\tdef forward(self, x):\r\n\t\tx = pt.nn.functional.relu(self.fc1(x))\r\n\t\tx = pt.nn.functional.dropout(x, p = opt.torch_p_1, training = self.training)\r\n\t\tx = pt.nn.functional.relu(self.fc2(x))\r\n\t\tx = pt.nn.functional.dropout(x, p = opt.torch_p_2, training = self.training)\r\n\t\tx = pt.nn.functional.relu(self.fc3(x))\r\n\t\tx = pt.nn.functional.dropout(x, p = opt.torch_p_3, training = self.training)\r\n\t\tx = self.fc4(x)\r\n\t\treturn x\r\n\r\ndef torch_train(train_data, train_label, validation_data, validation_label, \r\n\t\t optimizer = opt.torch_optimizer,learning_rate = opt.torch_learning_rate, \r\n\t\t momentum = opt.torch_moment, batch_size = opt.torch_batch_size, \r\n\t\t net = None, epoch_time = opt.torch_epoch_time):\r\n\ttrain_data_nc = train_data.shape[1]\r\n\ttrain_label_nc = train_label.shape[1]\r\n\tif net == None:\r\n\t\tnet = torch_mlp(train_data_nc, train_label_nc)\r\n\tif optimizer == 'Momentum':\r\n\t\toptimizer = pt.optim.SGD(net.parameters(), \r\n\t\t\t\t\t\t\t\tlr = learning_rate, momentum = momentum)\r\n\telif optimizer == 'Adam':\r\n\t\toptimizer = pt.optim.Adam(net.parameters(), lr = learning_rate)\r\n\telif optimizer == 'SGD':\r\n\t\toptimizer = pt.optim.SGD(net.parameters(), lr = learning_rate)\r\n\telse:\r\n\t\tprint(\"优化器参数输入错误\")\r\n\t\treturn\r\n\ttrain_data = pt.autograd.Variable(pt.from_numpy(train_data))\r\n\ttrain_label = pt.autograd.Variable(pt.from_numpy(train_label))\r\n\tvalidation_data = pt.from_numpy(validation_data)\r\n\tvalidation_label = pt.from_numpy(validation_label)\r\n\ttorch_dataset = torch.utils.data.TensorDataset(train_data, train_label)\r\n\ttrain_loader = torch.utils.data.DataLoader(torch_dataset,\r\n\t\tbatch_size = batch_size, shuffle = True)\r\n\tLoss = pt.nn.MSELoss()\r\n\tfor i in range(epoch_time):\r\n\t\tnet.train()\r\n\t\tfor batch_idx, (data, label) in enumerate(train_loader):\r\n\t\t\tdata = data.reshape(-1, train_data_nc)\r\n\t\t\tpredict = net(data)\r\n\t\t\tloss = Loss(predict.float(), label.float())\r\n\t\t\toptimizer.zero_grad()\r\n\t\t\tloss.backward()\r\n\t\t\toptimizer.step()\r\n\t\tif i % opt.torch_time == 0:\r\n\t\t\tprint(str(i) + \"\\t\" + \"epoch:\")\r\n\t\t\tprint(\"\\t\" + \"train_loss:\" + str(loss))\r\n\t\t\tpredict = net(validation_data)\r\n\t\t\tloss = Loss(predict.float(), validation_label.float())\r\n\t\t\tprint(\"\\t\" + \"validation_loss:\" + str(loss))\r\n\treturn net\r\n\r\ndef torch_predict(test_data, net):\r\n\ttest_data = pt.autograd.Variable(torch.from_numpy(test_data))\r\n\tpredict = net(test_data)\r\n\tpredict = predict.data.numpy()\r\n\treturn predict\r\n\r\n\r\n"
},
{
"alpha_fraction": 0.5522903800010681,
"alphanum_fraction": 0.6119273900985718,
"avg_line_length": 18.660715103149414,
"blob_id": "b8ee40659bd1bca8bb0fb936b0cb1e5f4754302b",
"content_id": "e5fe677e3a090a1cb8c007c4cf2e7ea2d809ae7d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1293,
"license_type": "no_license",
"max_line_length": 46,
"num_lines": 56,
"path": "/工业蒸汽量预测/config.py",
"repo_name": "kira-youshikage/zhengqi",
"src_encoding": "UTF-8",
"text": "class DefaultConfig(object):\r\n\t# 数据地址\r\n\ttrain_data_address = 'data/zhengqi_train.txt'\r\n\ttest_data_address = 'data/zhengqi_test.txt'\r\n\t# 训练集规模\r\n\ttrain_n = 0.9999\r\n\t# 降维参数(累积贡献度)\r\n\t# 'incrementalPCA' 或 'PCA'\r\n\tpca = 'PCA'\r\n\tpca_n = 31\r\n\t# 标准化参数\r\n\tscale_type = 'min_max'\r\n\t#scale_type = 'z_core'\r\n\t# LOF参数\r\n\tk = 2\r\n\tlof = 30\r\n\t# 方差分析参数F\r\n\tF = 35\r\n\t# 删除特征\r\n\tcharacter = [5, 6,17, 20, 22, 11, 27]\r\n\r\n\t# sklearn_mlp模型参数\r\n\t#sklearn_solver = \"adam\"\r\n\t#sklearn_solver = \"sgd\"\r\n\tsklearn_solver = \"lbfgs\"\r\n\tsklearn_alpha = 0.27\r\n\tsklearn_hidden_layer_sizes = (100)\r\n\t#sklearn_hidden_layer_sizes = (30,20,15,10,5)\r\n\tsklearn_random_state = 1\r\n\t# ('identity', 'logistic', 'tanh', 'relu')\r\n\tsklearn_activation = 'tanh'\r\n\r\n\t# torch_mlp模型参数\r\n\t#torch_p_1 = 0.5\r\n\t#torch_p_2 = 0.5\r\n\t#torch_p_3 = 0.5\r\n\ttorch_p_1 = 0\r\n\ttorch_p_2 = 0\r\n\ttorch_p_3 = 0\r\n\t#torch_optimizer = 'Momentum'\r\n\ttorch_moment = 0.5\r\n\ttorch_optimizer = 'SGD'\r\n\t#torch_optimizer = 'Adam'\r\n\ttorch_learning_rate = 0.01\r\n\ttorch_batch_size = 200\r\n\ttorch_epoch_time = 180\r\n\ttorch_time = 10\r\n\r\n\t# 回归分析参数\r\n\t#多项式回归参数\r\n\tfeatures = 1\r\n\t#线性模型标准化\r\n\tlinear_normalize = True\r\n\r\n\t# 工具参数\r\n\ttxt_path = 'utils/test_label.txt'\r\n"
},
{
"alpha_fraction": 0.6367006301879883,
"alphanum_fraction": 0.6524559855461121,
"avg_line_length": 21.955554962158203,
"blob_id": "625354bee63895524a32e7561cc424d9c9ef15d5",
"content_id": "96cd5b15668cf70342429a8ccbe1dd931df62799",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3909,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 135,
"path": "/工业蒸汽量预测/LOF.py",
"repo_name": "kira-youshikage/zhengqi",
"src_encoding": "UTF-8",
"text": "'''\r\n利用局部异常因子(LOF)来找出异常点\r\n'''\r\n\"\"\"\r\n第一次修改内容:\r\n 代码规范化\r\n\"\"\"\r\n#准备工作\r\nfrom collections import defaultdict\r\nimport numpy as np\r\nimport xlrd\r\nimport xlwt\r\n\r\nimport os \r\nimport matplotlib.pyplot as plt\r\nimport heapq\r\nfrom sklearn.metrics import pairwise_distances\r\n\r\n#数据点\r\ninstances = np.matrix([[0,0],[0,1],[1,1],[1,0],[5,0]])\r\n\r\n\r\n#计算K距离邻居:\r\ndef all_indices(value,inlist):\r\n\tout_indices = []\r\n\tidx = -1\r\n\twhile True:\r\n\t\ttry:\r\n\t\t\tidx = inlist.index(value,idx + 1)\r\n\t\t\tout_indices.append(idx)\r\n\t\texcept ValueError:\r\n\t\t\tbreak\r\n\treturn out_indices\r\n\r\ndef LOF(instances,k):\r\n\t#操作方法\r\n\t#获取点两两之间的距离pairwise_distances\r\n\tk = k\r\n\tdistance = 'manhattan'\r\n\tdist = pairwise_distances(instances,metric=distance)\r\n\t#计算K距离,使用heapq来获得K最近邻\r\n\t#计算K距离\r\n\tk_distance = defaultdict(tuple)\r\n\t#对每个点进行计算\r\n\tfor i in range(instances.shape[0]):\r\n\t\t#获得它和其他所有点之间的距离\r\n\t\t#为了方便,将数组转为列表\r\n\t\tdistances = dist[i].tolist()\r\n\t\t#获得K最近邻\r\n\t\tksmallest = heapq.nsmallest(k + 1,distances)[1:][k - 1]\r\n\t\t#获取它们的索引号\r\n\t\tksmallest_idx = distances.index(ksmallest)\r\n\t\t#记录下每个点的第K给最近邻以及到它的距离\r\n\t\tk_distance[i] = (ksmallest,ksmallest_idx)\r\n\t#计算K距离邻居\r\n\tk_distance_neig = defaultdict(list)\r\n\t#对每个点进行计算\r\n\tfor i in range(instances.shape[0]):\r\n\t\t#获取它到所有邻居点的距离\r\n\t\tdistances = dist[i].tolist()\r\n\t\t#print(\"k distance neighbourhood\",i)\r\n\t\t#print(distances)\r\n\t\t#获取从第1到第K的最近邻\r\n\t\tksmallest = heapq.nsmallest(k + 1,distances)[1:]\r\n\t\t#print(ksmallest)\r\n\t\tksmallest_set = set(ksmallest)\r\n\t\t#print(ksmallest_set)\r\n\t\tksmallest_idx = []\r\n\t\t#获取K里最小的元素的索引号\r\n\t\tfor x in ksmallest_set:\r\n\t\t\tksmallest_idx.append(all_indices(x,distances))\r\n\t\t#将列表的列表转为列表\r\n\t\tksmallest_idx = [item for sublist in ksmallest_idx for item in sublist]\r\n\t\t#对每个点保存其K距离邻居\r\n\t\tk_distance_neig[i].extend(zip(ksmallest,ksmallest_idx))\r\n\t#计算可达距离和LRD:\r\n\t#局部可达密度\r\n\tlocal_reach_density = defaultdict(float)\r\n\tfor i in range(instances.shape[0]):\r\n\t\t#LRD的分子,K距离邻居的个数\r\n\t\tno_neighbours = len(k_distance_neig[i])\r\n\t\tdenom_sum = 0\r\n\t\t#可达距离求和\r\n\t\tfor neigh in k_distance_neig[i]:\r\n\t\t\t#P的K距离和P与Q的距离中的最大者\r\n\t\t\tdenom_sum += max(k_distance[neigh[1]][0],neigh[0])\r\n\t\tlocal_reach_density[i] = no_neighbours / (1.0 * denom_sum)\r\n\t#计算LOF\r\n\tlof_list=[]\r\n\t#计算局部异常因子\r\n\t#越接近1说明p的其邻域点密度差不多,p可能和邻域同属一簇;越大于1,说明p的密度小于其邻域点密度,p越可能是异常点。 \r\n\tfor i in range(instances.shape[0]):\r\n\t\tlrd_sum = 0\r\n\t\trdist_sum = 0\r\n\t\tfor neigh in k_distance_neig[i]:\r\n\t\t\tlrd_sum += local_reach_density[neigh[1]]\r\n\t\t\trdist_sum += max(k_distance[neigh[1]][0],neigh[0])\r\n\t\tlof_list.append((i,lrd_sum*rdist_sum))\r\n\treturn lof_list\r\n\r\n\r\n\r\n'''\r\nif __name__ == '__main__':\r\n\t#数据点\r\n\tdata = get_excel_data()\r\n\tk = 2\r\n\tlof_list = LOF(data,k)\r\n\t#x,y = get_x_y(lof_list)\r\n\t#plt.plot(y,'ro')\r\n\t#plt.show()\r\n\t#判断删除哪些点\r\n\tcounter = 0\r\n\tsize_r,size_c = data.shape\r\n\tfor i in range(size_r):\r\n\t\tif lof_list[i][1]>5:\r\n\t\t\tcounter = counter+1\r\n\t\t\tdata[i,0] = 999\r\n\tprint(counter / size_r)\r\n\tfor i in range(counter):\r\n\t\tfor j in range(len(data)):\r\n\t\t\tif (data[j,0] == '999'):\r\n\t\t\t\tdata = np.delete(data,j,axis = 0)\r\n\t\t\t\tbreak\r\n\tprint(size_r)\r\n\tprint(counter)\r\n\t#将数据写入Excel\r\n\tsize_r,size_c = data.shape\r\n\tworkbook = xlwt.Workbook(encoding = 'ascii')\r\n\tworksheet = workbook.add_sheet('结果')\r\n\tfor i in 
range(size_r):\r\n\t\tfor j in range(size_c):\r\n\t\t\tworksheet.write(i+1, j, label = data[i,j])\r\n\tworkbook.save('排除异常点结果.xls')\r\n'''\r\n \r\n"
},
{
"alpha_fraction": 0.7142857313156128,
"alphanum_fraction": 0.7236024737358093,
"avg_line_length": 22.615385055541992,
"blob_id": "e32f7ce04edb7f25a4532448ee29feb18ea9b9b3",
"content_id": "6c86d0a89374b3002111b0ca6c94c1b994911144",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 746,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 26,
"path": "/工业蒸汽量预测/normalization.py",
"repo_name": "kira-youshikage/zhengqi",
"src_encoding": "UTF-8",
"text": "'''\r\n两种数据标准化方法,min_max_scale是0-1标准化,使得数据在0-1区间内\r\nz_core标准化使得数据的均值为0,方差为1\r\n'''\r\n\"\"\"\r\n第一次修改内容:\r\n 代码规范化\r\n\"\"\"\r\nimport numpy as np\r\nimport pandas as pd\r\nfrom sklearn.preprocessing import MinMaxScaler\r\nfrom sklearn.preprocessing import StandardScaler\r\n\r\ndef min_max_scale(data_mat):\r\n\tscaler = MinMaxScaler()\r\n\tscaler.fit(data_mat)\r\n\tscaler.data_max_\r\n\tdata_scale_mat = scaler.transform(data_mat)\r\n\ta = scaler.inverse_transform(data_scale_mat)\r\n\treturn data_scale_mat\r\n\r\ndef z_core(data_mat):\r\n\tscaler = StandardScaler()\r\n\tdata_scale_mat = scaler.fit_transform(data_mat)\r\n\ta = scaler.inverse_transform(data_scale_mat)\r\n\treturn data_scale_mat\r\n\r\n\r\n"
},
{
"alpha_fraction": 0.6557530164718628,
"alphanum_fraction": 0.6656688451766968,
"avg_line_length": 34.87586212158203,
"blob_id": "72c9210c576c3437e8294eb833b8d6c3aafc28cf",
"content_id": "2c329d0fbd982dbcea54e38d7931a41dafd5ec9a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5403,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 145,
"path": "/工业蒸汽量预测/data/dataset.py",
"repo_name": "kira-youshikage/zhengqi",
"src_encoding": "UTF-8",
"text": "import numpy as np\r\nimport txt\r\nimport math\r\nimport normalization\r\nfrom sklearn.decomposition import PCA, IncrementalPCA\r\nimport ANOVA\r\nimport LOF\r\nfrom config import DefaultConfig\r\n\r\nopt = DefaultConfig()\r\n\r\n# 获得清洗后的数据\r\ndef get_data(train = True, test = False):\r\n\tif test == False:\r\n\t\tdata = txt.read_data(opt.train_data_address)\r\n\t\tdata.replace(' ','')\r\n\t\tdata = data.split('\\n')\r\n\t\tn = len(data)-1\r\n\t\tdata_list = []\r\n\t\tfor i in range(n):\r\n\t\t\trow_data = data[i].split('\\t')\r\n\t\t\tdata_list.append(row_data)\r\n\t\tdata_np = np.float32(np.array(data_list[1:][:]))\r\n\t\tnp.random.shuffle(data_np)\r\n\t\tn = math.floor(data_np.shape[0] * opt.train_n)\r\n\t\tif train == True:\r\n\t\t\ttrain_data = np.array(\r\n\t\t\t\tdata_np[:n, :data_np.shape[1] - 1])\r\n\t\t\ttrain_label = np.array(\r\n\t\t\t\tdata_np[:n, data_np.shape[1] - 1])\r\n\t\t\ttrain_label = train_label.reshape([train_data.shape[0], 1])\r\n\t\t\treturn train_data, train_label\r\n\t\tif train == False:\r\n\t\t\tvalidation_data = np.array(\r\n\t\t\t\tdata_np[n:, :data_np.shape[1] - 1])\r\n\t\t\tvalidation_label = np.array(\r\n\t\t\t\tdata_np[n:, data_np.shape[1] - 1])\r\n\t\t\tvalidation_label = validation_label.reshape([validation_data.shape[0], 1])\r\n\t\t\treturn validation_data, validation_label\r\n\telse:\r\n\t\tdata = txt.read_data(opt.test_data_address)\r\n\t\tdata.replace(' ','')\r\n\t\tdata = data.split('\\n')\r\n\t\tn = len(data)-1\r\n\t\tdata_list = []\r\n\t\tfor i in range(n):\r\n\t\t\trow_data = data[i].split('\\t')\r\n\t\t\tdata_list.append(row_data)\r\n\t\ttest_data = np.float32(np.array(data_list[1:][:]))\r\n\t\ttest_label = np.float32(np.arange(test_data.shape[0]))\r\n\t\ttest_label = test_label.reshape([test_data.shape[0], 1])\r\n\t\treturn test_data, test_label\r\n\r\n# 标准化\r\ndef scale(train_data,validation_data, test_data, type = opt.scale_type):\r\n\ttrain_n = train_data.shape[0]\r\n\tvalidation_n = validation_data.shape[0]\r\n\tdata_np = np.row_stack((train_data, validation_data, test_data))\r\n\tif type == 'min_max':\r\n\t\ttransform = normalization.min_max_scale\r\n\telif type == 'z_core':\r\n\t\ttransform = normalization.z_core\r\n\telse:\r\n\t\tprint(\"标准化的类型输入错误\")\r\n\t\treturn\r\n\tdata_scale = transform(data_np)\r\n\ttrain_scale_data = np.float32(data_scale[:train_n, :])\r\n\tvalidation_scale_data = np.float32(\r\n\t\tdata_scale[train_n:train_n + validation_n, :])\r\n\ttest_scale_data = np.float32(\r\n\t\tdata_scale[train_n + validation_n:,:])\r\n\treturn train_scale_data, validation_scale_data, test_scale_data\r\n\r\n# 排除异常点\r\ndef remove(train_data, train_label, k = opt.k):\r\n\ttrain_remove = np.column_stack((train_data, train_label))\r\n\tLOF_list = LOF.LOF(train_remove, k)\r\n\ttrain_data = list(train_data)\r\n\ttrain_label = list(train_label)\r\n\ttrain_remove_data = []\r\n\ttrain_remove_label = []\r\n\tfor i in range(len(LOF_list)):\r\n\t\tif LOF_list[i][1] <= opt.lof:\r\n\t\t\ttrain_remove_data.append(train_data[i][:])\r\n\t\t\ttrain_remove_label.append(train_label[i])\r\n\ttrain_remove_data = np.array(train_remove_data)\r\n\ttrain_remove_label = np.array(train_remove_label)\r\n\ttrain_remove_label = train_remove_label.reshape(\r\n\t\t[train_remove_data.shape[0], 1])\r\n\treturn train_remove_data, train_remove_label\r\n\r\ndef pca(train_data, validation_data, test_data, pca_n = opt.pca_n, pca_1 = opt.pca):\r\n\ttrain_n = train_data.shape[0]\r\n\tvalidation_n = validation_data.shape[0]\r\n\tdata = np.row_stack((train_data, 
validation_data, test_data))\r\n\tif pca_1 == 'PCA':\r\n\t\tpca_1 = PCA(n_components = pca_n,svd_solver='randomized')\r\n\telif pca_1 == 'incrementalPCA':\r\n\t\tpca_1 = IncrementalPCA(n_components = pca_n)\r\n\tdata = pca_1.fit_transform(data)\r\n\ttrain_PCA_data = np.array(data[:train_n,:])\r\n\tvalidation_PCA_data = np.array(data[train_n:train_n + validation_n,:])\r\n\ttest_PCA_data = np.array(data[train_n + validation_n:,:])\r\n\treturn train_PCA_data, validation_PCA_data, test_PCA_data\r\n\r\n\r\ndef anova(train_data, train_label, validation_data, validation_label, test_data):\r\n\ttrain_n = train_data.shape[0]\r\n\ttrain_nc = train_data.shape[1]\r\n\tdata = np.row_stack((train_data, validation_data))\r\n\tlabel = np.row_stack((train_label, validation_label))\r\n\tdata = np.column_stack((data, label))\r\n\tanova_result = ANOVA.ANOVA(data)\r\n\thead_list = []\r\n\ttrain_anova_data = []\r\n\tvalidation_anova_data = []\r\n\ttest_anova_data = []\r\n\tfor i in range(train_nc):\r\n\t\tindex = 'A' + str(i)\r\n\t\tF = anova_result['F'][index]\r\n\t\tif F > opt.F:\r\n\t\t\tstring = '第' + str(i + 1) + '特征'\r\n\t\t\thead_list.append(string)\r\n\t\t\ttrain_anova_data.append(list(train_data[:,i]))\r\n\t\t\tvalidation_anova_data.append(list(validation_data[:,i]))\r\n\t\t\ttest_anova_data.append(list(test_data[:,i]))\r\n\ttrain_anova_data = np.array(train_anova_data).T\r\n\tvalidation_anova_data = np.array(validation_anova_data).T\r\n\ttest_anova_data = np.array(test_anova_data).T\r\n\treturn train_anova_data, validation_anova_data, test_anova_data\r\n\t#return head_list, train_anova_data, validation_anova_data, test_anova_data\r\n\r\ndef delete_character(train_data, validation_data, test_data, character = opt.character):\r\n\ttrain_n = train_data.shape[0]\r\n\tvalidation_n = validation_data.shape[0]\r\n\tdata = np.row_stack((train_data, validation_data, test_data))\r\n\tdata_list = []\r\n\tfor i in range(data.shape[1]):\r\n\t\tif i not in character:\r\n\t\t\tdata_list.append(data[:, i])\r\n\tdata = np.reshape(np.array(data_list).T, [data.shape[0], data.shape[1] - len(character)])\r\n\ttrain_data = data[:train_n, :]\r\n\tvalidation_data = data[train_n:train_n + validation_n, :]\r\n\ttest_data = data[train_n + validation_n:, :]\r\n\treturn train_data, validation_data, test_data"
},
{
"alpha_fraction": 0.707245409488678,
"alphanum_fraction": 0.7118146419525146,
"avg_line_length": 30.617021560668945,
"blob_id": "9fd5d92495ed97ca2b1ccc93ce3a0f3c211c7c8e",
"content_id": "d5ac0f9b2414acb7fab9fd02c0ac183f70258b05",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3130,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 94,
"path": "/工业蒸汽量预测/main.py",
"repo_name": "kira-youshikage/zhengqi",
"src_encoding": "UTF-8",
"text": "import data\r\nimport model\r\nimport utils\r\nimport numpy as np\r\nfrom sklearn.metrics import mean_squared_error as mse\r\n\r\ntrain_data, train_label = data.get_data(train = True)\r\nvalidation_data, validation_label = data.get_data(train = False, test = False)\r\ntest_data, test_label = data.get_data(train = False, test = True)\r\n\r\n# 查看分布\r\n#utils.distribution_view(train_data, validation_data, test_data)\r\n\r\n# 删除特征\r\ntrain_data, validation_data, test_data = data.delete_character(\r\n\ttrain_data, validation_data, test_data)\r\n\r\n# 排除异常点\r\n#train_data, train_label = data.remove(train_data, train_label)\r\n\r\n# 降维\r\ntrain_data, validation_data, test_data = data.pca(train_data, validation_data, test_data)\r\n\r\n# 方差分析\r\n#train_data, validation_data, test_data = data.anova(train_data, train_label, \r\n#\tvalidation_data, validation_label, test_data)\r\n\r\n# 标准化\r\ntrain_data, validation_data, test_data = data.scale(train_data, validation_data, test_data)\r\n'''\r\nmlp_model = model.sklearn_mlp()\r\nmlp_model.train(train_data, train_label, validation_data, validation_label)\r\nmlp_test_label = mlp_model.predict(test_data)\r\nutils.write_txt(mlp_test_label)\r\npredict = mlp_model.predict(validation_data)\r\n'''\r\n\r\n\r\n'''\r\ntorch_mlp = model.torch_train(train_data, train_label, validation_data, validation_label)\r\ntest_label = model.torch_predict(test_data, torch_mlp)\r\npredict_torch = model.torch_predict(validation_data, torch_mlp)\r\nfor i in range(predict.shape[0]):\r\n\tprint(str(predict[i]) + str(predict_torch[i]) + str(validation_label[i]))\r\n'''\r\n\r\n\r\n\r\n# 线性回归\r\nlinear_model = model.Linear_model_build(train_data, train_label)\r\npredict_linear = linear_model.predict(validation_data)\r\nprint(\"linear_loss:\" + str(mse(predict_linear, validation_label)))\r\nlinear_test_label = linear_model.predict(test_data)\r\nutils.write_txt(linear_test_label)\r\n\r\npredict = linear_model.predict(train_data)\r\ndelete_list = []\r\nfor i in range(train_data.shape[0]):\r\n\tif abs(predict[i] - train_label[i]) > 1:\r\n\t\tdelete_list.append(i)\r\ndata = []\r\nlabel = []\r\nfor i in range(train_data.shape[0]):\r\n\tif i not in delete_list:\r\n\t\tdata.append(list(train_data[i]))\r\n\t\tlabel.append(train_label[i])\r\ntrain_data = np.array(data)\r\ntrain_label = np.reshape(np.array(label),[-1,1])\r\nlinear_model = model.Linear_model_build(train_data, train_label)\r\npredict_linear = linear_model.predict(validation_data)\r\nprint(\"linear_loss:\" + str(mse(predict_linear, validation_label)))\r\nlinear_test_label = linear_model.predict(test_data)\r\nutils.write_txt(linear_test_label)\r\n\r\n\r\n\r\n'''\r\ntest_label = []\r\nfor i in range(linear_test_label.shape[0]):\r\n\tif linear_test_label[i, 0] > -3:\r\n\t\ttest_label.append(linear_test_label[i, 0])\r\n\telse:\r\n\t\ttest_label.append(mlp_test_label[i, 0])\r\ntest_label = np.reshape(np.array(test_label), [linear_test_label.shape[0],1])\r\nutils.write_txt(test_label)\r\n'''\r\n\r\n# 多项式回归\r\n'''\r\npoly_model, train_data, validation_data, test_data = model.polynomial_model_build(\r\n\ttrain_data, validation_data, test_data, train_label, 1)\r\npredict_poly = poly_model.predict(validation_data)\r\nprint(mse(predict_poly, validation_label))\r\n'''"
},
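The script above fits a first linear model, drops the training rows whose absolute residual exceeds 1, and refits on the rest. A minimal sketch of that filtering step, assuming `linear_model`, `train_data` and `train_label` have the shapes used in main.py; it uses a NumPy boolean mask instead of the index lists, which also avoids rebinding the name `data` (and thereby shadowing the imported `data` module):

import numpy as np

# predictions on the training set from the already-fitted model
predict = linear_model.predict(train_data)
# keep only rows whose absolute residual is at most 1
keep = np.abs(predict.ravel() - train_label.ravel()) <= 1
train_data = train_data[keep]
train_label = np.reshape(train_label.ravel()[keep], [-1, 1])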
{
"alpha_fraction": 0.8153846263885498,
"alphanum_fraction": 0.8153846263885498,
"avg_line_length": 31.5,
"blob_id": "ed4327abceb51f57650d1b041878531e4f136860",
"content_id": "a023d14b1f7946b0eb73f4c96096cfa9685ccc47",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 65,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 2,
"path": "/工业蒸汽量预测/utils/__init__.py",
"repo_name": "kira-youshikage/zhengqi",
"src_encoding": "UTF-8",
"text": "from .write import write_txt\r\nfrom .plot import distribution_view"
},
{
"alpha_fraction": 0.7256097793579102,
"alphanum_fraction": 0.7256097793579102,
"avg_line_length": 18.75,
"blob_id": "99e022ce90cf4411a5c72e2a6c60aedfe3129061",
"content_id": "b966d540bbb476af370fe26e80651ae641d3374b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 164,
"license_type": "no_license",
"max_line_length": 37,
"num_lines": 8,
"path": "/工业蒸汽量预测/utils/write.py",
"repo_name": "kira-youshikage/zhengqi",
"src_encoding": "UTF-8",
"text": "import txt\r\nfrom config import DefaultConfig\r\n\r\nopt = DefaultConfig()\r\n\r\ndef write_txt(test_label):\r\n\ttxt_path = opt.txt_path\r\n\ttxt.write_data(txt_path, test_label)"
},
{
"alpha_fraction": 0.7900552749633789,
"alphanum_fraction": 0.7900552749633789,
"avg_line_length": 28.16666603088379,
"blob_id": "deeff88c683200ea583ec45550e547401c3375ef",
"content_id": "a7a87feef65f24d06c5d313fb0eeaed9807f36f4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 181,
"license_type": "no_license",
"max_line_length": 37,
"num_lines": 6,
"path": "/工业蒸汽量预测/data/__init__.py",
"repo_name": "kira-youshikage/zhengqi",
"src_encoding": "UTF-8",
"text": "from .dataset import get_data\r\nfrom .dataset import scale\r\nfrom .dataset import remove\r\nfrom .dataset import pca\r\nfrom .dataset import anova\r\nfrom .dataset import delete_character\r\n"
}
] | 15 |
zhangyongheng78/Web-Crawling-and-Search-Engine
|
https://github.com/zhangyongheng78/Web-Crawling-and-Search-Engine
|
10fe4e45bd54fbd3b555af128ef8a901a78def3d
|
11aa5bc89e05a98d4d2548102cc140b409cce820
|
41c038883274f558d64a8d788b6975dbe5edb3bb
|
refs/heads/main
| 2023-08-04T10:49:49.879722 | 2021-09-17T05:10:06 | 2021-09-17T05:10:06 | 367,650,336 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.552338182926178,
"alphanum_fraction": 0.5614593625068665,
"avg_line_length": 33.64321517944336,
"blob_id": "3d48173bc293de8733e9ca4156b9cd61068967ce",
"content_id": "6c7ff6575697aeb7be55514ddec6e5e5d006ce3f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6907,
"license_type": "no_license",
"max_line_length": 127,
"num_lines": 199,
"path": "/Milestone2/Milestone2.py",
"repo_name": "zhangyongheng78/Web-Crawling-and-Search-Engine",
"src_encoding": "UTF-8",
"text": "import json\nfrom bs4 import BeautifulSoup\nimport mysql.connector\nimport math\nimport time\nimport json\n\ncharacters = \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789\"\n\ndata_Dict = {}\n \ndef store_words():\n with open('bookkeeping.json') as f:\n data = json.load(f) #{0/106,\"www.ics.uci.edu\"}, {} , ...\n file_count = 0\n overall_data = data\n #change folder_document into doc_num\n for doc_num in data: #every file. eg. 0/116\n # ----\n if file_count%200==0:\n print file_count\n # ----\n file_total_word = 0\n file_dict = {}\n file_count +=1\n doc_num = doc_num.encode(\"ASCII\")\n new_data = open(doc_num).read() #Read html for 0/106\n soup = BeautifulSoup(new_data, 'html.parser')\n\n tags_to_look = ['body','title','h1','h2','h3','b','strong','cite']\n for tag in tags_to_look:\n substr = \"\"\n for i in soup.find_all(tag): #Find all text that after the tags\n substr += i.text\n tokenize(file_dict, substr, tag, data[doc_num])\n\n for each_file_word in file_dict.keys():\n file_dict[each_file_word][\"position_weight\"] = calculate_location_weight(file_dict[each_file_word][\"position\"])\n if each_file_word not in data_Dict:\n data_Dict[each_file_word] = {}\n data_Dict[each_file_word][doc_num] = file_dict[each_file_word]\n else: #word exists in data_Dict\n data_Dict[each_file_word][doc_num] = file_dict[each_file_word]\n #done going over all files\n get_tfidf(file_count)\n #write_output(\"output1.txt\",data_Dict)\n return\n\n'''\n@parameter: file_count: the total number of files\n@function: this function loop over the data_Dict to get each word,\n and for each word, it loops over each file and uses\n information in each file to calculate the word's tf-idf\n@return: None. The data_Dict will be modified, i.e. it's tf-idf spot will be filled.\n'''\ndef get_tfidf(file_count):\n for each_word in data_Dict:\n #print \"word: \" + each_word\n for each_doc in data_Dict[each_word]:\n #print \"doc: \"+each_doc\n tf = data_Dict[each_word][each_doc][\"tf\"]\n #print \"tf: \"+str(tf)\n len_file = len(data_Dict[each_word])\n #print \"# of files contains the word: \" + str(len_file)\n data_Dict[each_word][each_doc][\"tf_idf\"] = tfidf_calculator(tf,len_file,file_count)\n\n\ndef calculate_location_weight(loc_list):\n #['body','title','h1','h2','h3','b','strong','cite']\n weight = 0\n for loc in loc_list:\n if loc == \"body\":\n weight += 2\n elif loc == \"title\":\n weight += 6\n elif loc == \"h1\":\n weight += 1\n elif loc == \"h2\":\n weight += 1\n elif loc == \"h3\":\n weight += 1\n elif loc == \"b\":\n weight += 6\n elif loc == \"strong\":\n weight += 6\n elif loc == \"cite\":\n weight += 6\n return weight\n\n\n\n'''\n@parameter: 1) tf: the term frequency of a word\n 2) word_exists_file: the total number of files that the word exists in\n 3) all_num_file: the total number of files\n@function: calculate the weight of the tf-idf of a word\n@return: return the weight of a word's tf-idf in a file\n'''\ndef tfidf_calculator(tf, word_exists_files, all_num_file):\n weight = (1+ math.log10(float(tf))) * math.log10(float(all_num_file)/word_exists_files)\n return weight\n \n\n\n'''\n@function: write data I want to know into a file, instead of waiting them to print out. 
Printing\n to the console is too slow.\n@return: None\n'''\ndef write_output(file_name, d):\n f = open(file_name,\"w\")\n for word in d:\n f.write(word + \"\\n\")\n for each_doc in d[word].keys():\n f.write(\"doc_num: \"+each_doc + \"\\n\")\n f.write(\"url: \" + d[word][each_doc][\"url\"] +\"\\n\")\n f.write(\"tf_idf: \" + str(d[word][each_doc][\"tf_idf\"]) +\"\\n\")\n f.write(\"position: \")\n for pos in d[word][each_doc][\"position\"]:\n f.write(pos + \" \")\n f.write(\"\\n\")\n f.write(\"position_weight: \" + str(d[word][each_doc][\"position_weight\"]) + \"\\n\")\n f.write(\"\\n\")\n f.close()\n\n'''\n@parameter: an empty dictionary\n@function: initialize the dictionary with keys:url(empty string), tf(float), tf(float), position(str)\n@return: NONE. Because in the function the dictionary is modified and saved.\n'''\ndef _init_eachDocDict(tar_dict):\n tar_dict[\"url\"] = \"\"\n tar_dict[\"tf\"] = 0\n tar_dict[\"tf_idf\"] = float(0.0)\n tar_dict[\"position\"] = []\n\n\n\n'''\n@parameter: an empty dictionary, url(str), tf(float), tf_idf(float), position(str)\n@function: put those information into it's corresponding value position in the empty dictionary\n@return: NONE. Because in the function the dictionary is modified and saved.\n\ndef _fill_eachDocDict(doc_dict, url, tf, tf_idf, position):\n doc_dict[\"url\"] = url\n doc_dict[\"tf\"] = tf\n doc_dict[\"tf_idf\"] = tf_idf\n doc_dict[\"position\"].append(position)\n'''\n\n \n'''\n@parameter: take a string and tokenize it according to the space\n@return: a dictionary, the key is each word, the value is the occruence of each word in the string\n'''\ndef tokenize(file_dict, text, position, url):\n tokenized_word = \"\"\n #Loop over every character in the file and find the character\n #that satisfies the standard\n for i in text:\n if i in characters:\n tokenized_word = tokenized_word + i\n else:\n #Add the satisfied words and their frequencies into\n #a directionary\n if tokenized_word != \"\":\n new_text = tokenized_word.lower()\n new_text = new_text.encode(\"ASCII\")\n if new_text not in file_dict:\n file_dict[new_text] = {}\n _init_eachDocDict(file_dict[new_text])\n file_dict[new_text][\"url\"] = url\n file_dict[new_text][\"position\"].append(position)\n file_dict[new_text][\"tf\"] = 1\n \n elif new_text in file_dict: #in the same tag or new tag has the same word\n file_dict[new_text][\"tf\"] += 1\n if position not in file_dict[new_text][\"position\"]:\n file_dict[new_text][\"position\"].append(position)\n tokenized_word = \"\"\n return\n\ndef sortsecond(val):\n return val[1]\n\n'''\n'''\n\ndef write_json():\n with open(\"index.json\",\"w\") as f:\n json.dump(data_Dict,f)\n\n\nif __name__ == \"__main__\":\n start_time = time.time()\n store_words()\n write_json()\n end_time = time.time()\n print \"total time: \" + str(end_time-start_time)\n\n\n\n\n\n\n\n \n\n"
},
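The indexer above weights each term with the classic log-scaled tf-idf, weight = (1 + log10(tf)) * log10(N / df), where tf is the in-document term frequency, df the number of documents containing the term, and N the corpus size. A standalone sanity check of that formula, with made-up numbers:

import math

def tfidf(tf, df, n_docs):
    # same formula as tfidf_calculator in Milestone2.py
    return (1 + math.log10(tf)) * math.log10(float(n_docs) / df)

# hypothetical: a term occurring 5 times, found in 100 of 37000 documents
print(tfidf(5, 100, 37000))  # ~4.36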
{
"alpha_fraction": 0.49878767132759094,
"alphanum_fraction": 0.5167994499206543,
"avg_line_length": 35.49367141723633,
"blob_id": "3a48ee42d170b772a10ee002d33adc240b712b76",
"content_id": "fb1b2957923ac71ab82bfe44fd7e473384d88d03",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2887,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 79,
"path": "/Milestone2/GUI.py",
"repo_name": "zhangyongheng78/Web-Crawling-and-Search-Engine",
"src_encoding": "UTF-8",
"text": "from flask import Flask, url_for, render_template, request, redirect\nimport Milestone2_query\n\napp = Flask(__name__)\nnavigation = [\n {'message':'', 'length': 3},[\n {\n 'author': 'Frank zhang',\n 'title' : 'Blog Post 1',\n 'content' : 'First post conetent',\n 'date_posted' : 'April 20, 2018'\n },\n {\n 'author': 'Sam',\n 'title' : 'Blog post 2',\n 'content' : 'Second post conetent',\n 'date_posted' : 'April 20, 2018'\n }]\n ]\nnavigation[1].append({\n 'author': 'Qiren xiaodi',\n 'title' : 'Blog post 3',\n 'content' : 'Third post conetent',\n 'date_posted' : 'April 20, 2018'\n })\n\noutput = [{'message':'', 'length': 0},[]]\nindex = {}\nindex = Milestone2_query.load_json_index(index)\n\n\[email protected]('/', methods=['POST', 'GET'])\ndef cool():\n\n if request.method == \"GET\":\n return render_template('index.html', message = output)\n else:\n text = request.form['search']\n if text != '':\n message = ''\n count = 0\n output[1] = []\n \n raw_query_list = Milestone2_query.tokenize(text)\n query_list = Milestone2_query._check_word_exists(index, raw_query_list)\n if len(query_list) == 0:\n output[0]['message'] = \"there is no key word \" + text.upper() +\" in any websit\"\n return redirect('/search')\n url_list = []\n rank_doc_dict = Milestone2_query.rank(index, query_list)\n sorted_by_rank_list = sorted(rank_doc_dict.items(),key=lambda kv: kv[1])\n if len(sorted_by_rank_list) < 20:\n for info in sorted_by_rank_list[:len(sorted_by_rank_list)]:\n url = index[query_list[0]][info[0]][\"url\"]\n url_list.append(url)\n rank_term = Milestone2_query._get_term_ranking(index, query_list)\n sorted_rank_term_list = sorted(rank_term.items(),key=lambda kv: kv[1])\n remain_count = 20-len(sorted_by_rank_list) \n extra_url_list = Milestone2_query._get_remain(index, remain_count, sorted_rank_term_list)\n url_list = url_list + extra_url_list\n else:\n for info in sorted_by_rank_list[:len(sorted_by_rank_list)]:\n url = index[query_list[0]][info[0]][\"url\"]\n url_list.append(url)\n for url in url_list:\n if count >= 20:\n break\n output[1].append({\"url\": url})\n count += 1\n \n output[0]['message'] = text.upper()\n output[0]['length'] = count\n return redirect('/search')\n output[0]['message'] = \"please input something\"\n return redirect('/')\n\[email protected]('/search', methods=['GET'])\ndef login():\n return render_template('search.html', navigation = output)\n\n\n\n\n"
},
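GUI.py follows the POST-redirect-GET pattern: the POST handler on '/' fills a module-level `output` structure and redirects to '/search', whose GET handler only renders. A stripped-down sketch of that flow (route names and the `results` store are illustrative, not taken from the repo):

from flask import Flask, request, redirect, render_template

app = Flask(__name__)
results = {'message': '', 'urls': []}

@app.route('/', methods=['GET', 'POST'])
def index():
    if request.method == 'POST' and request.form.get('search'):
        results['message'] = request.form['search'].upper()
        results['urls'] = []  # the ranking code would fill this
        return redirect('/search')
    return render_template('index.html', message=results)

@app.route('/search', methods=['GET'])
def search():
    return render_template('search.html', navigation=results)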
{
"alpha_fraction": 0.5688357353210449,
"alphanum_fraction": 0.5828607082366943,
"avg_line_length": 36.07279586791992,
"blob_id": "ee618d847c9d72ce552ccb09c862b144c734f88b",
"content_id": "d6c4d08db5a48f9e44acc3de6514902fb95a0485",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9697,
"license_type": "no_license",
"max_line_length": 149,
"num_lines": 261,
"path": "/Milestone2/Milestone2_query.py",
"repo_name": "zhangyongheng78/Web-Crawling-and-Search-Engine",
"src_encoding": "UTF-8",
"text": "import json\nimport math\nimport time\nimport json\n\ncharacters = \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789\"\n\n\ndef load_json_index(index):\n with open(\"index.json\",\"r\") as f:\n index = json.load(f)\n return index\n\n\n'''\n@function: write data I want to know into a file, instead of waiting them to print out. Printing\n to the console is too slow.\n@return: None\n'''\ndef write_output(file_name, d):\n f = open(file_name,\"w\")\n for word in d:\n f.write(word + \"\\n\")\n for each_doc in d[word].keys():\n f.write(\"doc_num: \"+each_doc + \"\\n\")\n f.write(\"url: \" + d[word][each_doc][\"url\"] +\"\\n\")\n f.write(\"tf_idf: \" + str(d[word][each_doc][\"tf_idf\"]) +\"\\n\")\n f.write(\"position: \")\n for pos in d[word][each_doc][\"position\"]:\n f.write(pos + \" \")\n f.write(\"\\n\")\n f.write(\"position_weight: \" + str(d[word][each_doc][\"position_weight\"]) + \"\\n\")\n f.write(\"\\n\")\n f.close()\n\n\ndef get_query():\n query = input('Enter query to search, or only type \"exit\" to exit: ')\n query_list = tokenize(query)\n return query_list\n\n\ndef tokenize(text):\n tokenized_word = \"\"\n d = []\n c = 0\n #Loop over every character in the file and find the character\n #that satisfies the standard\n for i in text:\n c += 1\n if i in characters:\n tokenized_word = tokenized_word + i\n if(c == len(text)):\n new_text = tokenized_word.lower()\n d.append(new_text)\n else:\n #Add the satisfied words and their frequencies into\n #a directionary\n if tokenized_word != \"\":\n new_text = tokenized_word.lower()\n d.append(new_text)\n tokenized_word = \"\"\n return d\n\n\n\n'''\n@parameter: tf_idf(float), pos_weight(int)\n@function: calculate the score of product of tf_idf and position weight.\n tf_idf is 60% important, and position weight is 40% important\n@return: return the score of tf_idf and position weight. It's a float.\n'''\ndef _sum_tf_idf_pos(tf_idf, pos_weight):\n return 0.6*float(tf_idf)+0.4*float(pos_weight)\n \n'''\n@function: this function is the function that does the ranking.\n@parameter: index: the whole inverted index database; query_list: the list that contains each query terms\n@return: a dictionary that contains the intersect documents of those query terms, and their value is their overall rank\n e.g.{\"12/100\":3.54, \"5/203\": 4.2, \"8/160\":2.3231} ...\n'''\ndef rank(index,query_list):\n #get all query word's intersection files first\n if len(query_list) >= 2:\n intersect = _find_intersect(index,query_list)\n for each_doc in intersect:\n intersect[each_doc] = _doc_overall_score(each_doc,query_list,index,intersect)\n return intersect\n elif len(query_list) == 1:\n result = {}\n for doc in index[query_list[0]]:\n result[doc] = _sum_tf_idf_pos(index[query_list[0]][doc][\"tf_idf\"],index[query_list[0]][doc][\"position_weight\"])\n return result\n\n'''\n@parameter: - index: the whole inverted index database;\n - query_list: a list that contains all query terms.\n@function: this function finds the intersect files of those query terms\n@return: it returns 2 things.\n 1. intersect: a dictionary which key is each doc that all query terms appear in, the value is 0\n 2. rank_term: a dictionary which key is each term, and the value is each term's number of total existing document\n'''\n#need to check if some query term is in the index. 
if it is not in index and I wanna index it,\n#it will give me a keyerror.\ndef _find_intersect(index, query_list):\n intersect_docs= {}\n for i in range(len(query_list)-1):\n if i == 0:\n intersect_docs = _find_intersect_helper(index[query_list[i]],index[query_list[i+1]])\n else:\n intersect_docs = _find_intersect_helper(intersect_docs, index[query_list[i+1]]) \n return intersect_docs\n\n'''\n@parameter: dictionary a, b, both are dictionary that has all those files and affiliated file information\n@function: find the intersect documents in both files, and put them into the result{}.\n@return: result{}, which keys are the intersect docs, value are 0.\n'''\ndef _find_intersect_helper(a,b):\n result = {}\n for each_doc in a:\n if each_doc in b:\n result[each_doc] = 0\n return result\n\n\n'''\n@parameter: doc: document number, e.g. 12/100;\n term_order_dict: a dictionary, key is all the query terms, value is each term's weight.\n index: the whole inverted_index database\n@function: it calculates the document's overall score.\n e.g.\n term_order_dict: {\"computer\": 2, \"science\": 1}\n in doc 12/100:\n the computr tf_idf is 1.6, pos_weight is 9\n the science tf_idf is 0.9, pos_weight is 3\n doc 12/100 overall score:\n term_order_dict[\"computer\"](the weight of computer) * _sum_tf_idf_pos(index[\"computer\"][\"12/100\"][\"tf_idf\"], ...[\"position_weight\"]) \n -> 2 * (0.6*1.6)+(0.4*9)\n +\n science's score.\n@return: the overall score of the document\n''' \ndef _doc_overall_score(doc, query_list,index, intersect):\n score = 0\n for each_term in query_list:\n score += _sum_tf_idf_pos(index[each_term][doc][\"tf_idf\"],index[each_term][doc][\"position_weight\"])\n return score \n\n'''\n@parameter: rank_term: a dict, key is query terms, value is # of total doc that each term exists\n e.g. {\"computer\": 10, \"science\": 4} -> computer is in 10 docs, science is in 4 docs\n@function: sort those query terms from order: term that has less doc has more weight.\n e.g. science has more weight than computer science.\n@return: a dict, key is each query terms, value is their weight.\n e.g. 
{\"science\": 2, \"computer\": 1}\n'''\ndef _get_term_from_small_to_big(rank_term):\n term_ordered_dict = {}\n sorted_by_value = sorted(rank_term.items(), key=lambda kv: kv[1])\n for i in range(len(sorted_by_value)):#from important to less important\n term_ordered_dict[sorted_by_value[i][0]] = len(sorted_by_value)-i\n return term_ordered_dict\n\ndef _write_to_file(filename, sorted_by_rank_list, query_list, index):\n f = open(filename, \"w\")\n for info in sorted_by_rank_list:\n index[query_list[0]][info[0]][\"url\"]\n f.write(\"\\n\")\n f.close()\n\ndef _check_word_exists(index, query_list):\n valid_query_list = []\n for term in query_list:\n if term in index:\n valid_query_list.append(term)\n return valid_query_list\n\ndef _get_term_ranking(index, query_list):\n rank_term = {}\n for term in query_list:\n rank_term[term] = len(index[term])\n return rank_term\n\ndef _get_remain(index, remain_count, sorted_rank_term_list):\n url_list = []\n query_list = []\n count = 0\n for term_docNum in sorted_rank_term_list:\n query_list.append(term_docNum[0])\n count += int(term_docNum[1])\n if count >= remain_count:\n break\n rank_doc_dict = rank(index, query_list)\n sorted_by_rank_list = sorted(rank_doc_dict.items(),key=lambda kv: kv[1],reverse=True)\n for info in sorted_by_rank_list:\n url_list.append(index[query_list[0]][info[0]][\"url\"])\n return url_list\n\n\n\nif __name__ == \"__main__\":\n index = {}\n index = load_json_index(index)\n exit_status = False\n while(exit_status == False):\n raw_query_list = get_query()\n if len(raw_query_list) == 1 and raw_query_list[0] == \"exit\":\n exit_status = True\n else:\n query_list = _check_word_exists(index, raw_query_list)\n if len(query_list) == 0:\n pass\n else:\n start = time.time()\n url_list = []\n rank_doc_dict = rank(index, query_list)\n sorted_by_rank_list = sorted(rank_doc_dict.items(),key=lambda kv: kv[1],reverse=True)\n if len(sorted_by_rank_list) < 20:\n for info in sorted_by_rank_list[:len(sorted_by_rank_list)]:\n url_list.append(index[query_list[0]][info[0]][\"url\"])\n rank_term = _get_term_ranking(index, query_list)\n sorted_rank_term_list = sorted(rank_term.items(),key=lambda kv: kv[1])\n remain_count = 20-len(sorted_by_rank_list) \n extra_url_list = _get_remain(index, remain_count, sorted_rank_term_list)\n url_list = url_list+extra_url_list\n else:\n for info in sorted_by_rank_list[:20]:\n url_list.append(index[query_list[0]][info[0]][\"url\"])\n\n \n print(\"Done.\")\n\n\n\n\n\n'''\n \n\nif __name__ == \"__main__\":\n index = {}\n index = load_json_index(index)\n exit_status = False\n while(exit_status == False):\n raw_query_list = get_query()\n if len(raw_query_list) == 1 and raw_query_list[0] == \"exit\":\n exit_status = True\n else:\n query_list = _check_word_exists(index, raw_query_list)\n if len(query_list) == 0:\n pass\n else:\n start = time.time()\n rank_doc_dict = rank(index, query_list)\n sorted_by_rank_list = sorted(rank_doc_dict.items(),key=lambda kv: kv[1])\n for info in sorted_by_rank_list[:20]:\n print(index[query_list[0]][info[0]][\"url\"])\n print(time.time()-start)\n print(\"Done.\")\n'''\n \n \n\n\n\n\n\n\n \n"
},
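rank() scores a document by summing, over the query terms, the 60/40 blend from _sum_tf_idf_pos. A worked example with the same invented numbers as the docstring (tf-idf 1.6 with position weight 9, tf-idf 0.9 with position weight 3):

def blend(tf_idf, pos_weight):
    # the 0.6/0.4 mix used by _sum_tf_idf_pos
    return 0.6 * tf_idf + 0.4 * pos_weight

score = blend(1.6, 9) + blend(0.9, 3)
print(score)  # (0.96 + 3.6) + (0.54 + 1.2) = 6.3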
{
"alpha_fraction": 0.7861111164093018,
"alphanum_fraction": 0.7944444417953491,
"avg_line_length": 71,
"blob_id": "b2eb957027cc2fd2801f0b4b70d98a95a7080767",
"content_id": "6204eb79f20bb1e1468144530a4a02f5ef6cc23b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 360,
"license_type": "no_license",
"max_line_length": 188,
"num_lines": 5,
"path": "/README.md",
"repo_name": "zhangyongheng78/Web-Crawling-and-Search-Engine",
"src_encoding": "UTF-8",
"text": "# Web-Crawling-and-Search-Engine\n\nCrawer_frame(_crawer_frame.py_): Checked all URLs were valid and in absolute form before sending to frontier and storing into JSON file\n\nMilestone2 (_Milestone2.py, Milestone2_query.py, GUI.py_): Built GUI search engine with prompt and ranked searching result based on TF-IDF, cosine similarity, and weights on different tags\n"
}
] | 4 |
AzzyOxx/pec-atividades
|
https://github.com/AzzyOxx/pec-atividades
|
d3a37ecc2156e8682f90941bdf89da8b5dda435c
|
3a89c1135de3caf0a19cffbf6142ca28ca012295
|
df432760949b3e64bd73f363b92f82871c51ab4f
|
refs/heads/master
| 2023-05-31T15:26:29.938246 | 2021-07-06T23:48:27 | 2021-07-06T23:48:27 | 370,512,103 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5614407062530518,
"alphanum_fraction": 0.5974576473236084,
"avg_line_length": 28.5,
"blob_id": "157fcd7527254cc5ed8b1598031d1cc737d0c14d",
"content_id": "23ebc0092d262c1a2e71b0994ac55f8eb2044731",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1450,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 48,
"path": "/sem14-q1-dicionario-dicionario_com_dados_de_livros.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n01.Crie um dicionário e armazene nele os dados de livros: título, isbn, autor e preço. A chave do dicionários será um \ncódigo numérico e sequencial, gerado automaticamente, iniciando pelo número 101 (cento e um). A leitura de uma \ndescrição vazia para o título finaliza a leitura. Imprima todos os dados usando o padrão “Nome do Campo: valor”.\nPor exemplo:\n Código: 101\n Título: Programação Java para a Web - 1ª Edição\n ISBN: 978-85-7522-238-6\n Autor: Décio H. Luckow\n Preço: 99.00\n ...\n Código: 114\n Título: Novelas, Espelhos e um Pouco de Choro\n ISBN: 85-7480-052-X\n Autor: Thelma Guedes\n Preço: 52.00\n'''\n\n#criando o dicionário com as informações dos livros\ndef recebeDadosLivros():\n livros = {}\n key = 100\n while True:\n key += 1\n title = input().strip()\n if title == '': break \n isbn = input().strip()\n author = input().strip()\n price = float(input())\n livros[key] = (title, isbn, author, price)\n\n return livros\n\n\ndef main():\n #entrada\n biblioteca = recebeDadosLivros()\n\n #saída\n for livro in biblioteca:\n print(f'Código: {livro}')\n print(f'Título: {biblioteca[livro][0]}')\n print(f'ISBN: {biblioteca[livro][1]}')\n print(f'Autor: {biblioteca[livro][2]}')\n print(f'Preço: {biblioteca[livro][3]:.2f}')\n\nif __name__ == '__main__':\n main()\n"
},
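The sequential codes starting at 101 can also come from enumerate() with a start argument instead of a hand-maintained counter. A minimal sketch with two hypothetical records:

registros = [('Livro A', '978-0-00', 'Autor A', 10.0),
             ('Livro B', '978-0-01', 'Autor B', 20.0)]
# build {101: (...), 102: (...)} directly from the record list
livros = {codigo: dados for codigo, dados in enumerate(registros, start=101)}
print(livros)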
{
"alpha_fraction": 0.65355384349823,
"alphanum_fraction": 0.6580241322517395,
"avg_line_length": 31.897058486938477,
"blob_id": "bd7a1d8c8b9b420fa39856bfe3998cbdb830873c",
"content_id": "1562b31a7158bd5c315146853ef39f22d45b9c92",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2245,
"license_type": "no_license",
"max_line_length": 152,
"num_lines": 68,
"path": "/sem13-q3-arrays_operacoes_em_uma_matriz_m_x_n.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n03.Fazer um programa para ler uma matriz n x m de números inteiros. Os valores de n e m são inteiros, positivos e \ndevem ser informados pelo usuário, calcular e armazenar em uma tupla para mostrar, respectivamente:\na) a soma dos elementos da primeira linha\nb) a soma dos elementos da última coluna\nc) a média de todos os elementos\nd) o menor elemento\ne) o maior elemento\n'''\n\ndef maiorEmenor_matriz(matriz):\n maior = menor = matriz[0][0]\n for linha in matriz:\n for elemento in linha:\n if elemento > maior:\n maior = elemento\n if elemento < menor:\n menor = elemento\n return maior, menor\n\ndef media_dos_elementos_matriz(matriz, total): #total = len(matriz)*len(matriz[0])\n soma = 0\n for linha in matriz:\n soma += soma_lista(linha)\n return soma / total\n\ndef pega_coluna_matriz(matriz, coluna):\n matriz_colunaX = []\n for linha in matriz:\n matriz_colunaX.append(linha[coluna-1])\n return matriz_colunaX\n\ndef soma_lista(lista):\n soma = 0\n for cont in lista:\n soma += cont\n return soma\n\ndef preenche_matriz(linhas, colunas):\n matriz = [] #lista vazia\n for lin in range(linhas):\n linha = [] # cada linha é uma lista (vetor)\n for col in range(colunas):\n n = int(input('Insira um número inteiro: '))\n linha.append(n)\n #insere a linha na matriz\n matriz.append(linha)\n return matriz\n\ndef main():\n #entrada de dados\n n = int(input('Quantidades de linhas: '))\n m = int(input('Quantidades de colunas'))\n matrizNxM = preenche_matriz( n, m )\n \n #processamento\n somaPrimeiraLinha = soma_lista(matrizNxM[0])\n lista_dos_elementos_ultima_coluna = pega_coluna_matriz(matrizNxM, m)\n somaUltimaColuna = soma_lista(lista_dos_elementos_ultima_coluna)\n media_dos_elementosMatriz = media_dos_elementos_matriz(matrizNxM, m*n)\n maior_elemento_matriz, menor_elemento_matriz = maiorEmenor_matriz(matrizNxM)\n tupla_com_as_informacoes = (somaPrimeiraLinha, somaUltimaColuna, round(media_dos_elementosMatriz, 4), menor_elemento_matriz, maior_elemento_matriz,)\n \n #saída\n print(tupla_com_as_informacoes)\n \nif __name__ == '__main__':\n main()\n"
},
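For comparison with the hand-rolled helpers above (which are the point of the exercise), the same five statistics fall out of the built-ins and a flattening comprehension. A sketch over a small hypothetical 2x3 matrix:

matriz = [[1, 2, 3],
          [4, 5, 6]]
todos = [x for linha in matriz for x in linha]    # flatten
resultado = (sum(matriz[0]),                      # sum of the first row
             sum(linha[-1] for linha in matriz),  # sum of the last column
             round(sum(todos) / len(todos), 4),   # mean of all elements
             min(todos),                          # smallest element
             max(todos))                          # largest element
print(resultado)  # (6, 9, 3.5, 1, 6)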
{
"alpha_fraction": 0.5770738124847412,
"alphanum_fraction": 0.5816459655761719,
"avg_line_length": 27.88679313659668,
"blob_id": "a3fbce7c57f33de231b276911697b4a68b991bca",
"content_id": "c3493506e1ff711346e14f27162e92abb726d449",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3090,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 106,
"path": "/sem14-dicionario-codeclube_desafio01_adicionandoTraducoes.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "def desafio_AdicionandoTraducoes(sentence):\n textSpeakDictionary = {\n 'rs' : 'risos',\n 'tmb' : 'também',\n 'vc' : 'você',\n 'pq' : 'porque',\n 'msm' : 'mesmo',\n 'q' : 'que',\n 'blz': 'beleza',\n 'lgl' : 'legal',\n 'rlx' : 'relaxa',\n 'xau' : 'tchau',\n 'qlqr': 'qualquer',\n 'fzr' : 'fazer',\n 'pd' : 'pode',\n 's' : 'sim',\n 'n' : 'não'\n }\n\n #obtém a frase para tradução\n ##sentence = input('Insira uma frase para traduzir: ').lower()\n\n #divide a frase em uma lista de palavras\n wordsToTranslate = sentence.split()\n translatedSentence = ''\n\n #passa por cada palavra da lista\n for word in wordsToTranslate:\n #adiciona a palavra traduzida caso ela exista no dicionário\n if word in textSpeakDictionary:\n translatedSentence += textSpeakDictionary[word] + ' '\n #mantém a palavra original caso não exista tradução\n else:\n translatedSentence += word + ' '\n #imprime a frase traduzida\n ##print('==>')\n ##print(translatedSentence)\n return translatedSentence\n\n\ndef etapa2_traduzindo_frases():\n textSpeakDictionary = {\n 'rs' : 'risos',\n 'tmb' : 'também',\n 'vc' : 'você'\n }\n\n #obtém a frase para tradução\n sentence = input('Insira uma frase para traduzir: ').lower()\n\n #divide a frase em uma lista de palavras\n wordsToTranslate = sentence.split()\n translatedSentence = ''\n\n #passa por cada palavra da lista\n for word in wordsToTranslate:\n #adiciona a palavra traduzida caso ela exista no dicionário\n if word in textSpeakDictionary:\n translatedSentence += textSpeakDictionary[word] + ' '\n #mantém a palavra original caso não exista tradução\n else:\n translatedSentence += word + ' '\n #imprime a frase traduzida\n print('==>')\n print(translatedSentence)\n\n\ndef etapa1_traduzindo_palavras():\n textSpeakDictionary = {\n 'rs' : 'risos',\n 'tmb' : 'também'\n }\n\n #imprime o dicionário inteiro\n print('Dicionário = ', textSpeakDictionary)\n\n #imprime somente o conteúdo relacioando à chave 'rs'\n print('\\nrs =', textSpeakDictionary[\"rs\"])\n\n #texto que pede a entrada do usuário\n key = input('\\nO que você gostaria de converter? : ')\n print(key, '=', textSpeakDictionary[key])\n \n\ndef main():\n #etapa1_traduzindo_palavras()\n #etapa2_traduzindo_frases()\n \n print(f'{\"=\"*10}Traduzindo uma frase{\"=\"*10}')\n frase = input('Insira uma frase para traduzir: ').lower()\n print(f'==>\\n{desafio_AdicionandoTraducoes(frase)}')\n\n print(f'\\n{\"=\"*10}Traduzindo um bloco de frases{\"=\"*10}')\n bloco = ()\n print('Insira as frase para serem traduzidas(Press 0 to stop): ')\n while True:\n frase = input('\\>').lower()\n if frase == '0': break\n bloco += frase, \n print('==>') \n for frase in bloco:\n print(desafio_AdicionandoTraducoes(frase))\n \n \nif __name__ == '__main__':\n main()\n"
},
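The word-by-word if/else in the translator above is the textbook case for dict.get(key, default), which falls back to the original word when no translation exists. A minimal sketch with a two-entry dictionary:

traducoes = {'vc': 'você', 'tmb': 'também'}
frase = 'vc vem tmb'
# translate each word, keeping it unchanged when it has no entry
print(' '.join(traducoes.get(p, p) for p in frase.split()))  # você vem também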
{
"alpha_fraction": 0.39827585220336914,
"alphanum_fraction": 0.44913792610168457,
"avg_line_length": 15.338027954101562,
"blob_id": "a9f9e8cbdf2a10b99837dd8b3a6017c0004c1b95",
"content_id": "7a44eb9657b10c8aa6147ee58472ef3fbbc508e9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1186,
"license_type": "no_license",
"max_line_length": 62,
"num_lines": 71,
"path": "/sem05-q3-ContaDigitImparesDeNumDe10a99.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#Escreva um programa que leia um número inteiro entre 10 e 99,\n#mostre uma das mensagens, a seguir, conforme o \n#número lido.\n#• Nenhum dígito é ímpar.\n#• Apenas um dígito é ímpar.\n#• Os dois dígitos são ímpares\n\ndef separa(a):#123\n b = a % 10\n #print(b)\n c = a // 10\n d = c % 10\n #print(d)\n \n return b, d\n\ndef eh_par2(a, b, c):\n if a % 2 == 0:\n print(a)\n if b % 2 == 0:\n print(b)\n if c % 2== 0:\n print(c)\n\ndef eh_impar(a, b):\n i = 0\n if a % 2 != 0:\n i+=1\n if b % 2 != 0:\n i+=1\n \n return i\n\ndef classi(a):\n if a == 0:\n print('Nenhum dígito é ímpar.')\n elif a == 1:\n print('Apenas um dígito é ímpar.')\n else:\n print('Os dois dígitos são ímpares.')\n \ndef main():\n try:\n n = int(input('num: '))\n #n = int(input())\n if 10 <= n <= 99:\n n1, n2 = separa(n)\n c = eh_impar(n1, n2)\n #print(c)\n classi(c)\n #eh_par2(n1, n2, n3)\n else:\n print()\n\n except:\n print()\n \n\n\nif __name__ == '__main__':\n main()\n\n'''\n123 L 10\n120 12\n=3\n20\n30\n30\n0\n'''\n"
},
{
"alpha_fraction": 0.4617563784122467,
"alphanum_fraction": 0.5070821642875671,
"avg_line_length": 10.766666412353516,
"blob_id": "79f2e90d8b2a19f9af41920f165d54c8ea090996",
"content_id": "7fb2d9f47b54f59420039e9d12b7d8fc65dc09d7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 353,
"license_type": "no_license",
"max_line_length": 26,
"num_lines": 30,
"path": "/passo1-olaTurtle-I.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "from turtle import *\n\ndef tartaruga():\n \n shape(\"turtle\")\n speed(8)\n\n color(\"Purple\")\n pensize(7)\n right(90)\n forward(100)\n left(90)\n forward(50)\n\n color(\"Orange\")\n pensize(3)\n penup()\n forward(50)\n pendown()\n forward(50)\n\n done()\n\n\ndef main():\n \n tartaruga()\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.6196647882461548,
"alphanum_fraction": 0.648715078830719,
"avg_line_length": 39.31531524658203,
"blob_id": "496cec3cda6d86bfe28acaf5e9d99448c8b16ccb",
"content_id": "303e049a8751822f94961c63f19575352d26c900",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4490,
"license_type": "no_license",
"max_line_length": 144,
"num_lines": 111,
"path": "/sem13-q5-arrays_faturamento_de_empresa_em_array_tridimensional.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n05.Faça um programa que leia e armazene em um array tridimensional contendo os valores do faturamento anual de \numa empresa, especificados por filial e também mês a mês. Veja a estrutura do array seguinte:\nApós a leitura dos dados faça o seguinte:\na) Calcule o total de cada ano por filial;\nb) Calcule o total de todas as filiais por ano;\nc) Calcule o total do período para todas as \nfiliais;\nd) Mostre todos os dados lidos e calculados\nde acordo com o período que ocorrer, \npor exemplo:\n\n2014;Filial 1;Janeiro;210\n...\n2014;Filial 1;Dezembro;463\nTOTAL 2014 FILIAL 1;3526\n2014;Filial 2;Janeiro;430\n...\n2014;Filial 3;Dezembro;310\nTOTAL 2014 FILIAL 3;3346\nTOTAL 2014 TODAS FILIAIS;10727\n2015;Filial 1;Janeiro;316\n...\n2017;Filial 3;Dezembro;354\nTOTAL 2017 FILIAL 3;3550\nTOTAL 2017 TODAS FILIAIS;11123\nTOTAL PERIODO TODAS FILIAIS;42855\n\n'''\n\ndef soma_todo_faturamento_filial(matriz, filiais): # -->> c) Calcule o total do período para todas as filiais;\n i_filial = -1\n i_ano = -1\n valor_periodo_filial = []\n for filial in filiais:\n i_filial += 1\n soma = 0\n for ano in matriz:#linha\n soma += ano[i_filial]\n valor_periodo_filial.append(soma)\n\n return valor_periodo_filial\n\ndef faturamento_por_ano(matriz_tri): # -->> b) Calcule o total de todas as filiais por ano; \n matriz_linha_faturamento_ano = []\n for ano in matriz_tri:\n matriz_linha_faturamento_ano.append(sum(ano))\n return matriz_linha_faturamento_ano\n\ndef faturamento_ano_filial(matriz_tri):#, filiais, anos, meses / -->> a) Calcule o total de cada ano por filial;\n matriz_tridimensional = []\n for ano in matriz_tri:#cada 'ano' vai pegar uma bidimensional\n matriz_linha_coluna_faturamento_ano = []\n for filial in ano:#cada filial representa uma linha dentro da matriz 'ano' \n faturamento_filial_ano = 0\n for mes in filial:\n faturamento_filial_ano += mes[3]\n matriz_linha_coluna_faturamento_ano.append(faturamento_filial_ano) \n matriz_tridimensional.append(matriz_linha_coluna_faturamento_ano) \n return matriz_tridimensional\n\ndef preenche_matriz( matrizes_bi, linhas , elementos):\n matriz_tridimencional = [] #matriz_tri[matriz_bi[linhas[]]] (matriz tridimensional)\n for ano in matrizes_bi:\n matriz_bidimencional = [] # matriz_bi[linhas[]] (matriz bidimensional)\n for filial in linhas:\n linha = [] #linha[] (linhas da matriz)\n for mes in elementos:\n #faturamento_mes = int(input())\n faturamento_mes = int(input(f'Insira as vendas da {filial} no mês de {mes} do ano de {ano}: '))\n #insere o elemento na linha\n linha.append((ano, filial, mes, faturamento_mes))\n #insere a linha na matriz bidimensional\n matriz_bidimencional.append(linha)\n #insere a matriz bidimensional na matriz tridimensional\n matriz_tridimencional.append(matriz_bidimencional)\n return matriz_tridimencional\n\ndef main():\n #entrada de dados\n filiais = ('Filial 1', 'Filial 2', 'Filial 3')\n lista_meses = ( 'Janeiro', 'Fevereiro', 'Março', 'Abril', 'Maio', 'Junho', 'Julho', 'Agosto', 'Setembro', 'Outubro', 'Novembro', 'Dezembro')\n periodo = list(ano for ano in range(2014, 2018))\n\n #processamento das informações\n faturamento_anual = preenche_matriz(periodo, filiais, lista_meses)\n faturamento_por_ano_filial = faturamento_ano_filial(faturamento_anual)#, filiais, periodo, lista_meses\n faturamento_total_por_ano = faturamento_por_ano(faturamento_por_ano_filial)\n faturamento_total_do_periodo_por_filial = soma_todo_faturamento_filial(faturamento_por_ano_filial, filiais)\n\n #saída \n i_ano = -1\n for ano in faturamento_anual:\n i_ano 
+= 1\n i_filial = -1\n for filial in ano:\n i_filial += 1\n i_mes = -1\n for mes in filial:\n i_mes += 1\n print(f'{filial[i_mes][0]};{filial[i_mes][1]};{filial[i_mes][2]};{filial[i_mes][3]}')\n \n print(f'TOTAL {filial[i_mes][0]} {filial[i_mes][1].upper()}; {faturamento_por_ano_filial[i_ano][i_filial]}' )\n print(f'TOTAL {filial[i_mes][0]} TODAS FILIAIS;{faturamento_total_por_ano[i_ano]}')\n print(f'TOTAL PERÍODO TODAS FILIAIS;{sum(faturamento_total_do_periodo_por_filial)}')\n \nif __name__ == '__main__':\n ''' \n testes\n '''\n main()\n"
},
{
"alpha_fraction": 0.5995623469352722,
"alphanum_fraction": 0.6115973591804504,
"avg_line_length": 29.46666717529297,
"blob_id": "95c9a99536096386fb39b4562e9ab2523e16f42d",
"content_id": "6130f3ef60a418ba2c56a386f93945ed029d1703",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 918,
"license_type": "no_license",
"max_line_length": 176,
"num_lines": 30,
"path": "/sem08-q1-simulaAplicacaoNaPoupQuandoVaiDobrarValor.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n01. Escreva um programa que pergunte o depósito inicial e a taxa de juros ao ano de uma poupança. Mostre \nem quantos anos o valor acumulado será o dobro do valor inicial.\n'''\n\n\ndef dobro(din, taxa):\n taxa = taxa / 100\n ano = 0\n valor_com_juros = din\n while valor_com_juros <= (2 * din):\n valor_com_juros = valor_com_juros + (valor_com_juros * taxa)\n ano += 1\n return valor_com_juros, ano\n\ndef main():\n #dep_ini = float(input())\n dep_ini = float(input('Deposito inicial: '))\n \n #tax_juros = float(input())\n tax_juros = float(input('Taxa de juros: '))\n valor, anos = dobro(dep_ini, tax_juros )\n \n print(f'Com R$ {dep_ini:.2f} como deposito inicial, à uma taxa de juros de {tax_juros}%an, levaria {anos} ano(os) para dobrar o valor, resultando em R$ {valor:.2f} reais.')\n #print(f'{valor:.2f}')\n #print(f'{anos}')\n\n\nif __name__ == '__main__':\n main()\n"
},
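The loop above counts whole years of compounding until the balance passes twice the deposit. As a cross-check, the doubling time has the closed form n = log(2) / log(1 + r), rounded up to whole years; a sketch (ignoring the loop's <= boundary case, where exact doubling costs one extra iteration):

import math

def anos_para_dobrar(taxa_percentual):
    # years until a balance doubles under yearly compounding at rate r
    r = taxa_percentual / 100
    return math.ceil(math.log(2) / math.log(1 + r))

print(anos_para_dobrar(10))  # 8, since 1.1**8 ≈ 2.14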
{
"alpha_fraction": 0.4295347332954407,
"alphanum_fraction": 0.48415374755859375,
"avg_line_length": 17.30864143371582,
"blob_id": "6d7518dd5c42758f480e234017598a9634b0e76b",
"content_id": "9b7a6a02c875779a02b40625f2f90c17ab7ceece",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1489,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 81,
"path": "/desafio02_formatos-em-loop-tentandoFAzerUmaArvore.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "from turtle import *\n\n#essa função leva a \"caneta\" pro topo esquerdo e define algumas propriedades da mesma\ndef posicionando():\n shape(\"turtle\")\n pensize(5)\n speed(11)\n #levando para o topo esquerdo\n penup()\n backward(250)\n left(90)\n forward(150)\n right(90)\n\n#essa função faz um espaçamento à direita \ndef espaco():\n penup()\n forward(200)\n \ndef pentagono():\n pendown()\n for cont in range(5): \n forward(80)\n left(72)\n \ndef hexagono():\n pendown()\n for cont in range(6): \n forward(70)\n left(60)\n \ndef circulo():\n pensize(1)\n pendown()\n n = 0\n d=0\n while d < 3:\n for cont in range(360):\n n+=1\n #d = n\n #d += d * 100\n forward(n*0.01)\n right(fibonacci(n)*0.001)\n #forward((fibonacci(cont)/1000))\n #right(1/1000)\n d += 1\n \n \ndef fibonacci(n):\n fib = 0\n f1= 0\n f2 = 1\n while n-1 > 0: \n fib = f1 + f2\n #print(fib ,end = ',')\n f2 = f1\n f1 = fib\n n -= 1\n return fib\n'''\ndef main():\n n = int(input())\n print(fibonacci(n))\n\nif __name__ == '__main__':\n main()\n'''\ndef main():\n \n posicionando()#linha 4\n #pentagono()#linha 20\n #espaco()#linha 16\n #hexagono()#linha 26\n #espaco()#linha 16\n circulo()#linha 32\n #espaco()\n #fibonacci(360)\n done()\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.7466443181037903,
"alphanum_fraction": 0.755033552646637,
"avg_line_length": 53.181819915771484,
"blob_id": "b300c9845ec10df944a115d8f2f0f5b5d5f72e95",
"content_id": "d05e36601f16109a6a34788f1a8a4b94ee277be0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 602,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 11,
"path": "/sem02-q2-descontoPrecoDinamico.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#variável \"valor\", do tipo float, recebe um valor a partir da leitura do teclado\nvalor = float(input('Valor de geladeira: '))\n#variável \"desconto\", do tipo float, recebe um valor a partir da leitura do teclado\ndesconto = float(input('Percentual do desconto: '))\n#variável \"fator\" recebe o valor da porcentagem convertida em decimais, conforme a fórmula\nfator = 1 - desconto / 100\nprint(fator)\n#variável \"valor\" recebe o valor com o desconto\nvalor = valor * fator\n#mensagem final ao usuário imprimindo o valor com o desconto\nprint(f'A geladeira com {desconto}% de desconto fica por {valor: .2f}')\n"
},
{
"alpha_fraction": 0.4552929103374481,
"alphanum_fraction": 0.48818087577819824,
"avg_line_length": 18.85714340209961,
"blob_id": "a03d537facd176ba49937bf4d868500a0948f990",
"content_id": "ca0167b8c6b5f4eaa62d2cad672640e6295980dd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 973,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 49,
"path": "/desafio-sem07-04_variaveis-e-loops.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "from turtle import *\nimport random\n\ndef posicionando():\n shape(\"turtle\")\n pensize(5)\n speed(11)\n\n #levando para o topo esquerdo\n penup()\n backward(450)\n left(90)\n forward(150)\n right(90)\n\ndef espaco():\n penup()\n forward(200)\n \ndef figuras_planas():\n lado = 3\n tam = 100\n for cont in range(5):\n a = random.choice([\"Blue\", \"Red\", \"Yellow\", \"Green\", \"Purple\", \"Brown\", \"Pink\", \"Orange\"])\n color(a)\n pendown() \n angulo = 360 / lado\n for cont in range(lado): \n forward(tam)\n left(angulo)\n tam -= 15\n espaco()\n lado += 1\n \n pendown()\n a = random.choice([\"Blue\", \"Red\", \"Yellow\", \"Green\", \"Purple\", \"Brown\", \"Pink\", \"Orange\"])\n color(a)\n for cont in range(360): \n forward(1)\n left(1)\n \ndef main():\n posicionando()\n figuras_planas()\n \n done()\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.5642245411872864,
"alphanum_fraction": 0.5823025703430176,
"avg_line_length": 31.84375,
"blob_id": "9e3a50434f28eb9f230a7da691cc27b4cb0765cf",
"content_id": "f843c24bd36e22a876b89307f61ebaa5bfa2f194",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1082,
"license_type": "no_license",
"max_line_length": 158,
"num_lines": 32,
"path": "/sem09-q3-natalidadeDe2Paises.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n02. Dado um país A, com taxa de natalidade de 2% ao ano, e um país B com uma taxa de natalidade de 3% \nano. Sabe-se que, atualmente, o país A tem população maior que o país B. Faça um programa que leia a \npopulação de cada país e imprima o tempo necessário para que a população do país B ultrapasse a \npopulação do país A.\n'''\n\ndef popB_superaA(popA, popB):\n ano = 0 \n if popA > popB:\n while popB < popA:#=\n popA = 1.02 * popA\n popB = 1.03 * popB\n ano += 1\n else:\n aux = popA\n popA = popB\n popB = aux\n while popB < popA:#=\n popA = 1.02 * popA\n popB = 1.03 * popB\n ano += 1\n return ano\n\ndef main():\n populacao_A = int(input('Qual é a população do país A? '))\n populacao_B = int(input('Qual é a população do país B? '))\n \n print(f'Será necessário {popB_superaA(populacao_A,populacao_B )} ano(s) para que o páis com a menor população atualmente, se torne o país mais populoso.')\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.7394247055053711,
"alphanum_fraction": 0.7428088188171387,
"avg_line_length": 72.875,
"blob_id": "aa18ee458275ad5c0f6ea36f9e999575c8c79256",
"content_id": "38af652127ac9cb358ffd88c425edf1f6e0afe45",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 627,
"license_type": "no_license",
"max_line_length": 138,
"num_lines": 8,
"path": "/sem01-q2-bonusSobreAnos.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "# variável “anos”, do tipo int, recebe um valor a partir da leitura do teclado\nanos = int(input('Anos de serviço: '))\n#variável “valor_por_ano”, do tipo float, recebe um valor a partir da leitura do teclado\nvalor_por_ano = float(input('Valor por ano: '))\n#variável “bonus” recebe o resultado da multiplicação entre os valores guardados nas variáveis “anos” e “valor_por_ano”, respectivamente. \nbonus = anos * valor_por_ano\n#imprimir na tela, após o processamento, o valor correspondente ao bonus, com desconto guardado na variável “preco”_com_desconto”\nprint('Bônus de R$ %5.2f ' % bonus)\n"
},
{
"alpha_fraction": 0.5922330021858215,
"alphanum_fraction": 0.6135922074317932,
"avg_line_length": 24.75,
"blob_id": "20e5a810303a05346eba6e6f831108023bbe9917",
"content_id": "b14a875a194dc15d85dd315280729668e8709fd3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1032,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 40,
"path": "/sem11-2-q3-lista-intercalando2listaESoamando.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n3. Leia duas listas A e B contendo 20 elementos inteiros cada, gerar e exibir uma lista C do mesmo \ntamanho cujos elementos sejam a soma dos respectivos elementos de A e B.\n'''\ndef recebe20():\n num = 20\n lista = []\n cont = 0\n print('Criando uma lista com 20 itens: ')\n while cont < num:\n n_paraInserir =int(input(f'{cont+1}º elemento: '))\n lista.append(n_paraInserir)\n cont += 1\n return lista\n\ndef intercalacaoSomaDe2Listas(listaA, listaB):\n num = 20\n cont = 0\n listaC = []\n while cont < num:\n itemToInsert = listaA[cont] + listaB[cont]\n listaC.append(itemToInsert)\n cont += 1\n return listaC\n\ndef main():\n #entrada\n listaA = recebe20()\n listaB = recebe20()\n \n #processamento\n listaC = intercalacaoSomaDe2Listas(listaA, listaB)\n\n #saída\n print(f'Itens da lista A: {listaA}')\n print(f'Itens da lista B: {listaB}')\n print(f'Lista da soma dos itens da lista A e lista B: {listaC}')\n \nif __name__ == '__main__':\n main()\n"
},
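Pairing up same-index elements of two lists, as intercalacaoSomaDe2Listas does with a counter, is exactly what zip() is for. A sketch with three-element lists instead of twenty:

listaA = [1, 2, 3]
listaB = [10, 20, 30]
listaC = [a + b for a, b in zip(listaA, listaB)]
print(listaC)  # [11, 22, 33]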
{
"alpha_fraction": 0.5667641162872314,
"alphanum_fraction": 0.5774853825569153,
"avg_line_length": 29.62686538696289,
"blob_id": "f00cdaaf985cc500709bd01103d61a1b7a9abbf6",
"content_id": "0795051f3b2d40b8fa0c1430dcb6ddb9654eeab5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2069,
"license_type": "no_license",
"max_line_length": 142,
"num_lines": 67,
"path": "/sem12-q3-tuplas-lendoArquivosCSV-retornando-cidadesAniversariantes.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n03. Leia um dia e um mês como números inteiros distintos e informe as cidades que fazem aniversário nessa data.\nVeja o exemplo para o dia 9 e mês 2:\nCIDADES QUE FAZEM ANIVERSÁRIO EM 9 DE FEVEREIRO:\nSão Miguel do Passa Quatro(GO)\nCentralina(MG)\nItaporanga(PB)\n...\n'''\n\n#essa função ler um arquivo .csv\n#função modelo\ndef carrega_cidades():\n resultado = []\n with open('cidades.csv', 'r', encoding='utf-8') as arquivo:\n for linha in arquivo:\n uf, ibge, nome, dia, mes, pop = linha.split(';')\n resultado.append(\n (uf, int(ibge), nome, int(dia), int(mes), int(pop))\n )\n arquivo.close()\n return resultado\n\n#essa função ler um arquivo .csv e retorna os dados solicitados\ndef cidades_aniversariantes(dia_ani, mes_ani):\n resultado = []\n with open('cidades.csv', 'r', encoding='utf-8') as arquivo:\n for linha in arquivo:\n uf, ibge, nome, dia, mes, pop = linha.split(';')\n if int(dia) == dia_ani and int(mes) == mes_ani:\n resultado.append(\n #(uf, int(ibge), nome, int(dia), int(mes), int(pop))\n (uf, nome)\n )\n arquivo.close()\n return resultado\n\ndef mes_literal(mes):\n lista_mes = [ 'JANEIRO', 'FEVEREIRO', 'MARÇO', 'ABRIL', 'MAIO', 'JUNHO', 'JULHO', 'AGOSTO', 'SETEMBRO', 'OUTUBRO', 'NOVEMBRO', 'DEZEMBRO']\n return lista_mes[mes-1]\n \ndef main():\n '''\n cidades = carrega_cidades()\n print(cidades[:3] + cidades[-2:])\n print(cidades[1])\n print(cidades[:555])\n '''\n print(f'{\"=\"*15}Cidades que fazem aniversário nessa data{\"=\"*15}')\n #entrada de dados\n dia = int(input('Digite o dia: '))\n mes = int(input('Digite o mês: '))\n\n #processamento\n cidades = cidades_aniversariantes(dia, mes)\n\n #saída\n print(f'CIDADES QUE FAZEM ANIVERSÁRIO EM {dia} DE {mes_literal(mes)}:')\n cont = len(cidades)\n i = 0\n while i < cont:\n print(f'{cidades[i][1]}({cidades[i][0]})')\n i += 1\n \n \nif __name__ == '__main__':\n main()\n"
},
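carrega_cidades() splits each line by hand on ';'; the standard-library csv module does the same more robustly. A sketch assuming the same cidades.csv column layout (uf;ibge;nome;dia;mes;pop) used above:

import csv

def carrega_cidades_csv():
    resultado = []
    with open('cidades.csv', newline='', encoding='utf-8') as arquivo:
        # csv.reader handles the ';' delimiter and quoting for us
        for uf, ibge, nome, dia, mes, pop in csv.reader(arquivo, delimiter=';'):
            resultado.append((uf, int(ibge), nome, int(dia), int(mes), int(pop)))
    return resultado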
{
"alpha_fraction": 0.6019108295440674,
"alphanum_fraction": 0.6035031676292419,
"avg_line_length": 24.1200008392334,
"blob_id": "b3181e4273fdb62d4d59d4f902002a499c7343d1",
"content_id": "10ed37d02dc9256596271f079e68a73534ad8157",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 638,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 25,
"path": "/sem04-2-q1-retornaBooleanoSeVogal.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#Escreva um programa que leia um caractere e mostra o valor booleano True (verdadeiro) se for uma VOGAL ou o \n#valor booleano False (falso) caso contrário.\n\n#processamento da entrada\ndef resul(l):\n print(f'O caractere {l} é uma vogal??')\n if vog(l) == True:\n print(f'{vog(l)}, o caractere {l} é uma vogal.')\n else:\n print(f'{vog(l)}, o caractere {l} não é uma vogal.')\n\n#função 'é uma vogal??' \ndef vog(l):\n return l in 'aeiou'\n\ndef main():\n \n s = input('Digite UM, e SOMENTE UM, caractere: ').lower().strip()\n\n resul(s)#função resul(), linha 5\n \n\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.5906021595001221,
"alphanum_fraction": 0.6057962775230408,
"avg_line_length": 26.984251022338867,
"blob_id": "d5809020430920927d3e626f6b42631b360b9e3d",
"content_id": "860c97798bffe5d2e7a3abd6c02fb4b292a349ff",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3580,
"license_type": "no_license",
"max_line_length": 95,
"num_lines": 127,
"path": "/sem11-2-q1-lista-recriando-len_reverse_min_max_sum.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n1. As estruturas básicas de programação são sequência, condição e repetição. Usando apenas as \nestruturas básicas de programação, reescreva as funções abaixo (sem utilizá-las):\na) len(), que recebe uma lista e retorna número de itens;\nb) reverse(), que recebe uma lista e retorna uma lista com os itens na ordem invertida;\nc) min(),que recebe uma lista e retorna o menor valor\nd) max(), que recebe uma lista retorna o maior valor\ne) sum(), que recebe uma lista retorna a soma dos valores\nFaça a leitura dos valores necessários pelo teclado, a leitura de um número 0 (zero) encerra a \nleitura dos elementos da lista. Para cada uma das opções, imprima a lista que foi lida e o \nresultado encontrado.\nDica: Você pode usar esses nomes para suas funções: comprimento(), inverter(), \nminimo(), maximo(), soma().\n'''\n\ndef criaLista():\n lista = []\n print(f'{\"=\"*4}Criando uma lista: {\"=\"*4}')\n while True:\n n = int(input('Adicione(press 0 to stop): '))\n #n = int(input())\n if n == 0:\n break \n lista.append(n)\n \n return lista\n\ndef comprimento(lista):\n tem = True\n cont = 0\n i = 0\n while True:\n try:\n if lista[i] != '':\n cont += 1\n i += 1\n except:\n break\n return cont\n\ndef inverter(lista):\n tamanho = comprimento(lista)\n lista_invertida = []\n i = 0\n while i < tamanho:\n lista_invertida.insert(0,lista[i])\n i += 1\n \n return lista_invertida \n\ndef minimo(lista):\n tamanho = comprimento(lista)\n i = 1\n minimo = lista[0]\n while i < tamanho:\n if lista[i] < minimo:\n minimo = lista[i]\n i += 1\n return minimo\n\ndef maximo(lista):\n tamanho = comprimento(lista)\n i = 1\n maximo = lista[0]\n while i < tamanho:\n if lista[i] > maximo:\n maximo = lista[i]\n i += 1\n return maximo\n\ndef soma(lista):\n tamanho = comprimento(lista)\n i = 0\n soma = 0\n while i < tamanho:\n soma += int(lista[i])\n i += 1\n return soma\n \ndef main():\n #entrada\n lista1 = criaLista()\n #lista2 = criaLista()\n #lista3 = criaLista()\n #lista4 = criaLista()\n #lista5 = criaLista()\n \n #processamento\n comprimento_lista1 = comprimento(lista1) \n lista2_invertida = inverter(lista1)\n \n try:\n minimo_lista3 = minimo(lista1)\n except:\n minimo_lista3 = 0\n \n try:\n maior_lista4 = maximo(lista1)\n except:\n maior_lista4 = 0\n \n soma_itens_lista= soma(lista1)\n\n \n #saída\n '''{\n print(f'Primeira lista: {lista1}')\n print(f'Quantidade de itens da primeira lista: {comprimento_lista1}')\n print(f'Segunda lista: {lista2}')\n print(f'Segunda lista invertida: {lista2_invertida}')\n print(f'Terceira lista: {lista3}')\n print(f'Menor item da terceira lista: {minimo_lista3}')\n print(f'Quarta lista: {lista4}')\n print(f'Maior item da quarta lista: {maior_lista4}')\n print(f'Quinta lista: {lista5}')\n print(f'Soma dos itens da quinta lista: {soma_itens_lista}')\n }'''\n\n print(f'Itens inseridos na lista: {lista1}')\n print(f'Número de itens existentes na lista: {comprimento_lista1}')\n print(f'Lista com a ordem dos itens invertida: {lista2_invertida}')\n print(f'Menor item existente na lista: {minimo_lista3}')\n print(f'Maior item existente na lista: {maior_lista4}')\n print(f'Valor correspondente a soma de todos os itens dessa lista: {soma_itens_lista}')\n \nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.6493598818778992,
"alphanum_fraction": 0.683499276638031,
"avg_line_length": 41.60606002807617,
"blob_id": "550dfc71af71e40f003a42ba7d3713fc688203ae",
"content_id": "1313409a366bf3da02b0cbea41f683fa84a6faf1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1441,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 33,
"path": "/sem03-q7-inverteSequenciaNumeroInteiro.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#início da função \"inverter\", com um argumento\ndef inverter(numero):\n #variável \"u\" receber o valor da divisão, resto, do valor passado por argumento por 10\n u = numero % 10\n print(u)\n #variável \"numero\" atualiza 0 seu valor com a divisão, divisãi inteira, do seu próprio valor por 10\n numero = numero // 10\n print(numero)\n #variável \"d\" recebe o resto da divisão da variável \"numero\" por 10\n d = numero % 10\n print(d)\n #variável \"numero\" atualiza 0 seu valor com a divisão, divisãi inteira, do seu próprio valor por 10\n numero = numero // 10\n print(numero)\n #variável \"c\" recebe o resto da divisão da variável \"numero\" por 10\n c = numero % 10\n print(c)\n #variável \"numero\" atualiza 0 seu valor com a divisão, divisãi inteira, do seu próprio valor por 10\n numero = numero // 10\n print(numero)\n #variável \"m\" recebe o resto da divisão da variável \"numero\" por 10\n m = numero % 10\n print(m)\n #variável \"numero_invertido\" recebe o valor resultante da fórmula\n numero_invertido = (u*1000) + (d*100) + (c * 10) + m\n \n #retorno da função\n return numero_invertido\n\n#variável \"n\" recebe um valor, do tipo int, a partir da leitura do teclado\nn = int(input(\"Digite um número entre 1000 e 9999: \"))\n#imprimir na tela a mensagem concatenada com os valores inseridos conforme a ordem da formatação\nprint(f'o inverso de {n} é {inverter(n)}')\n"
},
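The digit-by-digit arithmetic above only handles four-digit numbers; reversing via a string slice works for any length and makes a handy cross-check:

def inverter_str(numero):
    # str -> reversed slice -> back to int
    return int(str(numero)[::-1])

print(inverter_str(1234))  # 4321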
{
"alpha_fraction": 0.534166693687439,
"alphanum_fraction": 0.5475000143051147,
"avg_line_length": 27.571428298950195,
"blob_id": "55e5a77c00b11cd5a312041ac5a14cd0a17b9d6d",
"content_id": "f85ff9f9f69353276aaa5522fcb3ea5618d0e397",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2421,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 84,
"path": "/sem15-code_clube-porta_da_fortuna-desafio01-contando_os_pontos.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "from random import *\n\ndef passo01():\n #imprime as três portas e as instruções do jogo\n print('''Porta da Fortuna!\n=========\n\nExiste um super prêmio atrás de uma dessas 3 portas!\nAdivinhe qual é a porta certa para ganhar o prémio!\n _____ _____ _____\n| | | | | |\n| [1] | | [2] | | [3] |\n| o| | o| | o|\n|_____| |_____| |_____|\n''')\n \n #deixe o jogador fazer 3 tentativas\n for attempt in range(3):\n\n print('\\nEscolha um porta (1, 2 ou 3):') \n #get the chosen door and store it as an integer (whole number)\n chosenDoor = input()\n chosenDoor = int(chosenDoor)\n\n #randomly choose the winning door number (between 1 and 3)\n winningDoor = randint(1,3)\n\n #show the player the winning and chosen door numbers\n print(\"A porta escolhida foi a\", chosenDoor)\n print(\"A pota certa é a\", winningDoor)\n\n #player wins if the chosen door and winning door number are the same\n if chosenDoor == winningDoor:\n print(\"Parabéns!\")\n else:\n print(\"Que peninha!\")\n\ndef desafio_contando_os_pontos():\n #imprime as três portas e as instruções do jogo\n print('''Porta da Fortuna!\n=========\n\nExiste um super prêmio atrás de uma dessas 3 portas!\nAdivinhe qual é a porta certa para ganhar o prémio!\n _____ _____ _____\n| | | | | |\n| [1] | | [2] | | [3] |\n| o| | o| | o|\n|_____| |_____| |_____|\n''')\n\n score = 0\n \n #deixe o jogador fazer 3 tentativas\n for attempt in range(3):\n\n print('\\nEscolha um porta (1, 2 ou 3):') \n #get the chosen door and store it as an integer (whole number)\n chosenDoor = input()\n chosenDoor = int(chosenDoor)\n\n #randomly choose the winning door number (between 1 and 3)\n winningDoor = randint(1,3)\n\n #show the player the winning and chosen door numbers\n print(\"A porta escolhida foi a\", chosenDoor)\n print(\"A pota certa é a\", winningDoor)\n\n #player wins if the chosen door and winning door number are the same\n if chosenDoor == winningDoor:\n print(\"Parabéns!\")\n score += 1\n else:\n print(\"Que peninha!\")\n \n print(\"\\nSua pontuação final é\", score,'.')\n \n\ndef main():\n #passo01()\n desafio_contando_os_pontos()\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.36890020966529846,
"alphanum_fraction": 0.3896576762199402,
"avg_line_length": 22.672412872314453,
"blob_id": "6b82dda8c36af90135aa81a5a7c5e07d96a825cf",
"content_id": "6acfee731877b768409c37cf4f7bfd85aed9cc2f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2779,
"license_type": "no_license",
"max_line_length": 156,
"num_lines": 116,
"path": "/art_ascii_sem01.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "print(\"--------Antes de mais nada; Desculpe as gambiarras professor!!! :):):)--------\")\nprint(\"Como eu ainda não entendi como usar o github,\\neu tentei fazer algo um pouco diferente das suas orientações.\\nEspero que possa considerar.:) :)\\n\\n\")\n#print(\"--------Digite 0(zero) para encerrar--------\\n\\n\")\n\na = 1\nwhile a != 0:\n print('''Escolha o número da página do desafio a ser corrigida: \n 5 (Desafio: No que você está pensando?)\n 7 (Desafio: Sobre você)\n 10 (Desafio: Palavras e números)\n 13 (Desafio: Programe um dinossauro)\n ''')\n b=int(input(\"= \"))\n if b == 5:\n print(\"--------Desafio: No que você está pensando?--------\")\n print('\\n\\nEstou com fome!\\n\\n')\n\n if b == 7:\n print(\"--------Desafio: Sobre você--------\")\n print('''\n\n Me interesso muito pelo mundo da informática.\n ________\n | |\n | WWW |\n |________| -->notebook\n / //\n /________//\n\n \n Outra paixão minha é a música.\n __________\n ||´´´´´´´´||\n || ||\n || ||\n || ||\n || ||\n || ||\n oOo oOo\n O O O O\n ºOº ºOº\n\n\n \n \n ''')\n if b == 10:\n print(\"--------Desafio: Palavras e números--------\")\n #print(\"ha \"*4)\n #print(\"ba\" + \"na\"*2)\n #print(\"Bra\" + \"sil\" + \"!\"*10)\n print(\"Para\" + \"le\"*2 + \"pípedo\")\n print(\"A\"+ \"ra\"*2)\n print(\"F \"*6)\n print(\"E \"*6)\n print(\"L \"*6)\n print(\"I \"*6)\n print(\"P \"*6)\n print(\"E \"*6)\n print(\"\\n\")\n print(\"K\"*6)\n\n if b == 13:\n print(\"-\"*13+\"Desafio: Programe um dinossauro\"+\"-\"*13)\n print(\"\\nParabéns\"+\"!\"*10)\n print(\"\\nAqui vai um dinossauro!\\n\")\n print(\" \"*12 + \"__\")\n print(\" \"*11 + \"/ _)\")\n print(\" \"*4 + \".-^^^-/ /\")\n print(\" __/\" + \" \"*7 + \"/\")\n print(\"<__.|_|-|_|\")\n print(\"\\nE aqui vai um bolo! hu\" +\"m\"*8)\n print(\"\\n\")\n print(\" i\"*10)\n print(\"#\"*21)\n print(\"=\"*21)\n print(\"#\"*21)\n print(\"=\"*21)\n print(\"#\"*21)\n print(\"\\n\")\n \n \n print(\"------------------------------------------------\")\n print('Continuar? (0 = não)')\n a=int(input(\"=\"))\n\n\n\n \n#else:\n# print(\"nada\")'''\n\n##print('''\n#'''\n \n#'''#)\n\n#print(\"Oi, tudo bem?\")\n\n#print('Estou com fome!')\n\n#print(\"Uma imagem de um cachorro...\")\n#print(\"0____ \")\n#print(\" |||| \")\n\n#print('''\n#Uma imagem de um cachorro...\n# 0____ \n# |||| ''')\n\n#Desafio: Sobre você\n\n#print('''\n#Uma imagem de um cachorro...\n# 0____ \n# |||| ''')\n"
},
{
"alpha_fraction": 0.7311475276947021,
"alphanum_fraction": 0.7442622780799866,
"avg_line_length": 54.45454406738281,
"blob_id": "138c6be45694ef2b857fa2d2f6192ab3ebf214c9",
"content_id": "c7df8d85b705d72baab87052f213dfd01c0c1a51",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 629,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 11,
"path": "/sem03-q2-funcaoParOuImpar.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#início da função denominada \"eh_par\", com um argumento\ndef eh_par(numero):\n # retorno da função com valores booleanos, confrome expressão\n return numero % 2 == 0\n\n#realiazar impressão das frases concatenadas com o retorno gerado pela função \"eh_par\" chamada de dentro do print.\nprint('2 é par?', eh_par(2))\n##realiazar impressão das frases concatenadas com o retorno gerado pela função \"eh_par\" chamada de dentro do print.\nprint('3 é par?', eh_par(3))\n##realiazar impressão das frases concatenadas com o retorno gerado pela função \"eh_par\" chamada de dentro do print.\nprint('5 é ímpar?', not eh_par(5))\n"
},
{
"alpha_fraction": 0.5418367385864258,
"alphanum_fraction": 0.5734694004058838,
"avg_line_length": 24.128204345703125,
"blob_id": "3e86b0fcca0377b8c1d7725fcffc48efaf7183f0",
"content_id": "c00b9b62c71c8cc6fbf79e7bc8b10d8fac2e750a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 999,
"license_type": "no_license",
"max_line_length": 139,
"num_lines": 39,
"path": "/sem07-q2-recebe100NumEClassificaParOuImpar.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#02. Escreva um programa que leia o um conjunto de 100 números inteiros positivos e determine a quantidade \n#de números pares e números ímpares contidos no mesmo.\n\nimport random\n\n#incrementa as variáveis globais 'par' ou 'imp', conforme o número seja par ou impar\ndef eh_par(a):\n global imp\n global par\n \n if a % 2 == 0:\n par += 1\n else:\n imp += 1\n \n \n\ndef main():\n \n global par\n par = 0\n global imp\n imp = 0\n \n i = 0 #contador \n # o emlaço recebe 100 números, um a um, a cada número é chamada a função eh_par(a), para determinar se o número recebido é par ou impar\n while i <= 99:\n \n #a = int(input(Digite um número({i+1}/100): ))\n a = random.randrange(1, 100)\n print(f'Digite um número({i+1}/100): {a}')\n b = eh_par(a)#linha 7\n i += 1\n\n \n print(f'\\nQuantos números pares: {par}.\\nQuantos números ímpares: {imp}.')\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.74219810962677,
"alphanum_fraction": 0.74219810962677,
"avg_line_length": 55.69230651855469,
"blob_id": "222386c479a6e8507c98414ce4c21893a0055e86",
"content_id": "8afac6ef6435664995fafe3fb2197c258e66fec9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 755,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 13,
"path": "/sem01-q6-trocaValoresEntreVariaveis.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#variável \"valor_a\", do tipo int, recebe um valor a partir da leitura do teclado\nvalor_a = int(input(\"Valor da variável A: \"))\n#variável \"valor_b\", do tipo int, recebe um valor a partir da leitura do teclado\nvalor_b = int(input(\"Valor da variável B: \"))\n#a variável \"auxiliar\" recebe o mesmo valor que está guardado na variável \"valor_a\"\nauxiliar = valor_a\n#a variável \"valor_a\" atualiza o seu valor recebendo o valor da variável \"valor_b\"\nvalor_a = valor_b\n#a variável \"valor_b\" atualiza o seu valor recebendo o valor da variável \"auxiliar\"\nvalor_b = auxiliar\n#impresão com formatação, f'', e valores contatenados na string conforme indicação das chaves\nprint(f\"Valor da variável A: {valor_a}\")\nprint(f\"Valor da variável B: {valor_b}\")\n"
},
{
"alpha_fraction": 0.6205357313156128,
"alphanum_fraction": 0.6264880895614624,
"avg_line_length": 34.3684196472168,
"blob_id": "1a665efc94ceeb957aa6615ca3abdc4ce50c08ac",
"content_id": "476362deb89e10a61f1cdcf163881909938f13e8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 682,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 19,
"path": "/sem11-desafio01_adicione-mais-cumprimentos.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#Desafio: Adicione mais cumprimentos (pág.: 05)\n\nfrom random import *\n\ndef main():\n print(\"Gerador de Cumprimentos\")\n print(\"-\"*23)\n \n adjetivos = ['maravilhoso', 'acima da média', 'excelente', 'excepicional', 'muito bom', 'habilidoso' ]\n hobbies = ['andar de bicicleta', 'programar', 'fazer chá', 'tocar violão', 'nadar', 'jogar futebol', 'cantar' ]\n\n nome = input(\"Qual é o seu nome?: \")\n print(f'Aqui está o seu cumprimento {nome}:' ) \n\n #obtém um item aleatório de ambas as listas e adiciona-os ao cumprimento\n print(f'{nome}, você é {choice(adjetivos)} em {choice(hobbies)}!')\n print(\"De nada!\")\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.6186224222183228,
"alphanum_fraction": 0.6284013390541077,
"avg_line_length": 29.9342098236084,
"blob_id": "d6e97cfe658d543626e629d92f9d1dba2cfb76a7",
"content_id": "606af4b6d78ba493deb5175372f264f0168652ab",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2358,
"license_type": "no_license",
"max_line_length": 129,
"num_lines": 76,
"path": "/sem11-2-q4-lista-associando-dados-em-2-listas.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n4. Um time de basquete possui 12 jogadores. Deseja-se um programa que, dado o nome e a altura \ndos jogadores, determine:\na. o nome e a altura do jogador mais alto;\nb. a média de altura do time;\nc. os jogadores com altura superior à média, listando o nome e a altura de cada um.\n'''\n\ndef recebe12dados():\n num = 12\n nome = []\n altura = []\n cont = 0\n while cont < num:\n nome_paraInserir =input('Digite o nome do(a) atleta: ').strip()\n altura_paraInserir = float(input(f'Qual a altura do(a) {nome_paraInserir}: '))\n nome.append(nome_paraInserir)\n altura.append(altura_paraInserir) \n cont += 1\n return nome, altura\n\ndef maior(nomes, alturas):\n indice_mais_alto = alturas.index(max(alturas))\n return nomes[indice_mais_alto], alturas[indice_mais_alto]\n\ndef media_aritimetrica(lista):\n tam = len(lista)\n i = 0\n total = 0\n while i < tam:\n total += lista[i]\n i += 1\n return total/tam\n\ndef acima_da_media(media, nomes, alturas):\n Nomes_acimaDaMedia = []\n alturas_acimaDaMedia = []\n tam = len(alturas)\n i = 0\n total = 0\n while i < tam:\n if alturas[i] > media:\n Nomes_acimaDaMedia.append(nomes[i])\n alturas_acimaDaMedia.append(alturas[i])\n i += 1\n \n return Nomes_acimaDaMedia, alturas_acimaDaMedia\n \ndef main():\n #entrada\n nome_atletas, altura_atletas = recebe12dados()\n\n #processamento\n nome_jogador_mais_alto, altura_jogador_mais_alto = maior(nome_atletas, altura_atletas)\n media_das_alturas = media_aritimetrica(altura_atletas)\n atletas_acima_da_mediaNomes, atletas_acima_da_mediaAlturas = acima_da_media(media_das_alturas, nome_atletas, altura_atletas )\n\n #saída\n print('JOGADOR MAIS ALTO DO TIME')\n print(f'Nome do(a) jogador(a) mais alto(a): {nome_jogador_mais_alto}')\n print(f'Altura do(a) {nome_jogador_mais_alto}: {altura_jogador_mais_alto:.2f}')\n print('ALTURA MÉDIA DO TIME')\n print('={:.2f}'.format(media_das_alturas))\n print('JOGADORES MAIS ALTOS QUE A MÉDIA DO TIME')\n\n tam = len(atletas_acima_da_mediaNomes)\n i = 0\n total = 0\n while i < tam:\n print('Nome: ',atletas_acima_da_mediaNomes[i])\n print(f'Altura: {atletas_acima_da_mediaAlturas[i]:.2f}\\n')\n i += 1\n \n \nif __name__ == '__main__':\n main() \n"
},
{
"alpha_fraction": 0.5898876190185547,
"alphanum_fraction": 0.604097843170166,
"avg_line_length": 36.35802459716797,
"blob_id": "de14b6e4239e2e6a1d9eba93da2602d9bd002025",
"content_id": "0611f1a417ede3a86d208cb9e82592030909a904",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3047,
"license_type": "no_license",
"max_line_length": 142,
"num_lines": 81,
"path": "/sem13-q2-arrays_media_das_temperaturas_em_Kelvin.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n02.Faça um programa que receba a temperatura média de cada mês do ano. A temperatura pode ser informada em graus\nCelsius, Fahrenheit ou Kelvin. Após isto, calcule a média anual das temperaturas e mostre, em Kelvin, todas as\ntemperaturas acima da média anual e em que mês elas ocorreram (mostrar o mês por extenso: 1 – Janeiro, 2 –\nFevereiro, ... ).\n'''\n\ndef preenche_matriz(linhas, colunas):\n matriz = [] #lista vazia\n mes = 1\n for lin in range(linhas):\n linha = [] # cada linha é uma lista (vetor)\n for col in range(colunas):\n if col == 0:\n n = mes\n mes += 1\n elif col == 1:\n n = float(input('temperatura: '))\n else:\n n = input('escala: ')[0].upper().strip() \n linha.append(n)\n # insere a linha na matriz\n matriz.append(linha)\n return matriz\n\n''' essa função monta a matriz bidimensional no formato dos livros de matemática\ndef imprime_matriz_indice(matriz):\n for i_linha in range(len(matriz)):\n print('|', end = '')\n for i_coluna in range(len(matriz[i_linha])):\n #print(f'{matriz[i_linha][i_coluna]:3d}', end = ' ')\n print(f'{matriz[i_linha][i_coluna]}', end = ' ')\n print('|')\n'''\n\n#vamos converter tudo para Celseius, para depois somar e calcular a média\ndef media_anualTemperatura(matriz): \n soma_de_temperaturas = 0\n new_matriz = []\n for linha in matriz:\n temperatura = linha[1]\n if linha[2] != 'K':\n if linha[2] == 'C':\n #convertendo o tupla_com_as_temperaturas[3] para celsius\n temperatura = temperatura + 273.15\n if linha[2] == 'F':\n temperatura = ((temperatura - 32) *( 5/9 )) + 273.15\n soma_de_temperaturas += temperatura\n new_matriz.append([linha[0], temperatura, 'K'])\n\n return (soma_de_temperaturas / 12), new_matriz\n\ndef acimaMediaTempsMes(matriz, media):\n lista_dos_acimaMediaIndice = []\n for linha in matriz:\n if linha[1] > media:\n lista_dos_acimaMediaIndice.append(linha)\n return lista_dos_acimaMediaIndice\n\ndef mes_literal(mes):\n lista_mes = [ 'Janeiro', 'Fevereiro', 'Março', 'Abril', 'Maio', 'Junho', 'Julho', 'Agosto', 'Setembro', 'Outubro', 'Novembro', 'Dezembro']\n return lista_mes[mes-1]\n\ndef main():\n #entrada de dados\n matriz_mes_temperatura = preenche_matriz( 12, 3 )\n \n #processamento\n mediaTemperaturas, temperaturas_convertidasKelvin = media_anualTemperatura(matriz_mes_temperatura)\n mesesComTemperaturasAcimaMedia = acimaMediaTempsMes(temperaturas_convertidasKelvin, mediaTemperaturas)\n\n #saída\n print('TEMPERATURA MÉDIA ANUAL')\n print(f'{round(mediaTemperaturas, 2)}K')\n print('TEMPERATURAS ACIMA DA MÉDIA ANUAL:') \n for linha in mesesComTemperaturasAcimaMedia:\n print(f'{mes_literal(linha[0])}: {round(linha[1], 2)}{linha[2]}')\n \nif __name__ == '__main__':\n \n main()\n"
},
{
"alpha_fraction": 0.4733560085296631,
"alphanum_fraction": 0.5379818677902222,
"avg_line_length": 34.95918273925781,
"blob_id": "d1db5fa3309998cfc9fda3af22c5451becb5f538",
"content_id": "22518b27b85cb368ac28e414e9a58ea015e55e85",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1777,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 49,
"path": "/ano2025.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "print(\"-\"*7 + \"Antes de mais nada; Desculpe as gambiarras professor!!! :):):)\" + \"-\"*7)\nprint(\"Professor, ainda não deu pra entregar pelo git.\\nEspero que possa considerar.:) :)\\n\\n\")\n#print(\"--------Digite 0(zero) para encerrar--------\\n\\n\")\n\na = 1\nwhile a != 0:\n print('''Escolha o número da página do desafio a ser corrigida: \n 3 (Desafio: Dinheiro no bolso)\n 4 (Desafio: Mudando datas)\n 8.1 (Desafio: O ano 3000!)\n 8.2 (Desafio: Sua idade em anos de cachorro)\n ''')\n b=float(input(\"= \"))\n if b == 3:\n print(\"-\"*15 + \"Desafio: Dinheiro no bolso\" + \"-\"*15)\n #print(f'\\n\\nCobrando R$ 12,50 por lavagem, 8 carros totalizariam R$ {8*12.5} reais!\\n\\n')\n print(\"\\n\\nCobrando R$ 12,50 por lavagem, 12 carros totalizariam R$ %.2f reais!!\\n\\n\"%(12*12.5))\n\n if b == 4:\n print(\"-\"*15 + \"Desafio: Mudando datas\" + \"-\"*15)\n\n print(f'\\nUma pessoa nascida em 1998 terá {2025-1998} anos em 2025.\\n')\n print(f'Por fim, uma pessoa nascida hoje, 28/03/2021, completará {2050-2021} anos no dia 28/03/2050.\\n')\n\n \n if b == 8.1:\n print(\"-\"*15 + \"Desafio: O ano 3000!\" + \"-\"*15)\n ano1= int(input(\"\\nEm que ano você nasceu?\\n\"))\n ano2= int(input(\"Para qual ano você quer saber sua idade?\\n\"))\n idade = ano2 - ano1\n print(f'No ano {ano2} você terá {idade} anos!\\n')\n \n\n if b == 8.2:\n print(\"-\"*15 + \"Desafio: Sua idade em anos de cachorro\" + \"-\"*15)\n\n ano1 = int(input(\"\\nQuantos anos você tem?\\n\"))\n print(f'Se você fosse um cachorro, você teria {7*ano1} anos!!')\n\n print('''\n '0'_____'\n || ||\n ''')\n\n \n \n print(\"=\"*60)\n print('Continuar? (0 = não)')\n a=int(input(\"=\"))\n\n\n"
},
{
"alpha_fraction": 0.581632673740387,
"alphanum_fraction": 0.5888355374336243,
"avg_line_length": 28.73214340209961,
"blob_id": "53678997a8d53f9e11a00483a0d22b22a81dcb18",
"content_id": "d60abcb4cce547cd90c14e46022ff55699c4d1a0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1706,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 56,
"path": "/sem04-q1-saudacoesHomemOuMulher.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n#outra forma de fazer...\ndef ident(n, s):\n if s == 1:\n print(f'Ilmo Sr. {n}')\n elif s == 2:\n print(f'Ilma Sra. {n}')\n else:\n print('Você não digitou um valor válido!!')\n \ndef main():\n try:\n n = input('Qual é o seu nome?\\n').strip()\n s = int(input(\"Qual é o seu sexo?(1 = masc ou 2 = fem)\\n\"))\n ident(n, s)\n except:\n print('Você não digitou um valor válido!!')\n \nif __name__ == '__main__':\n main()\n'''\n#Escreva um programa que leia o nome e o sexo de uma pessoa, e mostre o nome prece)dido da mensagem “Ilmo \n# Sr.”, caso seja informado o sexo masculino, ou “Ilma Sra.” se for informado o sexo feminino. Use o número inteiro \n# 1 para identificar masculino e 2 para identificar feminino.\n\n\n#função que processa as entradas\ndef ident(n, s):\n #caso seja masculino\n if s == 1:\n print(f'Ilmo Sr. {n}')\n #caso seja feminino\n elif s == 2:\n print(f'Ilma Sra. {n}')\n #caso digite um int diferente de 1 ou 2\n else:\n print('Você não digitou um valor válido!!')\n #chamando a função principal de novo, se as entradas não corresponderem ao esperado\n main()\n\ntry:\n # função principal\n def main():\n n = input('Qual é o seu nome?\\n').strip()\n s = int(input(\"Qual é o seu sexo?(1 = masc ou 2 = fem)\\n\"))\n #chamando a função de processamento\n ident(n, s)\n\n # função inicial\n if __name__ == '__main__':\n main()\n#caso seja inserido um valor inválido\nexcept:\n print('Você não digitou um valor válido!!')\n #chamando a função principal de novo, se as entradas não corresponderem ao esperado\n main()\n\n"
},
{
"alpha_fraction": 0.41153573989868164,
"alphanum_fraction": 0.4600141644477844,
"avg_line_length": 39.371429443359375,
"blob_id": "9dad04a7d0bd05ad95eb4d4cafc49c671ac1a38b",
"content_id": "e7387c898284ba0eb5f4cfe0759fdd0ac833a076",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2834,
"license_type": "no_license",
"max_line_length": 170,
"num_lines": 70,
"path": "/sem15-q4-dicionario-corrida_de_kart_com_6_corredores.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n04.Uma pista de Kart permite 10 voltas para cada um de 6 corredores. Escreva um programa que leia o nome do corredor \ne todos os tempos em segundos e os guarde em um dicionário, onde a chave é o nome do corredor. Ao final mostre\nclassificação final em ordem (primeiro o vencedor(a)), como mostrado abaixo:\n\n-------|----------------------|---------------|---------------|---------------\n Ordem | Nome do Corredor | Tempo Total | Tempo Médio | Melhor Volta \n-------|----------------------|---------------|---------------|---------------\n 1 | Eloá | 719.8 | 72.0 | 66.5 \n 2 | Luiz Henrique | 725.8 | 72.6 | 65.5 \n 3 | Gabriela | 729.2 | 72.9 | 65.7 \n 4 | Heitor | 745.3 | 74.5 | 65.0 \n 5 | Bianca | 756.6 | 75.7 | 67.4 \n 6 | Joana | 776.7 | 77.7 | 69.0 \n-------|----------------------|---------------|---------------|---------------\n\n'''\n\ndef classificacao(cronometragens):\n ranking_corredores = {}\n lugar = 1 \n for num in range(6):\n chave = list(cronometragens.keys())\n melhor_tempo = sum(cronometragens[chave[0]]), chave[0] \n for corredor in chave:\n if sum(cronometragens[corredor]) < melhor_tempo[0]:\n melhor_tempo = sum(cronometragens[corredor]), corredor\n\n ranking_corredores[melhor_tempo[1]] = lugar, melhor_tempo[0], cronometragens[melhor_tempo[1]]\n lugar += 1\n del cronometragens[melhor_tempo[1]]\n \n\n return ranking_corredores\n \n \n \n\ndef desempenho_corredores():\n corredores = {}\n for corredor in range(6):\n nome = input().strip()\n voltas_completas = []\n for volta in range(10):\n tempo = float(input())\n voltas_completas.append(tempo)\n corredores[nome] = voltas_completas\n\n return corredores\n \ndef main():\n #entrada\n cronometragem_da_corrida = desempenho_corredores()\n\n #processsanebto\n ranking = classificacao(cronometragem_da_corrida)\n \n #saída\n print( '-'*7 + '|' + '-'*22 + '|' + '-'*15 + '|' + '-'*15 + '|' + '-'*15 )\n print(f\"{'Ordem': ^7}|{'Nome do Corredor': ^22}|{'Tempo Total':^15}|{'Tempo Médio': ^15}|{'Melhor Volta':^15}\")\n print( '-'*7 + '|' + '-'*22 + '|' + '-'*15 + '|' + '-'*15 + '|' + '-'*15 )\n\n for corredor in ranking:\n print(f\"{ranking[corredor][0]: ^7}|{corredor: ^22}|{round(ranking[corredor][1], 1):^15}|{round(ranking[corredor][1]/10, 1): ^15}|{min(ranking[corredor][2]):^15}\")\n\n print( '-'*7 + '|' + '-'*22 + '|' + '-'*15 + '|' + '-'*15 + '|' + '-'*15 ) \n \n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.6427145600318909,
"alphanum_fraction": 0.652694582939148,
"avg_line_length": 36.11111068725586,
"blob_id": "d371a5ec53c4db96707e9b5b2b0fd52e80a732b7",
"content_id": "1ea6598f083d8d341b2adc0083e394007a157ac0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1009,
"license_type": "no_license",
"max_line_length": 163,
"num_lines": 27,
"path": "/sem14-q2-dicionario-agenda_de_contatos_com_dicionario.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n02.Crie um programa que, usando dicionário, crie uma agenda de tamanho fornecido inicialmente pelo usuário. Leia os \ndados de todos os contatos do usuário (nome, cidade, estado, telefone) de forma que a agenda fique completa e por \nfim imprima todos os contatos. Crie um código numérico sequencial para usar como chave do dicionário.\n'''\n\ndef cria_agenda(qntd):\n contatos = {}\n for pessoa in range(qntd):\n nome = input().strip()\n cidade = input().strip()\n estado = input().strip()\n telefone = input().strip()\n contatos[pessoa] = (nome, cidade, estado, telefone)\n return contatos\n \ndef main():\n #entrada\n qntd_contatos = int(input())\n agenda_telefonica = cria_agenda(qntd_contatos)\n\n #saída\n for contato in agenda_telefonica:\n print(f'{agenda_telefonica[contato][0]:<25}{agenda_telefonica[contato][1] + \"(\" + agenda_telefonica[contato][2] + \")\":<30}{agenda_telefonica[contato][3]}')\n \nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.7253885865211487,
"alphanum_fraction": 0.7253885865211487,
"avg_line_length": 31.16666603088379,
"blob_id": "e8b3961aef0575f60fa05b50023c8f24a5e3ca5e",
"content_id": "ed517effdb7edbaab2a81b026879e0b7fbe5b9e9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 396,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 12,
"path": "/sem03-q1-usandoFuncoes.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#inicio de uma função denominada \"bem_vindo\" \ndef bem_vindo():\n #imprimir na tela uma frase\n print('Bem-vindo ao python.')\n#inicio de uma função denominada \"mensagem\"\ndef mensagem(msg):\n #imprimir na tela a string \"alocada\" no arguemento \"msm\"\n print(msg)\n#chamando a função \"bem_vindo\"\nbem_vindo()\n#chamando a função \"mensagem\"\nmensagem(\"Curso de Programação Estruturada\")\n"
},
{
"alpha_fraction": 0.5724637508392334,
"alphanum_fraction": 0.5845410823822021,
"avg_line_length": 22,
"blob_id": "a49f7d6b519f12f5555c9dca7480bf9813b92567",
"content_id": "21293f0a86b9ae28ecc7c92df36a66f7ef545fd1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 424,
"license_type": "no_license",
"max_line_length": 104,
"num_lines": 18,
"path": "/sem08-2-q1-flag0RetornaSoma.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n01. Escreva um programa que leia um conjunto de números inteiros e exiba a soma dos mesmos. Observação: \nA condição de saída do laço será a leitura do valor 0 (flag).\n'''\n\n\ndef main():\n soma = 0\n while True:\n num = int(input('Digite um número: '))\n soma += num\n \n if num == 0: break\n\n print(f'A soma dos valores inseridos é {soma}.')\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.6347344517707825,
"alphanum_fraction": 0.6474421620368958,
"avg_line_length": 31.989246368408203,
"blob_id": "684cb0263023136635f88ec75391f5a15838777b",
"content_id": "4332d4b92e3716e322e038cb13660799fc4772b1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3084,
"license_type": "no_license",
"max_line_length": 104,
"num_lines": 93,
"path": "/sem11-q3-listas.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n3. Escreva um programa que leia um número n. Considere uma lista com n posições, e então:\na) preencha com valores reais lidos pelo teclado e imprima na ordem inversa. Considere até 4 (quatro) \ncasas decimais.\nb) preencha com n notas lidas pelo teclado e imprima as notas e a média na tela. Considere 1 (uma) casa \ndecimal. Se n = 0, imprima “SEM NOTAS”.\nc) preencha com n letras lidas pelo teclado e imprima quantas vogais foram lidas. Imprima as consoantes.\nDica: certifique-se de ler apenas um caractere com input()[0]\n'''\ndef formataFloatCasas(num,casas):\n semCasasDecimais = int(num)\n depoisDaVirgula = num - semCasasDecimais\n depoisDaVirgula = depoisDaVirgula * 10**casas\n depoisDaVirgula = int(depoisDaVirgula)\n depoisDaVirgula = depoisDaVirgula / 10**casas\n\n return semCasasDecimais + depoisDaVirgula \n \ndef inverteInsersaoFloats(num):\n lista = []\n cont = 0\n print('Preenchendo a lista com {num} termos: ')\n while cont < num:\n n_paraInserir =float(input(f'{cont+1}º termo: '))\n #n_paraInserir = formataFloatCasas(n_paraInserir,4)\n n_paraInserir = round(n_paraInserir, 4)\n lista.insert(0,n_paraInserir)\n cont += 1\n return lista\n\ndef recebe_notas(num):\n lista = []\n cont = 0\n print('Lista de notas: ')\n while cont < num:\n n_paraInserir =float(input('Digite uma nota: '))\n #n_paraInserir = formataFloatCasas(n_paraInserir,1)\n lista.append(n_paraInserir)\n cont += 1\n return lista\n\ndef mediaAritimetrica(lista, num):#n é igual ao númuero de termos da lista\n cont = 0\n total = 0\n while cont < num:\n total += lista[cont]\n #n_paraInserir = formataFloatCasas(n_paraInserir,1)\n cont += 1\n media = total / num\n media = round(media, 1) \n return media\n\ndef recebe_Nletras(num):\n lista = []\n cont = 0\n vogais = 0\n consoantes = []\n print('Preenchendo uma lista de caracteres: ')\n while cont < num:\n l_paraInserir =input('Digite uma letra: ')[0]\n lista.append(l_paraInserir)\n if l_paraInserir in 'AEIOUaeiou':\n vogais += 1\n elif l_paraInserir not in '0123456789':\n consoantes.append(l_paraInserir)\n cont += 1\n \n \n return lista , vogais, consoantes\n\ndef main():\n #entrada\n n_ezimoTermo = int(input('Tamanho da Lista: '))\n\n #processamento\n recebe_na_ordemInversa = inverteInsersaoFloats(n_ezimoTermo)\n n_notas = recebe_notas(n_ezimoTermo)\n lista_completa, vogais, consoantes = recebe_Nletras(n_ezimoTermo)\n \n\n #saída\n print(f'Termos da lista na ordem inversa: {recebe_na_ordemInversa}')\n print(f'Listas das notas inseridas: {n_notas}')\n if n_ezimoTermo != 0:\n print(f'Média das notas: {mediaAritimetrica(n_notas, n_ezimoTermo)}') \n else:\n print('SEM NOTAS')\n #lista_completa,'\\n',\n print(f'Vogais encontradas na lista de caracteres: {vogais}')\n print(f'Consoantes encontradas na lista de caracteres: {consoantes}')\n \nif __name__ == '__main__':\n main()\n\n"
},
{
"alpha_fraction": 0.6099210977554321,
"alphanum_fraction": 0.6369785666465759,
"avg_line_length": 31.851852416992188,
"blob_id": "1b28e243cd39dc1105a9df73a85f3d34661c5b26",
"content_id": "4a23c11457e737b7ea498c2daa2d8b44b5a24eee",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 903,
"license_type": "no_license",
"max_line_length": 131,
"num_lines": 27,
"path": "/sem09-q2-juros_compostos.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n01. Você tem uma poupança de 10 mil reais, que rende 0,7% ao mês. Você deseja comprar um carro, mas o\npreço do carro sobe a taxa de 0,4% ao mês. Escreva um programa que leia o preço de um carro hoje e \ncalcule em quantos meses, com o dinheiro dessa aplicação, você terá dinheiro suficiente para comprar o \ncarro à vista.\n'''\n\ndef quando_vou_comprar(valor_car):\n poup = 10000\n aplicacao = 0\n mes = 0\n while poup < valor_car:\n aplicacao = 0.007 * poup\n valor_car += 0.004 * valor_car\n poup += aplicacao\n mes += 1\n return mes\n \ndef main():\n valor_car = float(input('Valor do carro: R$ '))\n #valor_car = float(input())\n \n print(f'Levariam {quando_vou_comprar(valor_car)} meses para a aplição render o valor necessário para comprar o carro à vista.')\n #print(mes, poup, valor_car)\n \nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.4862721264362335,
"alphanum_fraction": 0.5356925129890442,
"avg_line_length": 17.839080810546875,
"blob_id": "7c0dfb6b5ebf10291d52ee49447d004e26958c7c",
"content_id": "f1c6f20d9d3663081b3e4a0a789c8841b68b8908",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1655,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 87,
"path": "/desafio_desenhando-padroes - Copia.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "from turtle import *\n\n#essa função leva a \"caneta\" pro topo esquerdo e define algumas propriedades da mesma\ndef posicionando():\n shape(\"turtle\")\n pensize(6)\n speed(11)\n color(\"Red\")\n\n #levando para o topo esquerdo\n penup()\n backward(250)\n left(90)\n forward(150)\n right(90)\n\n#essa função faz um espaçamento à direita\ndef espaco():\n penup()\n forward(250)\n\n#essa função desenha algo que lembra uma flor\ndef flor():\n color(\"Red\")\n pensize(5)\n pendown()\n for cont in range(36): \n forward(100)\n left(100)\n\n#essa função desenha algo que lembra uma sol \ndef sol():\n color(\"Yellow\")\n pensize(1)\n pendown()\n i = 0\n g = 0 \n for cont in range(220): \n forward(i)\n left(g)\n i += 1\n g+=1\n\n#essa função desenha algo que lembra uma flor \ndef flor2(): \n color(\"Pink\")\n pensize(3)\n pendown()\n for cont in range(8):\n color('Pink')\n for cont in range(360):\n if cont == 275:#225\n color('White')\n pendown()\n forward(1)\n left(1)\n penup()\n left(45)\n forward(80)\n \ndef main():\n posicionando()#linha 4\n flor()#linha 23\n espaco()#linha 18\n sol()#linha 32\n\n #ajustando a posição da \"caneta\" \n penup()\n forward(280) \n left(30)\n \n flor()#linha 23\n\n #levando a \"caneta\" para uma área abaixo dos desenhos já feitos\n penup()\n backward(450)\n right(90)\n forward(280)\n left(90)\n forward(280)\n \n flor2()#linha 45\n \n done()\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.5935637950897217,
"alphanum_fraction": 0.6209773421287537,
"avg_line_length": 33.95833206176758,
"blob_id": "bdb3fe1d603e559a7645f2281be8ba7003c23b27",
"content_id": "1f212b86ba478f9f0a46a97795523848b135e7fc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 854,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 24,
"path": "/sem04-q5-tratamentoDeMediaAri3Notas.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#Escreva um programa que leia três números inteiros correspondentes a três notas de um aluno. Apresente a média \n#das três notas, mas, se a terceira nota for superior a 8, o aluno deve ganhar mais um ponto na média. Além disso, \n#se a média final, em função do ponto extra, ficar acima de 10 ela deve ser ajustada para 10.\n\ndef media(a, b, c):\n return (a + b + c) / 3\n\ndef nota( m, n3):\n if n3 > 8:\n m += 1\n if m > 10:\n m = 10\n print(f'O aluno obteve como média final {m} pontos.')\n\ndef main():\n n1 = int(input('Digite a primeira nota do aluno:\\n'))\n n2 = int(input('Digite a segunda nota do aluno:\\n'))\n n3 = int(input('Digite a terceira nota do aluno:\\n')) \n med = media(n1, n2, n3)#função media(), linha 5\n nota(med, n3)#função nota(), linha 8\n\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.3862704932689667,
"alphanum_fraction": 0.4200819730758667,
"avg_line_length": 16.727272033691406,
"blob_id": "31547bb7f85b738f946c1d869d0044e957888ee7",
"content_id": "83594ff609718be99affe024e5e17c577ffcd47f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 979,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 55,
"path": "/sem08-q4-retornaNumInvertido.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#Escreva um programa que leia número inteiro qualquer e mostre na forma invertida.\n\ndef inverte(num):\n a = num\n aux = 0\n while a > 0:\n b = a % 10\n a = a // 10\n aux = aux * 10 + b\n \n return aux\n\ndef main():\n num = int(input('Digite um número inteiro qualquer: '))\n\n print(f'Número invertido: {inverte(num)}.')\n\nif __name__ == '__main__':\n main()\n\n\n '''\n tentando fazer de outro jeito:\n i = -1\n b = 10\n a = num\n while a >= 1:\n a = a / b #a= 0.9\n i += 1 # i = 0\n \n \n print(i)\n d = 10**i #d = 1\n cont1 = 0\n aux = 0\n while cont1 < i:\n div = num % d #div = 9\n aux = aux*10 + div \n cont1 += 1\n d = d**(i-1)\n return aux\n\n \n \n #\n 11\n while True:\n \n if num > maior:\n maior = num\n if num < menor and num != 0:\n menor = num\n aux = num\n if num == 0: break\n '''\n\n"
},
{
"alpha_fraction": 0.43455496430397034,
"alphanum_fraction": 0.47643980383872986,
"avg_line_length": 10.9375,
"blob_id": "ec66890861d8a54b5fed3105c0992e7e59bc5766",
"content_id": "debca6ae5fe7ea701cd1139cbdc9c8252fdbdce1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 191,
"license_type": "no_license",
"max_line_length": 26,
"num_lines": 16,
"path": "/passo2-auto-repetição.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "from turtle import *\n\n\ndef main():\n \n speed(11)\n shape(\"turtle\")\n\n for cont in range(4):\n forward(100)\n right(90)\n\n done()\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.6279707551002502,
"alphanum_fraction": 0.6297988891601562,
"avg_line_length": 31.176469802856445,
"blob_id": "1eb5d74f44f21e1c4f0c0cc7cb29ebf05e37d92f",
"content_id": "3e448a5af2e1b5fffb8975182b41f82d5cbce8c8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1125,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 34,
"path": "/sem04-q2-retornaBooleanoSeParOuImpar.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#Escreva um programa que leia um número e mostra o valor booleano True (verdadeiro) se o número for ímpar ou \n#o valor booleano False (falso) caso contrário.\n\n#função retorna valores booleanos caso seja impar(true) ou par(false)\ndef eh_impar(n):\n return n % 2 != 0\n\n#processamento do valor recebido na função n\ndef par_ou_impar(n):\n if n == True:\n return 'ímpar'\n else:\n return 'par'\n\ntry:#função principal\n def main():\n \n #n = int(input())\n n = int(input('Digite um número: '))\n #variável 'c' que recebe o valor da função chamada eh_impar() \n c = eh_impar(n)\n print(f'{n} é impar?? {c}')\n #imprimir na tela resultado final caso seja ímpar ou par\n #print(eh_impar(n))\n print(f'O número {n} é um número {par_ou_impar(c)}.')\n # função inicial\n if __name__ == '__main__':\n main()\n\n#caso não seja inserido um número\nexcept:\n print('Você não digitou um número! Por favor digite um número inteiro.')\n #chamando a função principal de novo, se as entradas não corresponderem ao esperado\n main()\n"
},
{
"alpha_fraction": 0.49051937460899353,
"alphanum_fraction": 0.5004122257232666,
"avg_line_length": 20.660715103149414,
"blob_id": "7c68aef5a31224547e10b7fd9e1c9a654aeaea98",
"content_id": "76dfecffc6f8c5959f8f9c21f0cfe33a4c80651e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1237,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 56,
"path": "/sem14-q4-dicionario-contagem_de_convais_considerando_acentos.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n04.Escreva um programa que conta a quantidade de vogais em um texto e armazena tal quantidade em um dicionário, \nonde a chave é a vogal considerada. Inclua as vogais com acentos na contagem.\n'''\ndef conta_vogais(frase):\n vogais = {}\n tem = 0\n for char in 'AÂÃÁaáàâã':\n for letra in frase:\n if letra == char:\n tem += 1\n vogais['A'] = tem\n\n tem = 0\n for char in 'EÊÉeéê':\n for letra in frase:\n if letra == char:\n tem += 1\n vogais['E'] = tem\n\n tem = 0\n for char in 'IÍií':\n for letra in frase:\n if letra == char:\n tem += 1\n vogais['I'] = tem\n\n tem = 0\n for char in 'OÔÓÕoóôõ':\n for letra in frase:\n if letra == char:\n tem += 1\n vogais['O'] = tem\n\n tem = 0\n for char in 'UÚuú':\n for letra in frase:\n if letra == char:\n tem += 1\n vogais['U'] = tem\n\n return vogais\n\ndef main():\n #entrada\n frase = input().strip()\n\n #processamento\n vogais = conta_vogais(frase)\n\n #saída\n for vogal in vogais:\n print(f'{vogal}: {vogais[vogal]}')\n \nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.5787451863288879,
"alphanum_fraction": 0.5787451863288879,
"avg_line_length": 27.925926208496094,
"blob_id": "c7d5b0b9100256363df9a3b0a4a26166abf75148",
"content_id": "6ae5f339c7cafa3b9c88b27a9969f42455055697",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 820,
"license_type": "no_license",
"max_line_length": 124,
"num_lines": 27,
"path": "/sem04-q3-simulaSemafaro.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#Escreva um programa que leia a cor de um sinal de trânsito\n#(“V” é verde; “A” é amarelo; “E” é vermelho) e retorne \n#a respectiva mensagem “Siga”, “Atenção”, ou “Pare”. Assuma entradas válidas.\n \ndef sinal(s):\n if s == 'v':\n return 'Siga'\n elif s == 'a':\n return 'Atenção'\n elif s == 'e':\n return 'Pare'\n #caso não seja digitado v, a ou e\n else:\n print(\"Você não digitou 'V', 'A' ou 'E'. Tente novamente!\")\n #chamando a função principal de novo, se as entradas não corresponderem ao esperado\n main()\n\n\ndef main():\n \n s = input('Digite o caractere correspondente a cor sinal:( V = verde ou A = amarelo ou E = vermelho)\\n').lower().strip()\n print(f'{sinal(s)}\\n')\n \n\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.4143884778022766,
"alphanum_fraction": 0.4258992671966553,
"avg_line_length": 16.375,
"blob_id": "117df9a2e255f2d0bd48a97c77c6ff39544acfb5",
"content_id": "26e6b35d184167e7cdf655e760dfcbe633e73836",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 701,
"license_type": "no_license",
"max_line_length": 49,
"num_lines": 40,
"path": "/sem05-q5-recebe3NumERetornaEmOndemCres.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n05. Escreva um programa que leia três números por\nparâmetro e mostre na tela em ordem crescente.\n'''\ndef maior(a, b):\n if a > b:\n return a , b\n else:\n return b , a\n\ndef ordem(a, b, c):\n t = a\n if b > a:\n t = b\n if c > t:\n t = c\n \n if t == a:\n s , p = maior(b , c)\n elif t == b:\n s , p = maior(a , c)\n else:\n s , p = maior(a , b)\n \n return p, s, t\n \n \ndef main():\n n1 = int(input('número: '))\n n2 = int(input('número: '))\n n3 = int(input('número: '))\n p, s, t = ordem(n1, n2, n3)\n\n #print(p , s, t)\n print(p)\n print(s)\n print(t)\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.4213414490222931,
"alphanum_fraction": 0.4518292546272278,
"avg_line_length": 22.098590850830078,
"blob_id": "ec282dcdce2541eafe1982b91ab420e879d296eb",
"content_id": "c1c9235c42a5e17330efdfb0829a00add0768dca",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1649,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 71,
"path": "/sem05-2-q2-retornaDataMaisRecente.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "def data_mais_recente(d, m, a, d1, m1, a1):\n if a > a1:\n return d, m, a\n elif a < a1:\n return d1, m1, a1\n else:\n if m > m1:\n return d, m, a\n elif m < m1:\n return d1, m1, a1\n else:\n if d > d1:\n return d, m, a\n elif d < d1:\n return d1, m1, a1\n else:\n return d1, m1, a1\n \n''' \n #0 mais antigo, 1 mais recente\n # a = 0, a1 = 1\n if a <= a1: #sim\n if m < m1\n return d1 , m1 , a1\n elif m > m1:\n return d , m, a\n elif\n sei la\n elif a < a1 and m < m1:\n return d , m , a\n elif a \n if d < dn and m < mn and a < a1:\n return d , m , a\n elif d > dn and m <\n else:\n return a - and\n sei la\n else:\n return d , m , a\n'''\n\ndef main():\n \n dd1 = int(input('dia da primeira data: \\n'))\n mm1 = int(input('mês da primeira data: \\n'))\n yy1 = int(input('ano da primeira data: \\n'))\n\n dd2 = int(input('dia da segunda data: \\n'))\n mm2 = int(input('mês da segunda data: \\n'))\n yy2 = int(input('ano da segunda data: \\n'))\n \n a, b, c = data_mais_recente(dd1, mm1, yy1, dd2, mm2, yy2)\n \n print(f'{a}/{b}/{c}')\n \n\nif __name__ == '__main__':\n main()\n'''\n#a é mais recente ou o mesmo ano que a1??\n #se sim, então \nse a <= a1:\n SE m1 < m:\n return \n a é menos recente ou mesmo ano\n\nse não:\n a é mais recente\n\nprofessor, gostaria de sugerir que coloque pelo menos ums exemplo de entrada e saída em cada questão\n'''\n"
},
{
"alpha_fraction": 0.7480106353759766,
"alphanum_fraction": 0.7480106353759766,
"avg_line_length": 57,
"blob_id": "8099919028e91e1fc2806a49fd376271863d5a27",
"content_id": "aac8c46ab902c12073e6dbeaf3f7c67c368c0f46",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 766,
"license_type": "no_license",
"max_line_length": 133,
"num_lines": 13,
"path": "/sem01-q4-divisoesInteiraEResto.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#imprime uma mensagem na tela\nprint(\"Demonstração de divisão inteira(//) e resto (%).\")\n#variável \"dividendo\", do tipo int, recebe um valor a partir da leitura do teclado\ndividendo = int(input(\"Digite o dividendo: \"))\n#variável \"divisor\", do tipo int, recebe um valor a partir da leitura do teclado\ndivisor = int(input(\"Digite o divisor: \"))\n#variável \"quociente\" recebe o valor INTEIRO do resultado da divisão entre os valores guardados nas variáveis \"dividendo\" e \"divisor\"\nquociente = dividendo // divisor\n#variável \"resto\" recebe o valor do 'RESTO' da divisão entre os valores guardados nas variáveis \"dividendo\" e \"divisor\"\nresto = dividendo % divisor\n#\nprint(f'{dividendo} dividido por {divisor}')\nprint(f' é igual a {quociente} e resto {resto}.')\n"
},
{
"alpha_fraction": 0.5203937888145447,
"alphanum_fraction": 0.5555555820465088,
"avg_line_length": 27.440000534057617,
"blob_id": "525bda6a6d8e6cbbc36260321067582c7859432e",
"content_id": "8af82709311f6d8d0d2e8b8ea7ac5140355634e7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 745,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 25,
"path": "/sem04-2-q5-classificaCaractere.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#05. Escreva um programa que leia um caractere e mostra uma das mensagens: “vogal”, “consoante”, “número” ou \n#“símbolo”. Observação: O cedilha “ç”, caracteres acentuados, espaço em branco e outros como “símbolo”;\n\ndef entr():\n s = input('Digite um, e somente um, caractere: ').lower().strip()\n return ord(s)\n\ndef oQue(l):\n if l >= 97 and l <= 122:\n if (l == 97 or l == 101 or l == 105 or l== 111 or l == 117):\n return 'vogal'\n else:\n return 'consoante'\n elif l >= 48 and l <= 57:\n return 'número'\n else:\n return 'símbolo'\n\n\ndef main():\n a = entr()\n print(f'O caractere {chr(a)}, é um(a) {oQue(a)}.')\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.5701530575752258,
"alphanum_fraction": 0.6045918464660645,
"avg_line_length": 30.360000610351562,
"blob_id": "c28e563a1ba4320d1e9241d18acbdd563953ca5e",
"content_id": "863590859a8bf7749b3faab3597003b0ca9f161e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 800,
"license_type": "no_license",
"max_line_length": 107,
"num_lines": 25,
"path": "/sem09-2-q2-fibonacci.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n02. A Sequência de Fibonacci é uma sequência de números inteiros, começando por 0 e 1, na qual, cada termo \nsubsequente corresponde à soma dos dois anteriores (0, 1, 1, 2, 3, 5, 8, 13, ...). Escreva um programa que \nleia um número n, calcule e mostre os n primeiros termos da sequência de Fibonacci. O valor lido para n\nsempre será maior ou igual a 2.\n'''\n\ndef fibonacci(n):\n fib = 0\n f1= 0\n f2 = 1\n while n-1 > 0:\n #print(fib ,end = ', ')\n fib = f1 + f2\n f2 = f1\n f1 = fib \n n -= 1\n return fib\n\ndef main():\n n = int(input('Digite a posição de um terno da sequência de Fibonacci que deseja saber: '))\n print(f'O {n}º termo da Sequência de Fibonacci é o número {fibonacci(n)}.')\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.5621370673179626,
"alphanum_fraction": 0.6248548030853271,
"avg_line_length": 26.774192810058594,
"blob_id": "b0714f0a4bf415d022bb17457e6d560a9d3dee71",
"content_id": "77e6f13dd4b3816a4e71c3ad89c66f7ae32525aa",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 876,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 31,
"path": "/sem07-2-q5t2-simulacaoDeAte24PrestcDeUmPreco.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n05. Escreva um programa que simula o valor (com duas casas decimais) da prestação de uma compra \nparcelada sem juros de 1x até 24x. O valor da compra é digitado pelo usuário. O valor da prestação sem \njuros, deve ser calculado como o valor da compra dividido pelo número de prestações de 1 até 24. O \nprograma estará correto se o usuário informar o valor 1000 e o programa produzir o seguinte resultado:\n 1x de R$ 1000.00\n 2x de R$ 500.00\n 3x de R$ 333.33\n 4x de R$ 250.00\n [...]\n 23x de R$ 43.48\n 24x de R$ 41.67\n'''\n\ndef prestacao(v):\n i = 0\n for cont in range(24):\n i += 1\n prest = v / i\n print(f'{i}x de R$ {prest:.2f}')\n \n\n\ndef main():\n valor = float(input('Digite o valor do produto: R$ '))\n print('\\nOpções de parcelamento:\\n')\n prestacao(valor)\n\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.46548324823379517,
"alphanum_fraction": 0.4911242723464966,
"avg_line_length": 22.045454025268555,
"blob_id": "f82f4ace0d2182a95b676ca05d8279e534280d66",
"content_id": "f8caf78576fc17ffa0b50997f43e0de57065a96f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 511,
"license_type": "no_license",
"max_line_length": 89,
"num_lines": 22,
"path": "/sem09-2-q1-fatorial.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n01. Escreva um programa que calcule o fatorial de um número inteiro lido, sabendo-se que:\n N ! = 1 x 2 x 3 x ... x N-1 x N\n 0 ! = 1\n'''\n\ndef fatorial(n):\n aux = n - 1\n if n == 0:\n return 1\n else: \n while aux >= 1:\n n *= aux\n aux -= 1\n return n\n \ndef main():\n n = int(input('Digite um número para calcular a sua fatorial: '))\n print(f'A fatorial do número {n} é igual a {fatorial(n)}.') \n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.6505900621414185,
"alphanum_fraction": 0.6803488731384277,
"avg_line_length": 47.724998474121094,
"blob_id": "8e882991d6700d3d2cc67f7a216ac7a812b406fe",
"content_id": "a5f725a0c6274315f696664ce21f227a8e710fd0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1958,
"license_type": "no_license",
"max_line_length": 107,
"num_lines": 40,
"path": "/sem12-q2-tuplas-converteSomaDuasTemperaturas.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n02. Utilizando a definição de valor da temperatura com tupla da questão anterior, desenvolva uma função\nque soma duas temperaturas que podem estar em Celsius ou em Fahrenheit. Se as duas temperaturas estiverem\nna mesma escala, a resposta deve estar na mesma escala. Se as temperaturas estiverem em escalas diferentes,\na resposta deve ser dada na escala da segunda temperatura. Considere até 4 (quatro) casas decimais.\n'''\n\n#soma duas temperaturas, se necessário converte para a escala da primeira temperatura\ndef soma_temperatura(tupla_com_as_temperaturas):\n temperatura1 = tupla_com_as_temperaturas[0]\n temperatura2 = tupla_com_as_temperaturas[2]\n if tupla_com_as_temperaturas[1] != tupla_com_as_temperaturas[3]:\n if tupla_com_as_temperaturas[3] == 'C':\n #convertendo o tupla_com_as_temperaturas[3] para celsius\n temperatura1 = (tupla_com_as_temperaturas[0] - 32) * (5/9), 'C'\n else:\n #convertendo o tupla_com_as_temperaturas[3] para fahrenheit\n temperatura1 = (tupla_com_as_temperaturas[0] * (9/5)) + 32, 'F'\n soma_temp = round(temperatura1[0] + temperatura2,4) , temperatura1[1]\n else:\n soma_temp = round(temperatura1 + temperatura2, 4), tupla_com_as_temperaturas[1] \n return soma_temp\n \ndef main():\n #entrada de dados\n print(f'{\"=\"*15}Soma duas temperaturas{\"=\"*15}')\n temperatura01 = float(input('Digite a primeira temperatura: '))\n escala01 = input('Agora digite a escala (\"C\" ou \"F\": ): ').strip().upper()[0]\n temperatura02 = float(input('Digite a segunda temperatura: '))\n escala02 = input('Agora digite a escala (\"C\" ou \"F\": ): ').strip().upper()[0]\n temperaturas = (temperatura01, escala01, temperatura02, escala02)\n\n #processamento\n soma_temp = soma_temperatura(temperaturas)\n\n #saída\n print(f'A soma das temperatura informadas foi: {soma_temp[0]} {soma_temp[1]}º.')\n \nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.4163491427898407,
"alphanum_fraction": 0.4192933738231659,
"avg_line_length": 35.5379753112793,
"blob_id": "db4b4ca2039047e31a1d63498c8f4499b35f4cd4",
"content_id": "2f27c9d61aa7269c50aaf24fb5d3d7c57ad31673",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5812,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 158,
"path": "/sem11-desafio03_servico-de-escolha-de-nome-para-pets.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "from random import *\n\ndef nome_do_pet():\n nomes_femininos = ['Malu', 'Mika', 'Tantam', 'Bibinha', 'Soph', 'Hannah' ]\n nomes_masculinos = ['Tantam', 'Nagro', 'Mathaus', 'Pimpim', 'Bolinha', 'Brutos' ]\n \n executa = True\n while executa == True:\n print('''\nmenu\n s = sortear o nome\n a = adicionar nome\n d = remover nome\n p = imprimir nomes\n q = sair\n ''')\n # \n menuChoice = input(\"\\n>_\").lower()\n #'s' para um sortear um nome\n if menuChoice == 's':\n escolhe_nome = True\n while escolhe_nome == True:\n print('''\nSexo do animal de estimação?\n f = feminino\n m = masculino''')\n sexo = input(\"\\n>_\").lower()\n if sexo == 'm':\n print(f'Aqui está uma sugestão para um pet do sexo masculino:' )\n print(f'Você deve chamar seu animal de estimação de {choice(nomes_masculinos)}.')\n print(\"De nada!\")\n escolhe_nome = False\n \n elif sexo == 'f':\n print(f'Aqui está uma sugestão para um pet do sexo femino:' )\n print(f'Você deve chamar seu animal de estimação de {choice(nomes_femininos)}.')\n print(\"De nada!\")\n escolhe_nome = False\n else:\n print('Você não informou o sexo do pet corretamente. Digite f para feminino ou m para masculino.')\n\n\n #'a' para adicionar nome\n elif menuChoice == 'a':\n escolhe_nome = True\n while escolhe_nome == True:\n print('''\nSexo do animal de estimação?\n f = feminino\n m = masculino''')\n sexo = input(\"\\n>_\").lower()\n nameToAdd = input('Adicione o nome: ').strip()\n ###\n tam_p = len(nameToAdd)\n itemToAdd = nameToAdd[0].upper()\n iii = 1\n while iii < tam_p:\n itemToAdd += nameToAdd[iii].lower()\n iii+=1\n itemToAdd = itemToAdd.strip()\n ###\n if sexo == 'm':\n \n #só adiciona um item se ele não estiver na lista\n tam = len(nomes_masculinos)\n i = 0\n tem = False\n while i < tam:\n if itemToAdd.lower() == nomes_masculinos[i].lower(): \n tem = True\n i += 1\n \n if tem != True:\n nomes_masculinos.append(itemToAdd) \n else:\n print(\"O nome já está na lista!\")\n escolhe_nome = False\n \n elif sexo == 'f':\n \n #só adiciona um item se ele não estiver na lista\n tam = len(nomes_femininos)\n i = 0\n tem = False\n while i < tam:\n if itemToAdd.lower() == nomes_femininos[i].lower(): \n tem = True\n i += 1\n \n if tem != True:\n nomes_femininos.append(itemToAdd) \n else:\n print(\"O nome já está na lista!\")\n escolhe_nome = False\n else:\n print('Você não informou o sexo do pet corretamente. 
Digite f para feminino ou m para masculino.')\n\n #'d' para remover um hobby\n elif menuChoice == 'd':\n\n nameToDelete = input(\"Inserir o nome a ser removido: \")\n\n #primeira = itemToDelete[0].lower()\n tam_p = len(nameToDelete)\n itemToDelete = nameToDelete[0].upper()\n iii = 1\n while iii < tam_p:\n itemToDelete += nameToDelete[iii].lower()\n iii+=1\n itemToDelete = itemToDelete.strip() \n #só remove um item se ele estiver na lista\n ###\n tem = False\n tam_f = len(nomes_femininos)\n i = 0\n while i < tam_f:\n if itemToDelete.lower() == nomes_femininos[i].lower(): \n nomes_femininos.remove(itemToDelete)\n tem = True\n break\n i += 1\n \n tam_m = len(nomes_masculinos)\n ii = 0\n while ii < tam_m:\n if itemToDelete.lower() == nomes_masculinos[ii].lower(): \n nomes_masculinos.remove(itemToDelete)\n tem = True\n break\n ii += 1\n if tem == False:\n print(\"O nome não está na lista!\")\n ###\n '''\n if itemToDelete in hobbies:\n hobbies.remove(itemToDelete)\n else:\n print(\"O nome não está na lista!\")\n '''\n #'p' para imprimir a lista de hobbies\n elif menuChoice == 'p':\n print(f'Nomes para pets machos:\\n{nomes_masculinos}.\\n')\n print(f'Nomes para pets fêmeas:\\n{nomes_femininos}.\\n')\n #'q' para sair\n elif menuChoice == 'q':\n\n executa = False\n else:\n print('Escolha uma opção válida!')\n \ndef main():\n print(\"Serviço de escolha de nome para animais de estimação\")\n print(\"-\"*53)\n\n nome_do_pet() \n \nif __name__ == '__main__':\n main()\n\n"
},
{
"alpha_fraction": 0.523052453994751,
"alphanum_fraction": 0.5779014229774475,
"avg_line_length": 14.924050331115723,
"blob_id": "9f8fd37f4fecc7a908c9f49b36e4cf96a4e7c615",
"content_id": "59c998da5b64b12d33b0fb0d4f209c5481688b0c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1267,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 79,
"path": "/sem15-q3-dicionario-quantas_vezes_cada_face_de_um_dado_caiu.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n03.Suponha que vamos jogar um dado e queremos saber quantas vezes cada face (de 1 a 6) caiu. Faça um programa que \nleia o resultado de cada jogada do dado e armazena em um dicionário. A face do dado é a chave para o dicionário e \na leitura de um valor 0 (zero) na face encerra o jogo. Mostre quantas vezes o dado foi lançado e quantas vezes cada \nface saiu.\n'''\n\ndef main():\n faces_dado = {1: 0,\n 2: 0,\n 3: 0,\n 4: 0,\n 5: 0,\n 6: 0\n }\n\n #entrada\n jogadas_feitas = 0\n while True:\n jogada = int(input('Joque o dado e informe qual o número da face sorteada:\\n(press 0 to stop)\\n'))\n if jogada == 0: break\n faces_dado[jogada] = faces_dado[jogada] + 1\n jogadas_feitas += 1\n\n #saída\n print(f'O dado foi lançado {jogadas_feitas} vezes.')\n for item in faces_dado:\n print(f'A face {item} saiu {faces_dado[item]} vezes.')\n \n \n \nif __name__ == '__main__':\n main()\n\n'''\nO dado foi lançado 32 vezes.\nA face 1 saiu 4 vezes.\nA face 2 saiu 7 vezes.\nA face 3 saiu 5 vezes.\nA face 4 saiu 9 vezes.\nA face 5 saiu 6 vezes.\nA face 6 saiu 1 vezes.\n'''\n\n'''\n1\n3\n3\n2\n5\n4\n4\n4\n1\n5\n2\n2\n5\n4\n5\n2\n5\n4\n2\n4\n5\n4\n4\n4\n2\n2\n3\n1\n6\n1\n3\n3\n0\n'''\n"
},
{
"alpha_fraction": 0.6650809049606323,
"alphanum_fraction": 0.6812559366226196,
"avg_line_length": 37.925926208496094,
"blob_id": "bca74b83211ce074f94c41f85ed982eaf5645058",
"content_id": "8c127c6fbe7fb498969b4519025c6fd77736be4b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1062,
"license_type": "no_license",
"max_line_length": 127,
"num_lines": 27,
"path": "/sem09-q1-tartarugaEALebre.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n01. A tartaruga e a lebre vão apostar uma corrida. A lebre concede à tartaruga o direito de sair n sua frente. \nA tartaruga corre a 1 metro por minuto e a lebre corre a 10 metros por minuto. Faça um programa que \nleia quantos metros a tartaruga sai à frente da lebre e calcule quantos minutos levará até que a lebre alcance \na tartaruga. Por exemplo, se a tartaruga sair 500 metros à frente a lebre alcança em 56 minutos.\n'''\n\n\ndef lebre_passaCoelhinho(vantagem):\n dist_coelho = vantagem\n dist_lebre = 0\n mnts = 0\n while dist_lebre < dist_coelho:#<=\n dist_coelho += 1\n dist_lebre += 10\n mnts += 1\n return mnts, dist_coelho, dist_lebre\n \ndef main():\n dist_coelho = float(input('Quantos metros a tartaruga saiu na frente? '))\n #dist_coelho = float(input())\n mnts, dist_coelho2, dist_lebre = lebre_passaCoelhinho(dist_coelho)\n \n print(f'Com essa vantagem, a lebre vai precisar de {mnts} minuto(s), após o início da corrida, para alcançar a tartaruga.')\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.6038894653320312,
"alphanum_fraction": 0.6330603957176208,
"avg_line_length": 41.434783935546875,
"blob_id": "879459de732329ecdf1fa0cde812d86bda1de86d",
"content_id": "71e23c20f88f7ee3ffd056aec19cdf81263d482d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1977,
"license_type": "no_license",
"max_line_length": 269,
"num_lines": 46,
"path": "/sem12-q5-tuplas-lendoArquivosCSV-retornando-cidadesMaisPopulosasQueEqueFazemAniversarioEm.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n4. Leia uma população e informe as cidades com população maior que o valor lido. Veja o exemplo:\nVeja o exemplo para a leitura de 50000 para a população:\nCIDADES COM MAIS DE 50000 HABITANTES:\nIBGE: 120040 - Rio Branco(AC) - POPULAÇÃO: 290639\nIBGE: 270030 - Arapiraca(AL) - POPULAÇÃO: 202398\nIBGE: 270040 - Atalaia(AL) - POPULAÇÃO: 50323\n'''\n\ndef cidades_mais_populosa(habitantes, mes_niver):\n resultado = []\n with open('cidades.csv', 'r', encoding='utf-8') as arquivo:\n for linha in arquivo:\n uf, ibge, nome, dia, mes, pop = linha.split(';')\n if int(pop) > habitantes and int(mes) == mes_niver: \n resultado.append(\n (uf, int(ibge), nome, int(dia), int(mes), int(pop))\n #(uf, int(ibge), nome, int(pop))\n )\n arquivo.close()\n return resultado\n\ndef mes_literal(mes):\n lista_mes = [ 'JANEIRO', 'FEVEREIRO', 'MARÇO', 'ABRIL', 'MAIO', 'JUNHO', 'JULHO', 'AGOSTO', 'SETEMBRO', 'OUTUBRO', 'NOVEMBRO', 'DEZEMBRO']\n return lista_mes[mes-1]\n \ndef main():\n print(\"Cidades que fazem aniversário no mês informado a seguir e com mais hapitantes que o informado a seguir: \")\n #entrada de dados\n mes = int(input('Mês de aniversário: '))\n populacao = int(input('População mínima: '))\n \n #processamento\n cidades_com_populacao_maior_mesAniver = cidades_mais_populosa(populacao, mes)\n\n #saída\n print(f'CIDADES COM MAIS DE {populacao} HABITANTES E ANIVERSÁRIO EM {mes_literal(mes)}:')\n cont = len(cidades_com_populacao_maior_mesAniver)\n i = 0\n while i < cont:\n print(f'{cidades_com_populacao_maior_mesAniver[i][2]}({cidades_com_populacao_maior_mesAniver[i][0]}) tem {cidades_com_populacao_maior_mesAniver[i][5]} habitantes e faz aniversário em {cidades_com_populacao_maior_mesAniver[i][3]} de {mes_literal(mes).lower()}.')\n i += 1\n #print(cidades[:555])\n \nif __name__ == '__main__':\n main()\n\n\n"
},
{
"alpha_fraction": 0.48507070541381836,
"alphanum_fraction": 0.5353588461875916,
"avg_line_length": 25.51388931274414,
"blob_id": "c7e3e08ed7b77e9d6f53c0eb92bc87fd1d937a5e",
"content_id": "0fe9934c0a2ea6581caca1cb8bc653ff072299e1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1925,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 72,
"path": "/sem14-dicionario-codeclube_desafio04_protecao_de_senha.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "distances = {\n 1: (\"mercúrio\", 91700000),\n 2: (\"vênus\", 41400000),\n 3: (\"marte\", 78300000),\n 4: (\"júpiter\", 628000000),\n 5: (\"saturno\", 1280400000),\n 6: (\"urano\", 2720400000),\n 7: (\"netuno\", 4350400000)\n }\n\n\ndef displayMenu():\n print(f\"{'='*13}Planetas distantes{'='*13}\")\n print(\"=\" * 44)\n print(\"Menu:\")\n print('Escolha qual planeta você deseja saber a distância até nós:')\n print(\" 1 = Mercúrio\")\n print(\" 2 = Vênus\")\n print(\" 3 = Marte\")\n print(\" 4 = Júpiter\")\n print(\" 5 = Saturno\")\n print(\" 6 = urano\")\n print(\" 7 = Netuno\")\n print(\" q = sair\")\n\ndef planetas_distantes():\n running = True\n\n displayMenu()\n\n #repete até que o usuário digite 'q' para sair\n while running == True:\n\n menuChoice = input(\">_\").lower()\n \n if menuChoice in '1234567':\n print(f'{distances[int(menuChoice)][0]} tá a {distances[int(menuChoice)][1]} km da Terra.')\n \n #q para sair\n elif menuChoice == 'q':\n running = False\n\n else:\n print(\"Escolha inválida!\")\n\n\npasswordDictionary = {\n \"programador\" : \"acesso\" \n }\n\ndef main():\n \n print(\"Programa super secreto\")\n print(\"====================\")\n\n loginAttempts = 0\n while loginAttempts < 3:\n print('Login:')\n name = input(\"Nome : \").lower()\n password = input(\"Senha : \").lower()\n\n if name in passwordDictionary and passwordDictionary[name] == password:\n print(\"\\nBEM-VINDO sr.\", name.upper(),'!!')\n print('Acesso liberado ao programa \"Planetas Distantes\"!!\\n')\n planetas_distantes()\n\n else:\n loginAttempts += 1\n print(f\"Acesso negado :(\\n({loginAttempts}/3 tentativas).\\n\")\n print('Acesso negado. Parece que você não possui cadastro no sistema.') \nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.538922131061554,
"alphanum_fraction": 0.56886225938797,
"avg_line_length": 27.79310417175293,
"blob_id": "9ac97891fe3bff535e5648e28a582a78a51247a9",
"content_id": "ddd885ee7c4d65d430fef5fc1f77155b455c9b82",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 868,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 29,
"path": "/sem07-2-q2t2-cancaoDosProgramadores.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n02. Modifique a canção dos programadores do exercício anterior para incluir o refrão: Tecle “Ctrl+F5”. \n Termine a canção com “Vamos fazer mais um café!”.\n 99 bugs no software, pegue um deles e conserte...\n Tecle “Ctrl+F5”\n 100 bugs no software, pegue um deles e conserte...\n Tecle “Ctrl+F5”\n 101 bugs no software, pegue um deles e conserte...\n Tecle “Ctrl+F5”\n ...\n 250 bugs no software, pegue um deles e conserte...\n Tecle “Ctrl+F5”\n Vamos fazer mais um café!\n'''\n\ndef cancao():\n i = 99\n for cont in range(152): \n print(f'{i} bugs no software, pegue um deles e conserte...')\n print('Tecle \"Ctrl+F5\"')\n i += 1\n \n print('Vamos fazer mais um café!')\n\ndef main():\n cancao()\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.7710280418395996,
"alphanum_fraction": 0.7757009267807007,
"avg_line_length": 70.33333587646484,
"blob_id": "1ce1eb7ee28156489e8d451eba689326e6127c1b",
"content_id": "701e8b7b0dc5cc36595c16b1835039ba606d0ee9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 439,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 6,
"path": "/sem02-q7-harasFerraduras.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#variável \"qtd_cavalos\", do tipo int, recebe um valor a partir da leitura do teclado\nqtd_cavalos = int(input(\"Quantidade de cavalos no haras: \"))\n#variável \"ferraduras\" recebe o valor da multiplicação do valores guardado na variável \"qtd_cavalos\" por 4.\nferraduras = qtd_cavalos * 4\n#impressão do valor das \"ferradura\"s com uma formatação para melhor compreensão\nprint(f'São necessários {ferraduras} ferraduras para o haras.')\n"
},
{
"alpha_fraction": 0.7123287916183472,
"alphanum_fraction": 0.7397260069847107,
"avg_line_length": 47.66666793823242,
"blob_id": "a74f6b496eb26b4d364da1155a7be88319cb2295",
"content_id": "f21f6bc5980b433b3c48db0951a24cf5247f3d6a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 597,
"license_type": "no_license",
"max_line_length": 92,
"num_lines": 12,
"path": "/sem03-q6-funcaoTrocaValoresEntreVariaveis.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#início da função nomeada \"troca\"\ndef trocar(x1, x2):\n #retorno da função com os valores trocados\n return x2, x1\n#variável n1, do tipo int, recebe um valor a partir da leitura do teclado\nn1 = int(input('Primeiro número: '))\n#variável n2, do tipo int, recebe um valor a partir da leitura do teclado\nn2 = int(input('Segundo número: '))\n#chamando a função \"troca\" passando n1 e n2 como argumentos e alocando os valores nos mesmos\nn1, n2 = trocar(n1, n2)\n#imprimir na tela a mensagem concatenada com os valores inseridos conforme a formatação\nprint(f'Primeiro {n1}; Segundo {n2}.')\n"
},
{
"alpha_fraction": 0.4784946143627167,
"alphanum_fraction": 0.5107526779174805,
"avg_line_length": 20.882352828979492,
"blob_id": "5899de7c79d1fcccdccb5f688cb9f6ae73273c47",
"content_id": "33d2ad6ad36035e0c238ff8ba970b8441a0ee247",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 378,
"license_type": "no_license",
"max_line_length": 107,
"num_lines": 17,
"path": "/sem09-2-q3-sequenciaAteNtermos.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n03. Sendo H = 1 + 1/2 + 1/3 + ... + 1/n, escreva um programa para calcular o valor de H. O número n é lido.\n'''\n\ndef valorH(n):\n h = 0\n while n > 0:\n h += 1/n\n n -= 1\n return h\n\ndef main():\n n = int(input('Digite qual será o n-ésimo termo: '))\n print(f'O valor da soma da sequência é {valorH(n)}.')\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.5925449728965759,
"alphanum_fraction": 0.6266067028045654,
"avg_line_length": 36.95121765136719,
"blob_id": "5765f649c8ee7d52dd39d47e6654236cfdc90e8b",
"content_id": "b50ce7922e570562b6ea315366f3dc73ea97050f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1577,
"license_type": "no_license",
"max_line_length": 183,
"num_lines": 41,
"path": "/sem12-q4-tuplas-lendoArquivosCSV-retornando-cidadesMaisPopulosasQue.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n4. Leia uma população e informe as cidades com população maior que o valor lido. Veja o exemplo:\nVeja o exemplo para a leitura de 50000 para a população:\nCIDADES COM MAIS DE 50000 HABITANTES:\nIBGE: 120040 - Rio Branco(AC) - POPULAÇÃO: 290639\nIBGE: 270030 - Arapiraca(AL) - POPULAÇÃO: 202398\nIBGE: 270040 - Atalaia(AL) - POPULAÇÃO: 50323\n'''\n\n#essa função lê um arquivo CSV que contêm dados sobre cidades BR e retorna as cidades mais populosas que a informada na entrada\ndef cidades_mais_populosa(habitantes):\n resultado = []\n with open('cidades.csv', 'r', encoding='utf-8') as arquivo:\n for linha in arquivo:\n uf, ibge, nome, dia, mes, pop = linha.split(';')\n if int(pop) > habitantes: \n resultado.append(\n #(uf, int(ibge), nome, int(dia), int(mes), int(pop))\n (uf, int(ibge), nome, int(pop))\n )\n arquivo.close()\n return resultado\n \ndef main():\n print('Cidades com população maior que o seguinte valor:')\n #entrada de dados\n populacao = int(input())\n \n #processamento\n cidades_com_populacao_maior = cidades_mais_populosa(populacao)\n\n #saída\n print(f'CIDADES COM MAIS DE {populacao} HABITANTES:')\n cont = len(cidades_com_populacao_maior)\n i = 0\n while i < cont:\n print(f'IBGE: {cidades_com_populacao_maior[i][1]} - {cidades_com_populacao_maior[i][2]}({cidades_com_populacao_maior[i][0]}) - POPULAÇÃO: {cidades_com_populacao_maior[i][3]}')\n i += 1\n \nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.5426356792449951,
"alphanum_fraction": 0.6124030947685242,
"avg_line_length": 24.799999237060547,
"blob_id": "7df723e5c689f75e4e533e1b557107416b79c2b5",
"content_id": "cb3271e185f2835cd3badc90c9c48be1a18f8edc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 258,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 10,
"path": "/sem01-q1-medidasCirculo.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "raio = float(input())\npi = 3.141592\ncircunferencia = 2 * pi * raio\na_circulo = pi * raio * 2\na_esfera = 4 * pi * raio ** 2\nvol_esfera = 4/3 * pi * raio ** 3\nprint(\"%.6f\"%circunferencia)\nprint(\"%.6f\"%a_circulo)\nprint(\"%.6f\"%a_esfera)\nprint(\"%.6f\"%vol_esfera)\n"
},
{
"alpha_fraction": 0.7363083362579346,
"alphanum_fraction": 0.7444218993186951,
"avg_line_length": 48.29999923706055,
"blob_id": "df288ee6936e6969eaf5453fb437648db0652c92",
"content_id": "fbb00e4c171d9a8f75ced4e38050eb0d17f1643a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 506,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 10,
"path": "/sem01-q5-converteMinutosEmHorasEMinutos.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#variável \"minutos\", do tipo int, recebe um valor a partir da leitura do teclado\nminutos = int(input(\"Digite uma quantidade de minutos: \"))\n#variável \"h\" recebe o valor INTEIRO da divisão conforme a fórmula\nh = minutos // 60\nprint(h)\n#variável \"m\" recebe o valor do RESTO da divisão conforme a fórmula\nm = minutos % 60\nprint(m)\n#impresão com formatação, f'', e valores contatenados na string conforme indicação das chaves\nprint(f'{minutos} minuto(s) é equivalente a {h} horas e {m} minutos.')\n"
},
{
"alpha_fraction": 0.5171192288398743,
"alphanum_fraction": 0.5371900796890259,
"avg_line_length": 22.52777862548828,
"blob_id": "396854ddc30a3ea593b067e76a7813f7e8cda80e",
"content_id": "babd47fc4044455afc5b9978c46b57bd62e325d4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 856,
"license_type": "no_license",
"max_line_length": 95,
"num_lines": 36,
"path": "/sem11-q1-lendo10numsERetornaSomaEMult.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#1. Leia uma lista de 10 (dez) números inteiros, mostre os números, sua soma e a multiplicação.\n\ndef soma(lista):\n cont = len(lista) - 1\n soma = 0\n i = 0\n while i <= cont:\n soma += lista[i]\n i += 1\n return soma\n \ndef multiplica(lista):\n cont = len(lista) - 1\n mult = 1\n i = 0\n while i <= cont:\n mult *= lista[i]\n i += 1\n return mult\n \ndef main():\n lista_d_nums = []\n i = 0\n\n print('Preencha a lista com 10 números: ')\n while i <= 9:\n num = int(input(f'{i+1}º termo: '))\n lista_d_nums.append(num)\n i += 1\n\n print(f'Lista com os números inseridos: {lista_d_nums}') \n print(f'Soma dos termos da lista: {soma(lista_d_nums)}')\n print(f'Multiplicação dos termos da lista: {multiplica(lista_d_nums)}')\n \nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.731663703918457,
"alphanum_fraction": 0.742397129535675,
"avg_line_length": 61.11111068725586,
"blob_id": "cb11d824a0965022c982b875d9ec464210887136",
"content_id": "286699a56a88096c0db36cf410a2d9064c3c5344",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 567,
"license_type": "no_license",
"max_line_length": 130,
"num_lines": 9,
"path": "/sem02-q4-valorAluguelCarro.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#variável \"dias\", do tipo int, recebe um valor a partir da leitura do teclado.\ndias = int(input(\"Quantidade de dias alugados: \"))\n#variável \"km\", do tipo float, recebe um valor a partir da leitura do teclado.\nkm = float(input(\"Quantos km foram percorridos: \"))\n#variável \"total\" recebe o valor do aluguel conforme a fórmula que calcula a taxa sobre a quantidade de dias e quilômetros rodados\ntotal = (dias * 60) + (km * 0.15)\nprint(total)\n#impressão do valor final formatado para melhor compreenssão\nprint(f'O total a pagar pelo aluguel é de R$ {total:.2f}')\n"
},
{
"alpha_fraction": 0.3302282392978668,
"alphanum_fraction": 0.3935724198818207,
"avg_line_length": 16.314516067504883,
"blob_id": "d2dce716edbf565fe0c89923216595451d575547",
"content_id": "ad6a6480dccc1001c2470c6b1ceb0385450a2367",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4296,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 248,
"path": "/desafio_desenhando-formas-aleatórias.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "from turtle import *\n\ndef figuras_planas():\n \n shape(\"turtle\")\n speed(6)#\n import random\n pensize(1)\n i=0\n \n while i <= 250:\n #a = random.choice([\"Blue\", \"Red\", \"Yellow\", \"Green\", \"Purple\", \"Brown\", \"Pink\", \"Orange\"])\n #color(a)\n #f = random.randrange(1, 360)\n f = random.randrange(1, 360)\n g = random.randrange(1, 40)\n d = random.randrange(1, 2)\n e = random.randrange(1, 3)\n t = random.randrange(1, 50)\n pendown()\n if d == 1:\n right(f)\n if d == 2:\n left(f)\n if e == 1:\n penup()\n \n forward(t)\n i+= 1\n while i <= 250:\n #a = random.choice([\"Blue\", \"Red\", \"Yellow\", \"Green\", \"Purple\", \"Brown\", \"Pink\", \"Orange\"])\n #color(a)\n #f = random.randrange(1, 360)\n f = random.randrange(1, 360)\n g = random.randrange(1, 40)\n d = random.randrange(1, 2)\n e = random.randrange(1, 3)\n t = random.randrange(1, 50)\n pendown()\n if d == 1:\n right(f)\n if d == 2:\n left(f)\n if e == 1:\n penup()\n \n forward(t)\n i+= 1\n\n while i <= 250:\n #a = random.choice([\"Blue\", \"Red\", \"Yellow\", \"Green\", \"Purple\", \"Brown\", \"Pink\", \"Orange\"])\n #color(a)\n #f = random.randrange(1, 360)\n f = random.randrange(1, 360)\n g = random.randrange(1, 40)\n d = random.randrange(1, 2)\n e = random.randrange(1, 3)\n t = random.randrange(1, 50)\n pendown()\n if d == 1:\n right(f)\n if d == 2:\n left(f)\n if e == 1:\n penup()\n \n forward(t)\n i+= 1\n \n #levando para o topo esquerdo\n penup()\n backward(250)\n left(90)\n forward(150)\n \n #triangula\n pendown()\n right(90)\n color(\"Blue\")\n pensize(10)\n a = 0\n tamtr = 117\n while a <= 35:\n ii = 0 \n while ii <= 2:\n if ii == 0:\n forward(tamtr)\n left(120)\n ii += 1\n elif ii == 1: \n tamtr -= 1\n forward(tamtr)\n left(120) \n ii += 1\n else:\n tamtr -= 1\n forward(tamtr)\n left(120)\n tamtr -= 1\n ii += 1\n \n speed(8) \n a += 1\n \n \n right(90)\n forward(a-3)\n left(90)\n\n \n\n\n \n \n ''' \n \n \n \n p += 3\n #tamtr -= 1\n speed(1)\n left(90)\n forward(2)\n right(90)\n forward(2)\n i+=1\n '''\n \n \n\n #quadrado\n penup()\n #left(120)\n forward(150)\n pendown()\n i = 0\n while i <= 3: \n forward(100)\n left(90)\n i+=1\n \n\n #pentágono\n penup()\n forward(150)\n pendown()\n i = 0\n while i <= 4:\n forward(65)\n left(72)\n i+=1\n \n \n #left(72)\n penup()\n backward(300)\n right(90)\n forward(200)\n \n \n \n \n \n \n \n\n \n '''\n color(\"Red\")\n pensize(8)\n right(90)\n forward(100)\n left(90)\n forward(50)\n\n color(\"Orange\")\n pensize(3)\n penup()\n forward(50)\n pendown()\n forward(50)\n '''\n \n #done()#\ndef casa():\n #triangulo\n pendown()\n left(90)\n i = 0\n while i <= 2:\n forward(117)\n left(120)\n i+=1\n \n #telhado\n forward(415)\n left(120)\n forward(117)\n left(60)\n forward(298)\n left(60)\n forward(117)\n\n #paredes\n left(120)\n right(90)\n forward(117)\n left(90)\n forward(117)\n left(90)\n forward(117)\n right(90)\n forward(298)\n right(90)\n forward(117)\n right(90)\n\n #porta\n forward(160)\n right(90)\n forward(80)\n right(90)\n forward(45)\n right(90)\n forward(80)\n right(90)\n forward(298-(160-45))\n\n #chão\n forward(415)\n backward(900)\n \n \n \n \n\n \n \n \n\ndef main():\n \n figuras_planas()\n casa()\n done()\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.5894378423690796,
"alphanum_fraction": 0.6252129673957825,
"avg_line_length": 26.952381134033203,
"blob_id": "b389807d3d7b730c23fd8785ed9e9ebd9348fda6",
"content_id": "a3bac6d5b46d0027348a0a0eb3c11a9da218b7bb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 592,
"license_type": "no_license",
"max_line_length": 101,
"num_lines": 21,
"path": "/sem07-2-q1t2-cancaoDosProgramados.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n01. Escreva um programa que gera a letra da canção muito popular entre os programadores:\n 99 bugs no software, pegue um deles e conserte...\n 100 bugs no software, pegue um deles e conserte...\n 101 bugs no software, pegue um deles e conserte...\n ...\nFaça o programa de forma a gerar a letra da música com o número de bugs no software variando de 99 a \n250.\n'''\n\ndef cancao():\n i = 99\n for cont in range(152): \n print(f'{i} bugs no software, pegue um deles e conserte...')\n i += 1\n\ndef main():\n cancao()\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.6164634227752686,
"alphanum_fraction": 0.6329268217086792,
"avg_line_length": 33.16666793823242,
"blob_id": "78af89d27124a0da365e2802c44b14cf56c5a032",
"content_id": "3bbf9ed5187ca5bf8af3250d06c3a848eb708671",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3288,
"license_type": "no_license",
"max_line_length": 184,
"num_lines": 96,
"path": "/sem11-2-q5-lista-associando-dados-em-3-listas.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n5. Foram anotados nomes, idades e alturas de 30 alunos. Faça um programa que determine quais\nalunos com mais de 13 anos possuem altura inferior à média de altura dos alunos. Considerar a \naltura arredondando para duas casas decimais.\n'''\n\ndef recebedados30():\n num = 30##\n nome = []\n idade = []\n altura = []\n cont = 0\n while cont < num:\n nome_paraInserir =input('Nome do(a) aluno(a): ').strip()\n idade_paraInserir = int(input(f'Idade do(a) {nome_paraInserir}: '))\n altura_paraInserir = float(input(f'Altura do(a) {nome_paraInserir}: '))\n nome.append(nome_paraInserir)\n idade.append(idade_paraInserir)\n altura.append(round(altura_paraInserir, 2)) \n cont += 1\n return nome, idade, altura\n\ndef media_aritimetrica(lista):\n tam = len(lista)\n i = 0\n total = 0\n while i < tam:\n total += lista[i]\n i += 1\n return round(total/tam,2)\n\ndef alunosMenores(media_alturas, nomes, idades, alturas):\n Nomes_abaixoDaMedia = []\n alturas_abaixoDaMedia = []\n idade_doAluno = []\n tam = len(nomes)\n i = 0\n total = 0\n while i < tam:\n if idades[i] > 13:\n if alturas[i] < media_alturas:\n Nomes_abaixoDaMedia.append(nomes[i])\n alturas_abaixoDaMedia.append(alturas[i])\n idade_doAluno.append(idades[i])\n i += 1\n return Nomes_abaixoDaMedia, alturas_abaixoDaMedia, idade_doAluno\n \ndef acima_da_media(media, nomes, alturas):\n Nomes_acimaDaMedia = []\n alturas_acimaDaMedia = []\n tam = len(alturas)\n i = 0\n total = 0\n while i < tam:\n if alturas[i] > media:\n Nomes_acimaDaMedia.append(nomes[i])\n alturas_acimaDaMedia.append(alturas[i])\n i += 1\n \n return Nomes_acimaDaMedia, alturas_acimaDaMedia\n\n \ndef main():\n nome_alunos, idade_alunos, altura_alunos = recebedados30()\n media_alturas = media_aritimetrica(altura_alunos)\n nome_maior_de13_inferiroAmediaAlt, altura_maior_de13_inferiroAmediaAlt, idade_maior_de13_inferiroAmediaAlt = alunosMenores(media_alturas, nome_alunos, idade_alunos, altura_alunos )\n '''\n print(f'lista dos nomes dos alunos: {nome_alunos}')\n print(f'lista das idades dos alunos: {idade_alunos}')\n print(f'lista das alturas dos alunos: {altura_alunos}')\n print(f'Média das alturas: {media_alturas}')\n print(f'lista dos nomes dos alunos abaixo da média: {nome_maior_de13_inferiroAmediaAlt}')\n print(f'lista das idades dos alunos abaixo da média: {idade_maior_de13_inferiroAmediaAlt}')\n print(f'lista das alturas dos alunos abaixo da média: {altura_maior_de13_inferiroAmediaAlt}')\n \n '''\n print('MAIORES DE 13 ANOS COM ALTURA ABAIXO DA MÉDIA')\n #print(f'{nome_alunos}')\n #print(f'{idade_alunos}')\n #print(altura_alunos)\n #print(media_alturas)\n tam = len(nome_maior_de13_inferiroAmediaAlt)\n i = 0\n total = 0\n while i < tam:\n print(nome_maior_de13_inferiroAmediaAlt[i])\n #print(f'{atletas_acima_da_mediaAlturas[i]:.2f}')\n i += 1\n \n #print(nome_maior_de13_inferiroAmediaAlt)\n #print(altura_maior_de13_inferiroAmediaAlt)\n #print(idade_maior_de13_inferiroAmediaAlt)\n \n \nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.6477382183074951,
"alphanum_fraction": 0.6794995069503784,
"avg_line_length": 46.227272033691406,
"blob_id": "afa1dc682beeb54f5315ade8bc06759af6009cd7",
"content_id": "2346e2433218adf8bced839e10dc4067a62b79ab",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2101,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 44,
"path": "/sem12-q1-tuplas-comparaDuasTemperatuasERetornaAMaior.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n01. Considereuma tupla que guarde temperaturas em Celsius (C) ou Fahrenheit (F) como um valor em duas partes: \ntemperatura e escala. Por exemplo: 32,5 graus Celsius é representado como (32.5, ‘C’) e 45,2 graus Fahrenheit é \nrepresentado como (45.2, ‘F’). Crie uma função que recebe duas temperaturas e retorna a mais alta. Caso as \ntemperaturas sejam de escalas diferentes, a função deve fazer a conversão antes de compará-las. Faça a leitura de \nduas temperaturas, na forma temperatura e escala (t, e) separadamente, e mostre qual é a maior. Considere até 4 \n(quatro) casas decimais).\nUse upper() e colchetes\n'''\n\n#essa função compara duas temperaturas e retorna a maior delas\ndef maior_temperatura(tupla_com_as_temperaturas):\n temperatura1 = tupla_com_as_temperaturas[0]\n temperatura2 = tupla_com_as_temperaturas[2]\n maior = 0\n if tupla_com_as_temperaturas[1] != tupla_com_as_temperaturas[3]:\n if tupla_com_as_temperaturas[1] == 'C':\n #convertendo a tupla_com_as_temperaturas[3] para celsius\n temperatura2 = (tupla_com_as_temperaturas[2] - 32) * (5/9)\n else:\n #convertendo a tupla_com_as_temperaturas[3] para fahrenheit\n temperatura2 = (tupla_com_as_temperaturas[2] * (9/5)) + 32\n if temperatura1 < temperatura2:\n maior = 2 \n return round(tupla_com_as_temperaturas[maior], 4), tupla_com_as_temperaturas[maior + 1]\n \ndef main():\n #entrada de dados\n print(f'{\"=\"*15}Comparando duas temperaturas{\"=\"*15}')\n temperatura01 = float(input('Digite a primeira temperatura: '))\n escala01 = input('Agora digite a escala (\"C\" ou \"F\": ): ').strip().upper()[0]\n temperatura02 = float(input('Digite a segunda temperatura: '))\n escala02 = input('Agora digite a escala (\"C\" ou \"F\": ): ').strip().upper()[0]\n temperaturas = (temperatura01, escala01, temperatura02, escala02)\n\n #processamento\n maior_temp = maior_temperatura(temperaturas)\n\n #saída\n print(f'A maior temperatura informada foi: {maior_temp[0]} {maior_temp[1]}º.')\n\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.33892616629600525,
"alphanum_fraction": 0.3590604066848755,
"avg_line_length": 18.19354820251465,
"blob_id": "3eda0978dbac16bef8d2d6df2975cd6989de1a81",
"content_id": "1ceca01d7e15c91fae138bb3a7ccda60f17a404f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 598,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 31,
"path": "/sem08-2-q5-conceitoNotaFlag.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "def conceito(a):\n if a >= 8.5:\n print('A')\n elif 7 <= a < 8.5:\n print('B')\n elif 5 <= a < 7:\n print('C')\n elif 4 <= a < 5:\n print('D')\n else:\n print('E')\n \n \n \ndef main():\n while True:\n nota = input('Nota do aluno: ').strip()\n try:\n nota = float(nota)\n if 0 <= nota <= 10:\n conceito(nota)\n break\n else:\n print('Nota inválida.')\n except:\n print('Nota inválida.')\n\n \n\nif __name__ == '__main__':\n main()\n\n"
},
{
"alpha_fraction": 0.7651083469390869,
"alphanum_fraction": 0.7662485837936401,
"avg_line_length": 53.8125,
"blob_id": "3a92ff45906d8abbac0cef88c01566e578ea60d6",
"content_id": "8c633a1eb611726d38e5e5ded30178474c9b3853",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 896,
"license_type": "no_license",
"max_line_length": 149,
"num_lines": 16,
"path": "/sem03-q3-funcaoAreaEPerimetroDoQuadr.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#inicio de uma função denominada \"area_quadrado\", com um argumento\ndef area_quadrado(lado):\n #retorno da função, multiplicação do argumento por ele mesmo.\n return lado * lado\n\n#inicio de uma função denominada \"perimetro_quadrado\", com um argumento\ndef perimetro_quadrado(lado):\n #retorno da função\n return lado * 4\n\n#variável \"valor_lado\", do tipo float, recebe um valor a partir da leitura do teclado\nvalor_lado = float(input('Lado do quadrado: '))\n#imprimir na tela mensagem concatenada com o retorno da função \"area_quadrado\" com o valor da variável |\"valor_lado\" sendo passado como argumento\nprint('Área do quadrado:', area_quadrado(valor_lado))\n#imprimir na tela mensagem concatenada com o retorno da função \"perimetro_quadrado\" com o valor da variável \"valor_lado\" sendo passado como argumento\nprint('Perímetro do quadrado: ', perimetro_quadrado(valor_lado))\n"
},
{
"alpha_fraction": 0.5937007665634155,
"alphanum_fraction": 0.6015747785568237,
"avg_line_length": 25.375,
"blob_id": "b71102d0fc14b80933a2f67947393dab5b3e366d",
"content_id": "a95e5c74d31ce8e40c08aa724366bf130d789742",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 639,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 24,
"path": "/sem04-2-q2-retornaBooleanoSeLetra.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#Escreva um programa que leia um caractere e mostra o valor booleano True (verdadeiro) se for uma LETRA (vogal \n#ou consoante) ou o valor booleano False (falso) caso contrário\n\ndef retorna_unicode(l): \n return ord(l)\n\ndef eh_letra(n, b):\n if letra(b) == True:\n print(f'O caractere {n}, é uma letra.')\n else:\n print(f'O caractere {n}, não é uma letra.')\n \n\ndef letra(l):\n return l >= 97 and l <= 122\n\ndef main():\n s = input('Digite um, E SOMENTE UM, caractere qualquer: ').lower().strip()\n a = retorna_unicode(s)\n eh_letra(s, a)\n #print(letra(a))\n\nif __name__ == '__main__':\n main()\n\n\n"
},
{
"alpha_fraction": 0.602497398853302,
"alphanum_fraction": 0.6191467046737671,
"avg_line_length": 29.03125,
"blob_id": "3cf9127350235d26592b0ce0de157ccae9be91dd",
"content_id": "e9376afe8b1f54f61bec80bc75139b7756449e74",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 973,
"license_type": "no_license",
"max_line_length": 139,
"num_lines": 32,
"path": "/sem11-q4-listas.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n4. Leia 20 números inteiros e armazene-os numa lista. Separe os números pares na lista PAR e os números \nímpares na lista IMPAR. Imprima as três listas.\n'''\n\ndef recebe20():\n num = 20\n lista = []\n lista_par = []\n lista_impar = []\n cont = 0\n print('Preenchendo uma lista com 20 números: ')\n while cont < num:\n n_paraInserir =int(input('Digite um número: '))\n #n_paraInserir = formataFloatCasas(n_paraInserir,1)\n lista.append(n_paraInserir)\n if n_paraInserir % 2 == 0:\n lista_par.append(n_paraInserir)\n else:\n lista_impar.append(n_paraInserir)\n cont += 1\n return lista, lista_par, lista_impar\n\ndef main():\n #entrada e processamento\n numeros, par, impar = recebe20()\n \n #saída\n print(f'Lista dos números inseridos: {numeros}\\nLista dos número pares inseridos: {par}\\nLista dos números ímpares inseridos: {impar}')\n \nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.40762123465538025,
"alphanum_fraction": 0.4214780628681183,
"avg_line_length": 18.449438095092773,
"blob_id": "efbcb268afbf8325c26b2174659f22be66852615",
"content_id": "ec6348723b60dcb5cb3ff8659bf99d0364a2fb36",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1742,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 89,
"path": "/sem05-2-q3-retornaMaiorEMenorDe5.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#03. Escreva um programa que leia 5 números inteiros e escreva o maior e o menor deles.\n#Considere que todos os valores são diferentes. NÃO use as funções embutidas min() e max().\n'''\ndef maior(a, b, c, d, e):\n m = 0\n if a > b:\n m = a\n else:\n m = b\n if c > m:\n m = c\n if d > m:\n m = d\n if e > m:\n m = e\n return m\n'''\ndef maior(a, b, c, d, e):\n m = a\n if b > m:\n m = b\n if c > m:\n m = c\n if d > m:\n m = d\n if e > m:\n m = e\n return m\n\ndef menor(a, b, c, d, e):\n m = a\n if b < m:\n m = b\n if c < m:\n m = c\n if d < m:\n m = d\n if e < m:\n m = e \n return m\n \ndef main():\n \n n1 = int(input('Digite um número: '))\n n2 = int(input('Digite um número: '))\n n3 = int(input('Digite um número: '))\n n4 = int(input('Digite um número: '))\n n5 = int(input('Digite um número: '))\n\n \n a = maior(n1, n2, n3, n4, n5)\n b = menor(n1, n2, n3, n4, n5)\n \n #print(f'Maior: {a}\\nMenor: {b}')\n #print(a, b)\n print(a)\n print(b)\n\nif __name__ == '__main__':\n main()\n\n\n\n'''\ndef maior_e_menor(a, b, c, d, e):\n \n if (a > b and b > c and c > d and d > e):\n return a\n elif (a < b and b > c and c > d and d > e): #2\n return b\n elif (a < b and b < c and c > d and d > e): #3\n return c\n elif (a < b and b < c and c < d and d > e):\n return d\n else:\n return e\n \ndef maior_e_menor2(a, b, c, d, e):\n if a > b > c > d > e:\n return a \n elif a < b > c > d > e: #2\n return b \n elif a < b < c > d > e: #3\n return c\n elif a < b < c < d > e:\n return d\n else:\n return e\n''' \n"
},
{
"alpha_fraction": 0.5027144551277161,
"alphanum_fraction": 0.5483170747756958,
"avg_line_length": 17.420000076293945,
"blob_id": "3a5aa56fef07b1c187f8904120438ce526eb53c8",
"content_id": "b02dd165d1798235752532acfdd5e386396344db",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 927,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 50,
"path": "/desafio-sem07-02_formatos-em-loop.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "from turtle import *\n\n#essa função leva a \"caneta\" pro topo esquerdo e define algumas propriedades da mesma\ndef posicionando():\n shape(\"turtle\")\n pensize(5)\n speed(11)\n #levando para o topo esquerdo\n penup()\n backward(250)\n left(90)\n forward(150)\n right(90)\n\n#essa função faz um espaçamento à direita \ndef espaco():\n penup()\n forward(200)\n \ndef pentagono():\n pendown()\n for cont in range(5): \n forward(80)\n left(72)\n \ndef hexagono():\n pendown()\n for cont in range(6): \n forward(70)\n left(60)\n \ndef circulo():\n pendown()\n for cont in range(360): \n forward(1)\n left(1)\n \ndef main():\n \n posicionando()#linha 4\n pentagono()#linha 20\n espaco()#linha 16\n hexagono()#linha 26\n espaco()#linha 16\n circulo()#linha 32\n \n done()\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.48716604709625244,
"alphanum_fraction": 0.6684128046035767,
"avg_line_length": 40.9560432434082,
"blob_id": "16183c6e94322be57f5c2f2af66d598e67623c70",
"content_id": "90824d43dde3c035ca86e3ef63a9fbbfe2694ae6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3873,
"license_type": "no_license",
"max_line_length": 137,
"num_lines": 91,
"path": "/sem16-q1-arquivos-maior_preco_de_abertura_e_a_data_que_correu.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\nA imagem, abaixo, mostra um trecho de arquivo csv (Comma-separated values) com informações acerca de empresas \nque negociam ações na bolsa de valores de São Paulo:\n\ntimestamp,open,high,low,close,volume\n2020-10-09,13.6400,13.9300,13.5400,13.5700,24150400\n2020-10-08,13.6100,13.7800,13.4300,13.6800,42956600\n2020-10-07,13.4300,13.8600,13.4000,13.5300,36713000\n2020-10-06,12.9300,13.7000,12.8900,13.3400,55057200\n2020-10-05,12.5600,12.8400,12.5400,12.8100,17205900\n2020-10-02,12.6100,12.8300,12.5300,12.5300,18790500\n2020-10-01,12.5500,12.7800,12.4800,12.7000,20801000\n2020-09-30,12.4500,12.6800,12.3600,12.5400,23701700\n2020-09-29,12.4600,12.6900,12.2800,12.3500,25093301\n2020-09-28,12.9700,12.9700,12.4200,12.4800,37529699\n2020-09-25,12.6500,12.9100,12.5000,12.8400,26282600\n2020-09-24,12.5000,12.8300,12.3600,12.6700,30795400\n2020-09-23,12.6800,12.7600,12.4400,12.4400,27644500\n2020-09-22,12.7200,12.8800,12.5700,12.8000,13561500\n2020-09-21,12.7000,12.8000,12.5100,12.7300,18931400\n2020-09-18,13.1100,13.1700,12.7900,12.7900,46635300\n2020-09-17,12.5200,13.2600,12.4000,13.1600,58315300\n[...]\n\nfonte: https://www.alphavantage.co/\n\nAs colunas, separadas por vírgula, representam:\ntimestamp: data do pregão em formato americano (aaaa-mm-dd);\nopen: valor da ação no momento da abertura dos negócios na data;\nhigh: maior valor atingido pela ação durante o pregão;\nlow: menor valor atingido pela ação durante o pregão;\nclose: valor da ação no momento de fechamento dos negócios na data.\nvolume: quantidade de negociações com as ações da empresa na data do pregão.\nLeia o nome de um arquivo csv com dados de uma empresa, conforme o apresentado, faça o carregamento das \ninformações e responda:\n1. Qual o maior preço de abertura e a data que correu?\n2. Qual o menor preço de fechamento e a data que correu?\n3. Qual o volume médio de negociações em um mês e ano informados?\n4. Qual o preço médio de abertura em um mês e ano informados?\n5. Em quais dias de um mês e ano lidos houve queda no preço da ação? Qual o preço de abertura e o preço do \nfechamento na data e a variação do preço em moeda corrente? 
Observação: há queda no preço quando o \nvalor de abertura é maior que o de fechamento.\n'''\n\nfrom operator import itemgetter\n\ndef formatar_data(data):\n meses = ('janeiro', 'fevereiro', 'março', 'abril', 'maio', 'junho', 'julho', 'agosto', 'setembro', 'outubro', 'novembro', 'dezembro')\n d, m, a = data['dia'], data['mes'], data['ano']\n return f'{d:0>2d} de {meses[m - 1]} de {a}'\n\n\ndef maior_preco_de_abertura(dados):\n ordenado = sorted(dados, key = itemgetter('abertura'))\n return round(ordenado[-1]['abertura'],2), formatar_data(ordenado[-1])\n\n\ndef carrega(arquivo):\n linhas = []\n with open(arquivo) as d:\n d.readline() #descarta a primeira linha(cabeçalho do arquivo)\n for linha in d.readlines():\n data, abertura, alta, baixa, fechamento, volume = linha.strip().split(',')\n ano, mes,dia = data.split('-')\n linhas.append(\n {\n 'ano' : int(ano),\n 'mes' : int(mes),\n 'dia' : int(dia),\n 'abertura' : float(abertura),\n 'alta' : float(alta),\n 'baixa' : float(baixa),\n 'fechamento' : float(fechamento),\n 'volume' : int(volume)\n }\n )\n return linhas\n \n\ndef main():\n #carrega os dados do pregão a partir do arquivo csv\n nome_arquivo = input('Nome do arquivo: ').strip()\n pregao = carrega(nome_arquivo) \n\n #Qual o maior preço de abertura e a data que correu?\n maior_preco, data = maior_preco_de_abertura(pregao)\n print(f'O maior preço na abertura foi {maior_preco:.2f} em {data}.')\n \n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.5477873086929321,
"alphanum_fraction": 0.5619189143180847,
"avg_line_length": 25.106796264648438,
"blob_id": "3a40c676c8af769a00f0066f30fb2da5b178ec20",
"content_id": "4a3b9cc0111b6fba78cfeaa5a75eb7f991a08c59",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2728,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 103,
"path": "/sem15-code_clube-porta_da_fortuna-desafio04-vinte_e_um.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "from random import *\n\ndef passo03_quanta_sorte_voce_tem():\n \n #imprime as três portas e as instruções do jogo\n print('''Porta da Fortuna!\n=========\n\nExiste um super prêmio atrás de uma dessas 3 portas!\nAdivinhe qual é a porta certa para ganhar o prémio!\n _____ _____ _____\n| | | | | |\n| [1] | | [2] | | [3] |\n| o| | o| | o|\n|_____| |_____| |_____|\n''')\n\n tentativas = 0\n\n score = 0\n\n #o usuário muda esta variável para terminar o jogo\n jogando = True\n \n #repetir, enquanto a variável 'jogando' estiver com valor \"True\" \n while score < 3:\n\n tentativas += 1\n\n print('\\nTentativa', tentativas, ': Escolha uma porta (1, 2, 3)')\n \n #get the chosen door and store it as an integer (whole number)\n chosenDoor = input()\n chosenDoor = int(chosenDoor)\n\n #randomly choose the winning door number (between 1 and 3)\n winningDoor = randint(1, 3)\n\n #show the player the winning and chosen door numbers\n print(\"A porta escolhida foi a\", chosenDoor)\n print(\"A pota certa é a\", winningDoor)\n\n #player wins if the chosen door and winning door number are the same\n if chosenDoor == winningDoor:\n print(\"Parabéns!\")\n score += 1\n else:\n print(\"Que peninha!\")\n\n print('Sua pontuação atual é', score)\n\n print('\\n**Você conseguiu! Terminou o jogo em', tentativas, 'tentativas**') \n\ndef desafio_vinte_e_um():\n \n #essa variável deve ser alterada pelo usuário para terminar o jogo\n playing = True\n\n score = 0\n\n #imprime as instruções do jogo\n print('''Vinte e um!\n===========\nTente fazer exatamente 21 pontos!''')\n\n #repete enquanto a variável 'playing' for 'True'\n while playing == True:\n\n #escolhe um numero aleatoriamente entre 1 e 10\n newNumber = randint(1,10)\n\n #soma o novo número à pontuação\n score = score + newNumber\n\n #mostra os dados para o jogador\n print(\"\\nSeu próximo número é\", newNumber)\n print(\"Sua pontuação agora é\", score)\n\n #termina se o usuário digitar 'n'\n #ou se a pontuação for maior que 21\n print(\"\\nGostaria de somar mais um número? (s/n)\")\n answer = input()[0].lower()\n if answer.lower() == 'n' or score > 21:\n playing = False\n \n print(\"\\nSua pontuação final é\", score)\n\n #se o jogador marcar 21\n if score == 21:\n print(\"VOCÊ VENCEU!!\")\n else:\n print(\"Que pena!\")\n\n\n\n \ndef main():\n #passo03_quanta_sorte_voce_tem()\n desafio_vinte_e_um()\n\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.6109289526939392,
"alphanum_fraction": 0.6229507923126221,
"avg_line_length": 30.55172348022461,
"blob_id": "28bf54ad6b94f21a1f182428c0df183bc8095f2d",
"content_id": "7f07bb41c09954746b899eb12673aba1e1c213d9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 927,
"license_type": "no_license",
"max_line_length": 116,
"num_lines": 29,
"path": "/sem14-q5-dicionario-media_de_aluno_com_dicionario.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n05.Escreva um programa que lê matrícula, nome e duas notas de vários alunos e armazena tais notas em um dicionário, \nonde a chave é a matrícula do aluno. A entrada de dados deve terminar quando for lida uma string vazia como \nmatrícula. Escreva uma função que retorna a média do aluno, dado sua matrícula, o programa finaliza com a leitura \nde uma matrícula vazia.\n'''\n\ndef matriculas():\n dados_matricula = {}\n while True:\n matricula = input().strip()\n if matricula == '': break\n nome = input().strip()\n nota1 = float(input())\n nota2 = float(input())\n dados_matricula[matricula] = (nome, nota1, nota2)\n\n return dados_matricula\n\ndef main():\n alunos = matriculas()\n\n while True:\n key = input().strip()\n if key == '': break\n print(f'{alunos[key][0]}: {(alunos[key][1] + alunos[key][2]) / 2:.1f}')\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.5025783181190491,
"alphanum_fraction": 0.526775062084198,
"avg_line_length": 22.783018112182617,
"blob_id": "96c6f9e6bd00d2de7ede294b70c00aec8841ed33",
"content_id": "5b87093888149cbfd38dc547e59f44b12ed2cb72",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2529,
"license_type": "no_license",
"max_line_length": 104,
"num_lines": 106,
"path": "/sem08-2-q2-flag0recebeVariasIdadesRetornaDados.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n02. Escreva um programa que, para um número indeterminado de pessoas:\n a. leia a idade de cada pessoa, sendo que a leitura da idade 0 (zero) indica o fim dos dados (flag) \n e não deve ser considerada;\n b. calcule e escreva o número de pessoas;\n c. calcule e escreva a idade média do grupo;\n d. calcule e escreva a menor idade e a maior idade.\n'''\n\ndef media_idade(lista, pessoas):\n i = pessoas - 1\n #print(i)\n soma = 0\n while i >= 0:\n soma += lista[i]\n i -= 1 \n \n return soma / pessoas\n\ndef maior_menor(lista, tam):\n i = tam - 1\n maior = 0\n menor = lista[i]\n\n while i >= 0:\n if lista[i] > maior:\n maior = lista[i]\n if lista [i] < menor:\n menor = lista[i]\n i -= 1\n\n return maior, menor\n \n \ndef main():\n lista = []\n pessoas = 0\n while True:\n idade = int(input('Digite a idade de alguém: '))\n if idade == 0 : break\n else:\n lista.append(idade)\n pessoas += 1\n if pessoas != 0: \n media = media_idade(lista, pessoas)\n maior, menor = maior_menor(lista, pessoas)\n #print(lista)\n #print(pessoas, media, maior, menor)\n print(f'O número de idades inseridas foi: {pessoas}.')\n print(f'A idade média do grupo é: {media:.2f} anos.')\n print(f'A menor idade inserida foi: {menor}.')\n print(f'A maior idade inserida foi: {maior}.')\n \n else:\n pass\n\nif __name__ == '__main__':\n main()\n\n '''\n total = 0\n media_idade = 0\n pessoas = 0\n maior_idade = 0\n menor_idade = 0\n \n while True:\n idade = int(input())\n if idade == 0: break\n else:\n total += idade\n pessoas += 1\n\n if pessoas == 1:\n menor_idade = idade\n if idade > maior_idade:\n maior_idade = idade\n if idade < menor_idade:\n menor_idade = idade\n \n media_idade = total / pessoas\n\n print(pessoas, media_idade, maior_idade, menor_idade )\n '''\n\n'''\nlista = ['o carro', 'peire', 123, 111]\n>>> lista\n['o carro', 'peire', 123, 111]\n>>> nova_lista = ['pedra', lista]\n>>> nova_lista\n['pedra', ['o carro', 'peire', 123, 111]]\n>>> lista[0]\n'o carro'\n>>> lista[2]\n123\n>>> lista[0,1]\nTraceback (most recent call last):\n File \"<pyshell#15>\", line 1, in <module>\n lista[0,1]\nTypeError: list indices must be integers or slices, not tuple\n>>> lista[0][1]\n' '\n>>> nova_lista[1][2]\n123\n'''\n"
},
{
"alpha_fraction": 0.5566714406013489,
"alphanum_fraction": 0.5939741730690002,
"avg_line_length": 23.89285659790039,
"blob_id": "1d564ad09be5c7eb92d0ad1729490ce40f1fdd40",
"content_id": "cc89d69369b93847df9469dd665ccd8e89c2587b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 713,
"license_type": "no_license",
"max_line_length": 102,
"num_lines": 28,
"path": "/sem07-2-q3t2-cancaoDosProgramadores.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n03. Modifique a canção dos programadores novamente para aumentar os bugs de 7 em 7, iniciando em 99 e \nparando em 250 ou antes\n 99 bugs no software, pegue sete deles e conserte...\n Tecle “Ctrl+F5”\n 106 bugs no software, pegue sete deles e conserte...\n Tecle “Ctrl+F5”\n 113 bugs no software, pegue sete deles e conserte...\n Tecle “Ctrl+F5”\n ...\n Vamos fazer mais um café!\n'''\n\n\ndef cancao():\n i = 99\n for cont in range(22): \n print(f'{i} bugs no software, pegue sete deles e conserte...')\n print('Tecle \"Ctrl+F5\"')\n i += 7\n \n print('Vamos fazer mais um café!')\n\ndef main():\n cancao()\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.4301490783691406,
"alphanum_fraction": 0.5096631646156311,
"avg_line_length": 14.885965347290039,
"blob_id": "1ab88d66ff5c753656efeb8300faea2c8a8bd9b2",
"content_id": "3704e3cea5225e64f7cb80cf61855e138ad50add",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1818,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 114,
"path": "/desafio-sem07-01_desenhando-formatos.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "from turtle import *\n\n# essa função desenha um triangulo, um quadrado e um pentágono\ndef figuras_planas():\n #definindos propriedades da 'caneta'\n shape(\"turtle\")\n pensize(8)\n speed(6) \n \n #levando a \"caneta\" para o topo esquerdo\n penup()\n backward(250)\n left(90)\n forward(150)\n \n #triangulo\n color(\"Blue\")\n pendown()\n right(90)\n i = 0\n while i <= 2:\n forward(117)\n left(120)\n i+=1 \n\n #quadrado\n penup()\n color(\"Red\")\n forward(150)\n pendown()\n i = 0\n while i <= 3: \n forward(100)\n left(90)\n i+=1\n \n #pentágono\n penup()\n color(\"Yellow\")\n forward(150)\n pendown()\n i = 0\n while i <= 4:\n forward(65)\n left(72)\n i+=1\n \n #posicionando a \"caneta em uma área abaixo dos desenhos já feitos\"\n penup()\n backward(300)\n right(90)\n forward(200) \n \ndef casa():\n #triangulo\n color(\"Brown\")\n pendown()\n left(90)\n i = 0\n while i <= 2:\n forward(117)\n left(120)\n i+=1\n \n #telhado \n forward(415)\n left(120)\n forward(117)\n left(60)\n forward(298)\n left(60)\n forward(117)\n\n #paredes\n color(\"Yellow\")\n left(120)\n right(90)\n forward(117)\n left(90)\n forward(117)\n left(90)\n forward(117)\n right(90)\n color(\"Brown\")\n forward(298)\n color(\"Yellow\")\n right(90)\n forward(117)\n right(90)\n\n #porta\n forward(160)\n right(90)\n forward(80)\n right(90)\n forward(45)\n right(90)\n forward(80)\n right(90)\n forward(298-(160-45))\n\n #chão\n color(\"Green\")\n forward(415)\n backward(900)\n \ndef main():\n figuras_planas()#linha 4\n casa()#linha 54\n \n done()\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.4994138479232788,
"alphanum_fraction": 0.5158265233039856,
"avg_line_length": 23.371429443359375,
"blob_id": "0c56fe89ff61ad651c189c257ac6d7176f8280a9",
"content_id": "2bf2c9706d91c95b68bfb8b4fb480e5316cfa171",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 861,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 35,
"path": "/sem09-2-q5-primos_intervalo.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n05. Escreva um programa que leia dois valores inteiros (x e y) e mostre todos os números primos entre x e y.\n'''\n\ndef primos(a, b):\n if b < a:\n aux = a\n a = b\n b = aux \n print(f'No intervalo de {a} até o {b}, estão os seguintes números primos:')\n while a <= b:\n primo = eh_primo(a)\n if 0 < primo <= 2:\n print(a)\n a += 1\n \ndef eh_primo(n):\n if n == 0: return 0\n i = n\n cont = 0\n while i > 0:\n if n % i == 0:\n cont += 1\n i -= 1\n \n return cont\n\ndef main():\n print('Esse programa mostra os números primos de um intervalo entre números inteiros.')\n n = int(input('Digite o primeiro número do intervalo: '))\n n1 = int(input('Digite o segundo número do intervalo: '))\n primos(n, n1)\n \nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.5841280817985535,
"alphanum_fraction": 0.5909400582313538,
"avg_line_length": 27.230770111083984,
"blob_id": "b5c4df48db40f9d8606b7cf0e0b2ccc5d407fd4e",
"content_id": "ba8fdf4995cda66f353945f2f1484adb563f5af5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2958,
"license_type": "no_license",
"max_line_length": 120,
"num_lines": 104,
"path": "/sem15-q5-dicionario-agenda_de_telefones_em_um_dicionario.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\nEscreva um programa para armazenar uma agenda de telefones em um dicionário. Cada pessoa pode ter um ou mais \ntelefones e a chave do dicionário é o nome da pessoa. Seu programa deve ter as seguintes opções:\n 1 - Incluir Novo Nome\n 2 - Incluir Telefone\n 3 - Excluir Telefone\n 4 - Excluir Nome\n 5 - Mostrar Agenda\n 0 - Fim do Programa\n ========================\n Digite sua opção:\n\nincluir_novo_nome: acrescenta um nome novo na agenda, com um ou mais telefones. Ela deve receber como \nargumentos o nome e os telefones.\nincluir_telefone: acrescenta um telefone em um nome existente na agenda. Caso o nome não exista na agenda, \nvocê deve perguntar se a pessoa deseja incluí-lo. Caso a resposta seja afirmativa, use a função anterior para incluir o \nnovo nome.\nexcluir_telefone: exclui um telefone de uma pessoa que já está na agenda. Se a pessoa tiver apenas um telefone, \nela deve ser excluída da agenda.\nexcluir_nome: exclui uma pessoa da agenda. \nmostrar_agenda: essa função mostra todos os nomes e telefones na agenda.\n'''\n\ndef mostrar_agenda():\n for cont in agenda:\n print(f'Nome: {cont}')\n print(f' Telefone(s):')\n qntd_num = 1\n for num in agenda[cont]:\n print(f' {qntd_num}. {num}')\n qntd_num += 1\n \ndef excluir_nome():\n nome = input().strip()\n if nome in agenda:\n del agenda[nome]\n else:\n print(f'{nome} não está na agenda.')\n \ndef excluir_telefone():\n nome = input().strip()\n telefone = input().strip()\n nao_excluir = []\n if nome in agenda:\n for cont in agenda[nome]:\n if cont != telefone:\n nao_excluir.append(cont)\n agenda[nome] = nao_excluir\n\ndef incluir_telefone():\n nome = input().strip()\n fone = input().strip()\n if nome in agenda:\n agenda[nome].append(fone)\n else:\n adicionar = input().lower().strip()\n if adicionar == 's':\n agenda[nome] = [fone]\n\ndef incluir_novo_nome():\n nome = input().strip()\n fone = input().strip()\n if nome not in agenda:\n agenda[nome] = [fone]\n else:\n print('Nome já existe na agenda.')\n\nagenda = {}\n\ndef main():\n running = True\n #repete até que o usuário digite 'q' para sair\n while running == True:\n \n menuChoice = int(input())\n \n #1 para incluir novo nome\n if menuChoice == 1:\n incluir_novo_nome()\n\n #2 para incluir telefone\n elif menuChoice == 2:\n incluir_telefone()\n\n #3 para excluir telefone\n elif menuChoice == 3:\n excluir_telefone()\n\n #4 para excluir nome\n elif menuChoice == 4:\n excluir_nome()\n\n #5 para mostrar agenda\n elif menuChoice == 5:\n mostrar_agenda()\n \n #0 para sair\n elif menuChoice == 0:\n running = False\n\n \n\nif __name__ == '__main__':\n main()\n"
},
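A hedged side note on the phone-book record above: `dict.setdefault` collapses the "name already exists vs. new name" branching that `incluir_telefone` and `incluir_novo_nome` both repeat. The helper name `adicionar_telefone` below is illustrative, not from the original file:

```python
agenda = {}

def adicionar_telefone(agenda, nome, fone):
    # creates the phone list on first use, then appends unconditionally
    agenda.setdefault(nome, []).append(fone)

adicionar_telefone(agenda, 'Ana', '1111-1111')
adicionar_telefone(agenda, 'Ana', '2222-2222')
print(agenda)  # {'Ana': ['1111-1111', '2222-2222']}
```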
{
"alpha_fraction": 0.7322404384613037,
"alphanum_fraction": 0.7344262003898621,
"avg_line_length": 31.678571701049805,
"blob_id": "6de714df51b741e0f1bdf5a726ad877caf77c9a1",
"content_id": "9967b74a3aa8963a4d9d531bfa00f8bc8c202ba8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 933,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 28,
"path": "/sem02-q6-calPerimetroEAreaDeTerrenoRetang.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#variável \"lado_menor\", do tipo float, recebe um valor a partir da leitura do teclado\nlado_menor = float(input(\"Lado menor do terreno: \"))\n\nprint(lado_menor)\na = int(input())\n\n#variável \"lado_maior\", do tipo float, recebe um valor a partir da leitura do teclado\nlado_maior = float(input(\"Lado maior do terreno: \"))\n\nprint(lado_maior)\na = int(input())\n\n#variável \"area\" recebe o valor da multiplicação entre os valores guardados nas variáveis \"lado_maior\" e \"lado_menor\"\narea = lado_menor * lado_maior\n\nprint(area)\na = int(input())\n\n#variável \"perimetro\" recebe o valor do perimetro conforme a fórmula\nperimetro = (2 * lado_menor) + (2 * lado_maior)\n\nprint(perimetro)\na = int(input())\n\n#impresão do valor da \"area\" com uma formatação para melhor compreenssão\nprint(f'Área do terreno: {area}')\n#impresão do valor da \"perimetro\" com uma formatação para melhor compreenssão\nprint(f'Perímetro do terreno: {perimetro}')\n"
},
{
"alpha_fraction": 0.43111109733581543,
"alphanum_fraction": 0.4577777683734894,
"avg_line_length": 11.5,
"blob_id": "3c9b6626d6b108495c5e2e241c4685f68c5524b1",
"content_id": "b46851f32de5f1e68ca1f65659097c2441da3918",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 225,
"license_type": "no_license",
"max_line_length": 26,
"num_lines": 18,
"path": "/passo2-auto-repetiçãoEx3.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "from turtle import *\n\n\ndef main():\n \n speed(11)\n shape(\"turtle\")\n\n for cont in range(30):\n forward(5)\n penup()\n forward(5)\n pendown()\n\n done()\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.43264561891555786,
"alphanum_fraction": 0.46662622690200806,
"avg_line_length": 26.46666717529297,
"blob_id": "303ce742df79b5603e64676f1024b86d7bfc7b49",
"content_id": "6cb29405e5b81e167540ff754f576e186d7773e7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1667,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 60,
"path": "/desafio1-questaoDeTempo - Copia.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "\"\"\"\nprint(\"no Python, qual é o nome )\n\nPrint()\nprint(\":)\" *100)\nvoce tem 3 chances para tentar adivinha qa nuero rá srtead e 1 10\n\"\"\"\n\ndef quiz():\n pont = 0\n s = int(input(\"Quantas copas do mundo a seleção brasileira já conquistou? \"))\n if s == 5:\n print(\"Parabens você acertou!!\\n:) :) :)\\n\")\n pont += 1\n else:\n print(\"Que pena, você errou!!!\\n:( :( :(\\n\")\n\n s = int(input(\"Em que ano Pedro Álvares Cabral descobriu o Brasil?? \"))\n if s == 1500:\n print(\"Parabens você acertou!!\\n:) :) :)\\n\")\n pont += 1\n else:\n print(\"Que pena, você errou!!!\\n:( :( :(\\n\")\n\n s = input(\"Qual foi o campeão do brasileirão 2020? \")\n if s == 'flamengo':\n print(\"Parabens você acertou!!\\n:) :) :)\")\n pont += 1\n else:\n print(\"Que pena, você errou!!!\\n:( :( :(\")\n print(f'Sua pontuação: {pont}/3.')\n\nprint(\"=\"*50)\nprint(\"Bônus: Jogo da forca\\n\")\na1 , a2, a3, a4, a5, = \"_\", \"_\", \"_\", \"_\", \"_\"\nprint(\">>> Palavra: \",a1 , a2, a3, a2, a4, a5, a2)\npalavra = 'a'\nwhile palavra != 'laranja':\n d = input('\\nDigite uma letra(apenas minúscula): ')\n if d == 'l' or d == 'a' or d == 'r' or d == 'n' or d == 'j':\n print(\":)\"*4)\n print('muito bem!!')\n if d == 'l':\n a1 = 'l'\n if d == 'a':\n a2 = 'a'\n if d == 'r':\n a3 = 'r'\n if d == 'n':\n a4 = 'n'\n if d == 'j':\n a5 = 'j'\n else:\n print(\":(\" *4 )\n print(\"Não foi nessa vez!!\")\n print(\"\\n>>> Palavra: \",a1, a2, a3, a2, a4, a5, a2)\n palavra = a1+a2+a3+a2+a4+a5+a2\n\nprint(\"Parabens, você acertou a palavra!!\")\nfim = input()\n"
},
{
"alpha_fraction": 0.507317066192627,
"alphanum_fraction": 0.5182926654815674,
"avg_line_length": 25.45161247253418,
"blob_id": "11671784f8061831756769b14a0f23cf5a66654f",
"content_id": "f2d90f5e439229332a68d4858e961e1335622055",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 828,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 31,
"path": "/sem08-q3-recebeNumAteDig0ERetornarMaiorEMenor.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\nEscreva um programa que leia uma quantidade indefinida de números inteiros positivos terminada pelo \nnúmero 0 (zero). Ao final, o programa deve mostrar o maior e o menor de todos os números lidos \n(excluindo o zero).\n'''\n\ndef main():\n maior = 0\n menor = 0\n i = 0\n while True:\n num = int(input('Digite um número(0(zero) to stop!): '))\n if num == 0:\n break\n if i == 0:\n maior = num\n menor = num \n i += 1\n elif num > maior:\n maior = num \n elif num < menor:\n menor = num\n\n if i != 0: \n print(f'O maior valor digitado foi o número {maior}.')\n print(f'Já o menor valor válido digitado foi o número {menor}.')\n else:\n pass\n\nif __name__ == '__main__':\n main()\n"
},
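The min/max loop above seeds `maior` and `menor` from the first accepted value, guarded by the `i == 0` flag. A common alternative, offered here only as a sketch with a hard-coded list standing in for the keyboard loop, seeds the running extremes with infinities so no first-iteration flag is needed:

```python
maior = float('-inf')  # any real number is larger than this
menor = float('inf')   # any real number is smaller than this
for num in [7, 3, 9, 1]:
    if num > maior:
        maior = num
    if num < menor:
        menor = num
print(maior, menor)  # 9 1
```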
{
"alpha_fraction": 0.6023192405700684,
"alphanum_fraction": 0.6316507458686829,
"avg_line_length": 35.650001525878906,
"blob_id": "4891869a080ad61cc5ab9891ef0aaa6875d84195",
"content_id": "997de8ce7de3c108b8a54c99eb7e3e50557d0b47",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1476,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 40,
"path": "/sem14-q3-dicionario-menores_e_maiores_de_18_anos_com_dicionario.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n03.Crie um programa que cadastre informações de 20 pessoas (nome, idade e cpf) e coloque em um dicionário. Após a \nleitura, remova todas as pessoas menores de 18 anos do dicionário e coloque-as separadas em outro dicionário.\nImprima os dois dicionários separando os campos por ; (ponto-e-vírgula). Use o CPF para chave do dicionário.\n'''\ndef removeMEnor18(dicionario):\n menores = {}\n cont = 0\n for pessoa in range(20):\n if dicionario[pessoa][1] < 18:\n menores[cont] = dicionario[pessoa]\n cont += 1\n del dicionario[pessoa]\n return dicionario, menores\n \ndef dicionarioComDados20Pessoais():\n dados_pessoais = {}\n for pessoa in range(20):\n nome = input().strip()\n idade = int(input())\n cpf = input().strip()\n dados_pessoais[pessoa] = (nome, idade, cpf)\n return dados_pessoais\n \ndef main():\n #entrada\n cadastro = dicionarioComDados20Pessoais()\n cadastro_mais18, cadastro_menores = removeMEnor18(cadastro)\n\n #saída\n print('========== MAIORES DE 18 ANOS ==========')\n for pessoa in cadastro_mais18:\n print(cadastro_mais18[pessoa][0],cadastro_mais18[pessoa][1],cadastro_mais18[pessoa][2], sep = ';')\n\n print('========== MENORES DE 18 ANOS ==========')\n for menores in cadastro_menores:\n print(cadastro_menores[menores][0], cadastro_menores[menores][1], cadastro_menores[menores][2], sep = ';')\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.6113945841789246,
"alphanum_fraction": 0.6326530575752258,
"avg_line_length": 27.682926177978516,
"blob_id": "0eda3e9ca6f50138b96b9044a4385acf568c93b4",
"content_id": "fda085678c681ff351bc7e8702c56b593087c3a4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1186,
"license_type": "no_license",
"max_line_length": 154,
"num_lines": 41,
"path": "/sem11-q5-listas.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n5. Leia duas listas A e B contendo 25 elementos inteiros cada, gerar e imprimir uma lista C de 50 elementos, \ncujos elementos sejam a intercalação dos elementos de A e B.\n'''\n\ndef recebe25():\n num = 25\n lista = []\n cont = 0\n print('Preencendo uma lista com 25 termos: ')\n while cont < num:\n n_paraInserir =int(input('Digite um número: '))\n #n_paraInserir = formataFloatCasas(n_paraInserir,1)\n lista.append(n_paraInserir)\n cont += 1\n return lista\n\ndef intercalacaoDe2Listas(listaA, listaB):\n num = 25\n cont = 0\n listaC = []\n while cont < num: \n #n_paraInserir = formataFloatCasas(n_paraInserir,1)\n listaC.extend([listaA[cont]])\n listaC.extend([listaB[cont]])\n cont += 1\n return listaC\n\ndef main():\n #entrada\n print('Intercalação de listas: ')\n listaA = recebe25()\n listaB = recebe25()\n\n #processamento\n listaC = intercalacaoDe2Listas(listaA, listaB)\n #saída\n print(f'Lista de números da primeira lista: {listaA}\\nLista de números da segunda lista: {listaB}\\nIntercalação das duas listas anteriores: {listaC}')\n \nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.6770045161247253,
"alphanum_fraction": 0.704992413520813,
"avg_line_length": 49.846153259277344,
"blob_id": "ad5e6eaa2cacf0a90f229ea0de1a89afaeee67ab",
"content_id": "69fcbdf64cd4dfd000e20204ab50326614429644",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1351,
"license_type": "no_license",
"max_line_length": 180,
"num_lines": 26,
"path": "/sem09-q5-dodo_natal_mortal.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n. O dodô é uma ave não voadora, extinta atualmente, e que era endêmica da Ilha \nMaurítius, na costa leste da África. A partir do ano 1600, durante cada ano, 6% \ndos animais dos animais vivos no começo do ano morreram e o número de \nanimais nascidos ao longo do ano que sobreviveram foi de 1% da população \ninicial.\nEscreva um programa que leia a população de aves no início do ano 1600 e \nimprime, anualmente, a partir do fim de 1600, o número de nascimentos, mortes e o total da população \npor ano (apenas a parte inteira do números, separados por vírgula). O programa encerra sua execução \nquanto a população total cai para menos de 10% da população original.\n'''\n\ndef main():\n populacao_original = int(input('Qual era a população em 1600? '))\n ano = 1600\n populacao_atual = populacao_original\n nascidos_sobreviventes = 0\n while populacao_atual >= 0.1 * populacao_original:\n nascidos_sobreviventes = 0.01 * populacao_atual\n mortos = 0.06 * populacao_atual\n populacao_atual = (populacao_atual - mortos) + nascidos_sobreviventes \n print(f'Ano do cáculo: {ano}\\nNascidos desse ano que sobreviveram: {nascidos_sobreviventes:.0f}\\nMortes nesse ano: {mortos:.0f}\\nPopulação atual: {populacao_atual:.0f}.\\n')\n ano += 1\n \nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.6760828495025635,
"alphanum_fraction": 0.6892655491828918,
"avg_line_length": 65.375,
"blob_id": "403f74c25238fe459d269e6e0f8bead65fb7c5f9",
"content_id": "1f9eaa130db9f92897655b4d5ae8558086d9e39d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 546,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 8,
"path": "/sem02-q3-dobroTriploRaizDeN.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#a variável \"n\", do tipo int, recebe um valor a partir da leituraa do teclado\nn = int(input(\"Digite um número: \"))\n#impressão na tela com a funçao f''(com formatação) do dobro do valor guardado em \"n\"\nprint(f'O dobro de {n} vale {n*2}.')\n#impressão na tela com a funçao f''(com formatação) do triplo do valor guardado em \"n\"\nprint(f'O triplo de {n} vale {n*3}.')\n#impressão na tela com a funçao f''(com formatação) da raiz do valor guardado em \"n\" conforme a fórmula (n**1/2)\nprint(f'A raiz quadrada de {n} vale {n ** (1/2):.2f}')\n"
},
{
"alpha_fraction": 0.44768211245536804,
"alphanum_fraction": 0.4655629098415375,
"avg_line_length": 21.863636016845703,
"blob_id": "df5b3ada20ea2575a410243ae79e06c24be3e0d2",
"content_id": "71f7fc0634f3b12ec0413901fa17b59e7f75ec9b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1521,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 66,
"path": "/sem05-2-q4-retornaMaiorEMenor.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#03. Escreva um programa que leia 5 números inteiros e escreva o maior e o menor deles.\n#Considere que todos os valores são diferentes. NÃO use as funções embutidas min() e max().\n\ndef maior(a, b, c, d, e, med): \n if a > med:\n print(a)\n if b > med:\n print(b)\n if c > med:\n print(c)\n if d > med:\n print(d)\n if e > med:\n print(e)\n #return m\n\ndef media(a, b, c, d, e):\n return (a + b + c + d + e) / 5\n \ndef main():\n \n n1 = int(input('Digite um número: '))\n n2 = int(input('Digite um número: '))\n n3 = int(input('Digite um número: '))\n n4 = int(input('Digite um número: '))\n n5 = int(input('Digite um número: '))\n \n \n m = media(n1, n2, n3, n4, n5)\n #round(m,2)\n #print(\"Média = %.2f\" % m)\n print(\"%.2f\" % m)\n maior(n1, n2, n3, n4, n5, m)\n #print(f'Maior: {a}\\nMenor: {b}')\n\nif __name__ == '__main__':\n main()\n\n\n\n'''\ndef maior_e_menor(a, b, c, d, e):\n \n if (a > b and b > c and c > d and d > e):\n return a\n elif (a < b and b > c and c > d and d > e): #2\n return b\n elif (a < b and b < c and c > d and d > e): #3\n return c\n elif (a < b and b < c and c < d and d > e):\n return d\n else:\n return e\n \ndef maior_e_menor2(a, b, c, d, e):\n if a > b > c > d > e:\n return a \n elif a < b > c > d > e: #2\n return b \n elif a < b < c > d > e: #3\n return c\n elif a < b < c < d > e:\n return d\n else:\n return e\n''' \n"
},
{
"alpha_fraction": 0.44423791766166687,
"alphanum_fraction": 0.48327139019966125,
"avg_line_length": 17.55172348022461,
"blob_id": "902f83930aba415857adc634a99b9adbe7a00762",
"content_id": "643b606e48c09e1c28132e6cf49cf7503f8ffc8b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 539,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 29,
"path": "/sem05-2-q5-retornaIMC.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "def imc(p, a):\n return p / (a**2)\n\n\ndef class_imc(m):\n if m < 18.5:\n return 'Abaixo do peso'\n elif 18.5 <= m < 25:\n return 'Peso normal'\n elif 25 <= m < 30:\n return 'Sobrepeso'\n elif 30 <= m < 35:\n return 'Obeso leve'\n elif 35 <= m < 40:\n return 'Obeso moderado'\n else:\n return 'Obeso mórbido'\n \n\ndef main():\n ps = float(input('peso: '))\n at = float(input('altura: '))\n m = imc(ps, at)\n print(m)\n print(class_imc(m))\n\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.7235354781150818,
"alphanum_fraction": 0.7338129281997681,
"avg_line_length": 50.21052551269531,
"blob_id": "b196dc3cf9911b594b6e62e8a9b6490542da7a37",
"content_id": "e45fde1133e834da0803bb70388572bf2865f572",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 992,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 19,
"path": "/sem03-q4-acrescentaOuDescontaValorX.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#início de uma função nomeada \"percentual\", com dois argumetos\ndef percentual(valor, porcentagem):\n #retorno da função para porcentagem descontada do \"valor\"\n return valor * (porcentagem / 100)\n\n#variável \"pr\", do tipo float, recebe um valor a partir da leitura do teclado\npr = float(input(\"Preço: \"))\n#variável \"vr_p\", do tipo float recebe um valor a partir da leitura do teclado\nvr_p = float(input(\"Percentual: \"))\n#variável \"pr_acres\" recebe o valor de \"pr\" mais o percentual conforme a função \"porcentagem\"\npr_acres = pr + percentual(pr, vr_p)\n#variável \"pr_desc\" recebe o valor de \"pr\" menos o percentual conforme a função \"porcentagem\"\npr_desc = pr - percentual(pr, vr_p)\n#imprimir na tela a mensagem com os valores inseridos conforme a formatação\nprint(f'R${pr} com acréscimo de {vr_p}% fica por R${pr_acres}')\n#imprimir na tela a mensagem com os valores inseridos conforme a formatação\nprint(f'R${pr} com desconto de {vr_p}% fica por R${pr_desc}')\n\n#186-0316\n"
},
{
"alpha_fraction": 0.5575739741325378,
"alphanum_fraction": 0.6067975163459778,
"avg_line_length": 36.5054931640625,
"blob_id": "8a69d2f1fd416d67d8be067cf6131f3ed97b9b6d",
"content_id": "a308b6872b93310f367ed42b11b683698e384746",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3425,
"license_type": "no_license",
"max_line_length": 152,
"num_lines": 91,
"path": "/sem13-q4-arrays_vendas_dos_fabricantes_de_veiculos.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n04.A tabela abaixo demonstra a quantidade de vendas dos fabricantes de veículos durante o período de 2013 a 2018, em \nmil unidades.\n\nFabricante/Ano 2013 2014 2015 2016 2017 2018\nFiat 204 223 230 257 290 322\nFord 195 192 198 203 208 228\nGM 220 222 217 231 245 280\nWolkswagen 254 262 270 284 296 330\n\nFaça um programa que:\na) leia os dados da tabela pelo teclado;\nb) leia um ano do período determine e exiba o fabricante que mais vendeu nesse ano;\nc) determine e exiba o ano de maior volume geral de vendas.\nd) determine e exiba a média anual de vendas de cada fabricante durante o período.\n'''\n\ndef media_de_valor_por_linhaDaMatriz(matriz):\n medias = []\n soma = 0\n for linha in matriz:\n for coluna in linha:\n soma += coluna[2]\n medias.append(soma/len(matriz[0]))\n soma = 0\n return medias\n\n#essa função deve somar os elementos por coluna e retornar qual acumula maior valor\ndef maior_valor_coluna_matriz(matriz):\n soma_por_coluna = 0\n maior = 0, 0\n ano = matriz[0][0][1]\n qntd_colunas = len(matriz[0])\n for colunas in range(qntd_colunas):\n for linha in matriz:\n #ano = linha[1]\n for elemento in linha:\n if elemento[1] == ano:\n soma_por_coluna += elemento[2]\n if soma_por_coluna > maior[0]:\n maior = soma_por_coluna, ano\n ano += 1\n soma_por_coluna = 0\n return maior\n\ndef mais_vendido(matriz, ano):\n maior = (0, 0, 0)\n for linha in matriz:\n for elemento in linha:\n if elemento[1] == ano and elemento[2] > maior[2]:\n maior = elemento\n return maior \n\ndef preenche_matriz( linhas , colunas):\n matriz = [] #lista vazia\n for fab in linhas:\n linha = [] # cada linha é uma lista (vetor)\n for ano in colunas:\n vendas = int(input(f'Vendas da {fab} no ano de {ano}: '))\n linha.append((fab, ano, vendas) )\n #insere a linha na matriz\n matriz.append(linha)\n return matriz\n\ndef main():\n #entrada de dados\n fabricantes = ('Fiat', 'Ford', 'GM', 'Wolkswagen')\n periodo = list(ano for ano in range(2013, 2019))\n vendas_dos_fabricantes = preenche_matriz(fabricantes, periodo)\n ano_fabricante_mais_vendeu = int(input('Ano que deseja consultar qual fabricante foi a campeã de vendas: '))\n \n #processamento\n campeao_de_vendas_ano = mais_vendido(vendas_dos_fabricantes, ano_fabricante_mais_vendeu)\n ano_mais_lucrativo = maior_valor_coluna_matriz(vendas_dos_fabricantes)\n media_de_vendas_2013_2018 = media_de_valor_por_linhaDaMatriz(vendas_dos_fabricantes)\n\n #saida \n print(f'A fabricante que mais vendeu em {ano_fabricante_mais_vendeu} foi a {campeao_de_vendas_ano[0]} com {campeao_de_vendas_ano[2]} mil unidades.')\n print(f'O ano de maior volume geral de vendas foi {ano_mais_lucrativo[1]} com {ano_mais_lucrativo[0]} mil unidades.')\n print('A média anual de vendas de cada fabricante entre 2013 e 2018 foi:')\n\n cont = 0\n for fab in fabricantes: \n print(f'A {fab} vendeu em média {round(media_de_vendas_2013_2018[cont],2)} unidades por ano.')\n cont += 1\n \nif __name__ == '__main__':\n ''' \n testes\n '''\n main()\n"
},
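Part (c) of the exercise above (the year with the highest overall sales) reduces to one column sum per year once the matrix is transposed. The sketch below uses `zip(*...)` for the transpose and the figures quoted in the exercise statement; it is an illustration, not the original solution:

```python
anos = [2013, 2014, 2015, 2016, 2017, 2018]
vendas = [
    [204, 223, 230, 257, 290, 322],  # Fiat
    [195, 192, 198, 203, 208, 228],  # Ford
    [220, 222, 217, 231, 245, 280],  # GM
    [254, 262, 270, 284, 296, 330],  # Wolkswagen
]
# zip(*vendas) yields one tuple per column, i.e. per year
totais = [sum(coluna) for coluna in zip(*vendas)]
total, ano = max(zip(totais, anos))
print(ano, total)  # 2018 1160
```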
{
"alpha_fraction": 0.5217983722686768,
"alphanum_fraction": 0.5504087209701538,
"avg_line_length": 25.178571701049805,
"blob_id": "53c303401ae6bee24737e27e3f85af8d0c6e5cad",
"content_id": "54dd61aa32a091393dda200caf413dee903fcfc0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 738,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 28,
"path": "/sem04-2-q3-retornaBooleanoSeConsoante.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#03. Escreva um programa que leia um caractere e mostra o valor booleano True (verdadeiro) se for uma CONSOANTE\n#ou o valor booleano False (falso) caso contrário.\n\ndef entr():\n s = input('Digite um, e somente um, caractere: ').lower().strip()\n return ord(s)\n\ndef letra(l):\n if l >= 97 and l <= 122:\n if l != 97 and l != 101 and l!= 105 and l!= 111 and l!= 117:\n return True\n else:\n return False\n else:\n return False\n \ndef main():\n a = entr()\n if letra(a) == True:\n \n print(f'O caractere {chr(a)} é uma consoante. ')\n else:\n \n print(f'O caractere {chr(a)} não é uma consoante.')\n #print(letra(a))\n\nif __name__ == '__main__':\n main()\n\n"
},
{
"alpha_fraction": 0.46516191959381104,
"alphanum_fraction": 0.5554465055465698,
"avg_line_length": 23.261905670166016,
"blob_id": "902ac8ddccaec4b514d8c26d03c6b6d5e2ce7eeb",
"content_id": "c3e29c400067adf7bb122b4f6df10fdc26c824f5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1033,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 42,
"path": "/sem14-dicionario-codeclube_desafio03_planetasDistantes.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "def displayMenu():\n print(f\"{'='*13}Planetas distantes{'='*13}\")\n print(\"=\" * 44)\n print(\"Menu:\")\n print('Escolha qual planeta você deseja saber a distância até nós:')\n print(\" 1 = Mercúrio\")\n print(\" 2 = Vênus\")\n print(\" 3 = Marte\")\n print(\" 4 = Júpiter\")\n print(\" 5 = Saturno\")\n print(\" 6 = urano\")\n print(\" 7 = Netuno\")\n print(\" q = sair\")\n\ndistances = {\n 1: (\"mercúrio\", 91700000),\n 2: (\"vênus\", 41400000),\n 3: (\"marte\", 78300000),\n 4: (\"júpiter\", 628000000),\n 5: (\"saturno\", 1280400000),\n 6: (\"urano\", 2720400000),\n 7: (\"netuno\", 4350400000)\n }\n\nrunning = True\n\ndisplayMenu()\n\n#repete até que o usuário digite 'q' para sair\nwhile running == True:\n\n menuChoice = input(\">_\").lower()\n \n if menuChoice in '1234567':\n print(f'{distances[int(menuChoice)][0]} tá a {distances[int(menuChoice)][1]} km da Terra.')\n \n #q para sair\n elif menuChoice == 'q':\n running = False\n\n else:\n print(\"Escolha inválida!\")\n"
},
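The menu above validates input with `menuChoice in '1234567'`, which is a substring test: it also accepts `''` and `'12'` and then fails on conversion or lookup. A safer pattern, sketched here with an excerpt of the same table, tests membership in the dictionary itself so the menu and the data cannot drift apart:

```python
distances = {1: ("mercúrio", 91700000), 2: ("vênus", 41400000)}  # excerpt

choice = input(">_").strip()
if choice.isdigit() and int(choice) in distances:
    nome, km = distances[int(choice)]
    print(f'{nome} tá a {km} km da Terra.')
else:
    print("Escolha inválida!")
```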
{
"alpha_fraction": 0.5706595182418823,
"alphanum_fraction": 0.6177658438682556,
"avg_line_length": 32.727272033691406,
"blob_id": "906b88897de6fa2182f4f0cc011187ed99272a2e",
"content_id": "01011c0a474895221eb621829f88cf0d63f1c13c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 754,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 22,
"path": "/sem09-q4-somandoOsNumerosIdade.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n03. O número da sorte de uma pessoa é calculado somando os dígitos da sua data de nascimento. Escreva um \nprograma que leia a data de nascimento, digitada no formado ddmmaaaa (um número inteiro com 8 \ndígitos), e mostre o seu número da sorte. Por exemplo, quem nasceu em 29/04/1989 deve digitar 29041989 \ne o programa vai calcular que o número da sorte é 42 (2 + 9 + 0 + 4 + 1 + 9 + 8 + 9 = 42).\n'''\n\ndef numero_da_sorte(idade):\n i = 0\n total = 0 \n while i <= (len(idade)-1):\n cont = int(idade[i])\n total += cont\n i += 1\n return total\n\ndef main():\n idade = input('Idade: ').strip()\n print(f'O número {numero_da_sorte(idade)} é o seu número da sorte!!')\n \nif __name__ == '__main__':\n main()\n\n"
},
{
"alpha_fraction": 0.5227272510528564,
"alphanum_fraction": 0.5616883039474487,
"avg_line_length": 19.53333282470703,
"blob_id": "16d3a472eac718b1e104313d89b5cec11639c315",
"content_id": "441d0ed017375779a7cdf6b01492b933c709dce7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 313,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 15,
"path": "/sem07-q1-contar0a50.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#01. Escreva um programa que mostre todos os números inteiros de 1 a 50 (um por linha).\n\n#essa função imprime na tela os números de 1 a 50.\ndef contando():\n #'i' é o 'contador'\n i = 1\n while i <= 50:\n print(i)\n i += 1\n\ndef main():\n contando()\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.859649121761322,
"alphanum_fraction": 0.859649121761322,
"avg_line_length": 57,
"blob_id": "366a50183af5d61da3394401b64bbadd39390729",
"content_id": "510a9d89521a896f902518cf53128be311cf847b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 59,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 1,
"path": "/README.md",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#soluções para as atividades realizadas na disciplina PEC"
},
{
"alpha_fraction": 0.5892215371131897,
"alphanum_fraction": 0.6111776232719421,
"avg_line_length": 23.086538314819336,
"blob_id": "43e152cb8195dcf674a80cb0afe5cc06d55de6ad",
"content_id": "d6d3eb7fd8ca5c16bd3c006e1d08ce585361ec20",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2568,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 104,
"path": "/exemplo-logicaDePrograc.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n DEFINIÇÃO: INDENTAÇÃO OU ENDENTAÇÃO\n ===================================\n \n Em inglês nós temos o verbo \"to indent\" que\n significa encaixar.\n\n Em português a palavra \"indentar\" ainda é,\n para muitos, um neologismo.\n\n É mais comum dicionários de português trazer\n a palavra \"endentar\", que é derivada de \"dente\".\n\n O \"dente\" pode ser humano, a estrutura da maxila\n e da mandíbula, que realiza a mastigação.\n\n Mas também pode ser uma saliência ou ponta\n em objetos construídos ou elaborados pelo\n homem. \"dentes de uma engrenagem\"\n\n Enfim, para programação, \"Indentar\" ou \"Endentar\"\n é um recuo do texto em relação à margem;\n inserindo (ou não) um espaço entre\n a margem e o comando.\n\n\n TABULAÇÃO x ESPAÇO EM BRANCO\n ============================\n\n Se olhar na tabela ASCII temos os caracteres:\n \n TECLA DE TH OU TAB (código decimal 09):\n ---------------------------------------\n\n é uma tabualação horizontal.\n Vem de tabular, deslocar parte de um texto\n através de vários espaços *predefinidos* para\n melhor visualização.\n\n TECLA DE ESPAÇO (código decimal 32):\n ------------------------------------\n\n é um espaço em branco.\n Lugar vazio que pode ser ocupado.\n\n Em Python, a indentação pode ser feita com\n qualquer quantidade de espaços ou tabulação.\n Por padrão, devemos usar 4 espaços para\n fazer a indentação dos blocos de comandos.\n \n Alguns editores de texto próprios para programação convertem automáticamente\n um TAB em 4 espaços em branco. É o caso do IDLE.\n'''\n\nprint('Comando 1')\nprint('Comando 2')\nprint('Comando 3')\nprint('Comando 4')\nprint('Comando 5')\n\n\nif True:\n print('Comando 6')\n print('Comando 7')\n\nprint('Comando 8')\n\n\nif True: print('Comando 9')\n\nif False:\n print('Comando 10 (não executa)')\n print('Comando 11 (não executa)')\n\nif False: print('Comando 12 (não executa')\n\nprint('Comando 10')\nprint('Comando 11')\nprint('Comando 12')\n\n\nfor i in range(3):\n print(f'Comando 13 (i={i})')\n\nfor j in range(3): print(f'Comando 14 (j={j})')\n\n\ndef e_par(n): return n % 2 == 0\n\nfor k in range(1, 11): # k vai de 1 a 10\n if not e_par(k): print(f'Comando 15 (k={k})')\n\n if e_par(k):\n print(f'Comando 16 (k={k})')\n\n\nfor i in range(3):\n for j in range(3):\n print(f'Comando 17 (i={i},j={j})')\n print(f'Comando 18 (i={i},j={j})')\n print(f'Comando 19 (i={i},j={j})')\n print(f'Comando 20 (i={i},j={j})')\n\nprint('Último comando')\n"
},
{
"alpha_fraction": 0.5590888857841492,
"alphanum_fraction": 0.5771312713623047,
"avg_line_length": 34.16666793823242,
"blob_id": "ad58a4da9285ed00733118f154639b95372cf9a1",
"content_id": "02239865788f67e9cbf64dd3459b7b48b6b7704d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4502,
"license_type": "no_license",
"max_line_length": 310,
"num_lines": 126,
"path": "/desafio3-mantendoAPontuaçãoPeterAqui.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": " \n#deve conter todas as perguntas do quiz\ndef quiz():\n print('''\nQ1. Se, durante uma corrida de carros, você deixa o segundo colocado pra trás,\nqual é a sua colocação após a ultrapassagem?\na - primeiro\nb - segundo\nc - terceiro\n ''')\n quiz_conf(1)\n print('''\nQ2. A mãe de Maria tem cinco filhas: Fafá, Fefê, Fifi, Fofó e? Qual é o nome da quinta filha?\na - Fufú\nb - Gagá\nc - Maria\n ''')\n quiz_conf(2)\n print(\"\"\"\nQ3. No caminho de casa até o mercado, uma senhora conta 10 árvores a sua direita.\nApós as compras, ela volta para casa e conta 10 árvores a sua esquerda.\nQuantas árvores ela viu no total nesse dia?\na - 10\nb - 20\nc - 30\n \"\"\")\n quiz_conf(3)\n print('''\nQ4. Em uma sala quadrada, temos um gato em cada canto. Cada gato vê outros três gatos. Quantos gatos há no total dentro da sala?\na - 12\nb - 9\nc - 4\n ''')\n quiz_conf(4)\n print('''\nQ5. Fábio foi sozinho até a padaria no centro da cidade.\nDurante o percurso, encontrou duas garotas passeando com três cachorros,\nque estavam brincando com dois gatos, que, por sua vez, tinham dois donos.\nQuantos seres no total foram com Fábio até a padaria?\na - 0\nb - 5\nc - 9\n ''')\n quiz_conf(5)\n print('''\nQ6. Se uma borboleta vive cinco dias e a cada dia ela voa quatro metros, quantos metros ela terá percorrido em uma semana?\na - 20\nb - 24\nc - 28\n ''')\n quiz_conf(6)\n \ndef quiz_conf(n):\n s = input().lower() \n if s == 'a' or s == 'b' or s == 'c':\n #processamento da resposta Q1.\n if n == 1:\n if s == 'b':\n certo(0)\n else:\n certo(n) \n #processamento da resposta Q2.\n if n == 2:\n if s == 'c':\n certo(0)\n else:\n certo(n) \n #processamento da resposta Q3.\n if n == 3:\n if s == 'a':\n certo(0)\n else:\n certo(n)\n #processamento da resposta Q4.\n if n == 4:\n if s == 'c':\n certo(0)\n else:\n certo(n)\n #processamento da resposta Q5.\n if n == 5:\n if s == 'a':\n certo(0)\n else:\n certo(n)\n #processamento da resposta Q6.\n if n == 6:\n if s == 'a':\n certo(0)\n else:\n certo(n)\n else: \n certo('n')\n\ndef certo(n):\n if n == 'n':\n print('Você não escolheu a, b ou c :(') \n if n == 0:\n print('Isso mesmo, parabéns!!')\n global score\n score = scor(score)\n if n == 1:\n print(\"Errado, a alternatica correta é a letra 'b'. Pois, se você ultrapassa o segundo colocado, assume o lugar dele, ficando em segundo!\")\n if n == 2:\n print(\"Errado, a resposta correta é 'c', se a mãe de Maria tem cinco filhas, então, podemos enumerar a prole da seguinte forma: \\n1 - Fafá, 2 - Fefe, 3 - Fifi, 4 - Fofo e 5 - (mas não menos importante) MARIA.\")\n if n == 3:\n print(\"Nãaaaao, a resposta correta é 'a', são as mesmas dez árvores vistas de diferentes perspectivas.\\nNa ida, as árvores estavam à direita da mulher, mas na volta, quando ela estava no sentido contrário da rua, \\nas plantas podiam ser vistas à esquerda.\")\n if n == 4:\n print(\"Errado, a resposta correta é 'c', se a sala é um quadrado, logo, possui quatro cantos. \\nE já sabemos que em cada um desses cantos há um gato, ou seja, quatro felinos estão na sala. \\nPara confirmar essa ideia, ainda sabemos que cada um dos gatos consegue ver os outros três que estão na sala.\")\n if n == 5:\n print(\"Errado, a resposta correta é 'a' zero, basta ler com atenção e interpretar o desafio: se Fábio foi sozinho até a padaria, então ninguém foi junto. 
\\nEle apenas 'encontrou' uma série de seres pelo caminho.\")\n if n == 6:\n print(\"Errado, a resposta é 'a', se a borboleta vive cinco dias, ela terá morrido antes de uma semana (afinal, uma semana tem sete dias).\\nNo entanto, se considerarmos seu tempo de vida, sabemos que, em cinco dias, ela voou 20 metros, pois 5 x 4 = 20.\")\n \ndef scor(x):\n x += 1\n return x\n\ndef main():\n print('=' * 10 + 'desafio: Mantendo a Pontuação' + '=' * 10)\n global score\n score = 0\n quiz()\n print(f'>>>Pontuação: você acertou {score} das 6 questões.\\n>>>Obrigado por jogar!!S2 S2')\n\nif __name__ == '__main__':\n main()\n"
},
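The quiz above threads its score through a `global score` variable and the one-line `scor` helper. As a sketch of the alternative (the function and question texts below are invented for illustration), returning the score keeps every function self-contained:

```python
def pergunta(enunciado, certa):
    # returns True (counted as 1) when the typed answer matches
    return input(enunciado + ' ').strip().lower() == certa

def quiz():
    score = 0
    score += pergunta('Q1. 2 + 2?', '4')
    score += pergunta('Q2. Capital do Brasil?', 'brasília')
    return score

if __name__ == '__main__':
    print(f'Pontuação: {quiz()}/2')
```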
{
"alpha_fraction": 0.6652126312255859,
"alphanum_fraction": 0.6935659646987915,
"avg_line_length": 23.13157844543457,
"blob_id": "40f3b34853a86c805c0733b5153297b621ef4b72",
"content_id": "04cfc88d1e3e2e94d51d54b7baba0c8b23be27e0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 931,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 38,
"path": "/sem02-q5-extraiUniDezCentDeInteiro.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#variável \"n\", do tipo int, recebe um valor a partir da leitura do teclado\nn = int(input(\"Digite um número entre 100 e 999: \"))\n#variável \"unidade\" recebe o valor do \"resto\" da divisão do valor guardado em n por 10\nunidade = n % 10\n\nprint(unidade)\na = int(input())\n\n# o valor da variável \"n\" é atualizado pelo valor inteiro da divisão de \"n\" por 10\nn = n // 10\n\nprint(n)\na = int(input())\n\n\n#variável \"dezena\" recebe o valor do resto da divisão do novo valor de \"n\" por 10\ndezena = n % 10\n\nprint(dezena)\na = int(input())\n\n\n# o valor da variável \"n\" é atualizado pelo valor inteiro da divisão de \"n\" por 10\nn = n // 10\n\nprint(n)\na = int(input())\n\n\n#variável \"centena recebe o valor do resto da divisão de \"n\" por 10\ncentena = n % 10\n\nprint(centena)\na = int(input())\n\n\n#imprimir na tela os valores processados, inseridos na mensagem conforme ordenados\nprint(f\"unidade: {unidade}, dezena: {dezena}, centena: {centena}.\")\n"
},
{
"alpha_fraction": 0.5150684714317322,
"alphanum_fraction": 0.5296803712844849,
"avg_line_length": 24.44186019897461,
"blob_id": "ea618d5d983f7742b1c5d131cb88b3907d55c6a4",
"content_id": "f4b672f8263050e87dbb157c49d76c0e99912424",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1125,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 43,
"path": "/sem08-2-q3-menuDeOpcoes.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n03. Escreva um programa Python que apresente o menu\n[...]\nSe for informada uma opção que não está no menu deve mostrar a mensagem “Opção inválida.”. \nEnquanto a opção for diferente de 0 (zero) deve-se continuar apresentando as opções. Obs: use como \nestrutura de repetição com teste no final e como estrutura condicional múltipla escolha.\n\n'''\n\ndef menu(opcao):\n if opcao == 0:\n print('0 - Fim de serviço.')\n elif opcao == 1:\n print('1 - Olá. Como vai?')\n elif opcao == 2:\n print('2 - Vamos estudar mais.')\n elif opcao == 3:\n print('3 - Meus Parabéns!')\n else:\n print('Opção inválida.')\n \ndef main(): \n while True:\n print('''OPÇÕES:\n1 - SAUDAÇÃO\n2 - BRONCA\n3 - FELICITAÇÃO\n0 - FIM''')#\n opcao = input('Escolha: ')\n try:\n opcao = int(opcao)\n if opcao == 0 :\n menu(opcao)\n break\n else:\n menu(opcao)\n print('\\n')\n except:\n menu(opcao)\n print('\\n')\n\nif __name__ == '__main__':\n main()\n\n"
},
{
"alpha_fraction": 0.5889781713485718,
"alphanum_fraction": 0.6050516366958618,
"avg_line_length": 31.259260177612305,
"blob_id": "8ff47f504f71c551a16d220591318006ba856039",
"content_id": "9785c0fedf9cbd34c298872cd176a5601d12fe68",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 897,
"license_type": "no_license",
"max_line_length": 114,
"num_lines": 27,
"path": "/sem04-q4-retornaBooleanoSeEntre0e9.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#Escreva um programa que leia um caractere e mostra o valor booleano True (verdadeiro) se for um dígito entre ‘0’ \n#e ‘9’ ou o valor booleano False (falso) caso contrário\n\ndef e_num(n, a):\n #global crc\n print(f'O caractere {a} é um dígito entre 0 e 9?')\n if n == True:\n print(f'{n}, o caractere {a} é um dígito entre 0 e 9.')\n else:\n print(f'{n}, o caractere {a} não é um dígito entre 0 e 9.')\n\ndef uni(n):\n return '0' <= n <= '9'\n\n\ndef main():\n #variável global 'crc' criada, para melhor uso da função 'e_num()' linha 4\n #global crc(solução alternativa)\n #'crc' recebe o caractere\n #n = input() \n crc = input('Digite UM, e SOMENTE UM, caractere qualquer:\\n').lower().strip()\n \n c = uni(crc) #função uni(), linha 11\n e_num(c, crc) #processmento final, função e_num(), linha 4\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.6124818325042725,
"alphanum_fraction": 0.6378809809684753,
"avg_line_length": 38.371429443359375,
"blob_id": "9328494b5de58f662cf1e7bf31760a03d02e9daa",
"content_id": "caacdc775bb7392677fdfd2a0102ef9c3deae616",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1439,
"license_type": "no_license",
"max_line_length": 133,
"num_lines": 35,
"path": "/sem08-q5-aplicacaoComJurosSimples.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n05. Pedro recebe um salário mensal e tem aumentos salariais de 5% uma vez por ano no mês de março. Pedro \ntambém tem uma dívida no cartão de crédito com uma taxa de juros de 15% ao mês. Considerando que a \nsituação se refere ao mês de outubro do ano de 2016, faça um programa leia o valor do salário e o valor \nda dívida e calcula, simulando a evolução do salário e da dívida de Pedro, em que mês e ano a dívida com \no cartão de crédito será superior ao seu próprio salário. \nRepresente os meses como inteiros de 1 a 12. \n Dica: Controle essas quatro variáveis:\n “dívida” que aumenta todo mês;\n “salário” que aumenta apenas se o número do mês for 3 (março);\n “mês” que é incrementado sempre, mas que retorna a 1 quando passar de 12;\n “ano” que só é incrementado quando o mês retornar a 1\n'''\n\ndef divida(sal, div):\n mes = 10\n ano = 2016\n while div <= sal:\n div = div + (div * 0.15)\n mes += 1\n if mes > 12:\n mes = 1\n ano += 1\n if mes == 3:\n sal = sal + (sal * 0.05)\n return mes, ano\n\ndef main():\n salr = int(input('Salário atual: '))\n divd = int(input('Valor da dívida: '))\n a, b = divida(salr, divd)\n print(f'De acordo com as informações, se sua dívida não for paga, ela irá se tornar maior que o seu salário na data de {a}/{b}.')\n \nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.566749095916748,
"alphanum_fraction": 0.5815821886062622,
"avg_line_length": 26.89655113220215,
"blob_id": "a8589b02125426cacb41ec349ade54aa717e61f3",
"content_id": "2517217882fc50695e7016af1d74039e7b5eee8c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1629,
"license_type": "no_license",
"max_line_length": 104,
"num_lines": 58,
"path": "/sem11-2-q2-lista-reescrendo-count.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n2. Usando apenas as estruturas básicas de programação, reescreva a funções count(), que recebe \numa lista e um valor e retorna o número de ocorrências do valor na lista. Por exemplo \ncount([1, 2, 3, 2, 4, 2, 5], 2) retorna 3, a quantidade de vezes que o valor 2 \naparece na lista.\nFaça a leitura pelo teclado, a leitura de um 0 (zero) encerra a leitura. Primeiro leia a lista e depois \no valor para pesquisar. Imprima a lista que foi lida, o valor pesquisado e o resultado encontrado.\n'''\n\ndef criaLista():\n lista = []\n print(f'{\"=\"*4}Criando uma lista: {\"=\"*4}')\n while True:\n n = int(input('Adicione(press 0 to stop): '))\n #n = int(input())\n if n == 0:\n break \n lista.append(n)\n return lista\n\ndef comprimento(lista):\n tem = True\n cont = 0\n i = 0\n while True:\n try:\n if lista[i] != '':\n cont += 1\n i += 1\n except:\n break\n return cont\n\ndef contem(lista, valor_pesquisa):\n tamanho = comprimento(lista)\n i = 0\n tem = 0\n while i < tamanho:\n if valor_pesquisa == lista[i]:\n tem += 1\n i += 1\n return tem\n \ndef main():\n #entrada\n lista = criaLista()\n valor_pesquisa = int(input('Digite um número para ser pesquisado na lista: '))\n\n #processamento\n tem_na_lista = contem(lista, valor_pesquisa)\n\n #saída\n print(f'Lista criada: {lista}')\n print(f'Valor para ser pesquisado: {valor_pesquisa}')\n print(f'Número de vezes que o item aparece nessa lista: {tem_na_lista}')\n \nif __name__ == '__main__':\n main()\n"
},
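For reference against the record above, and outside the exercise's "basic structures only" constraint: the hand-written `contem()` and `comprimento()` reproduce two built-ins exactly.

```python
lista = [1, 2, 3, 2, 4, 2, 5]
print(lista.count(2))  # 3, the value contem(lista, 2) recomputes
print(len(lista))      # 7, the value comprimento(lista) recomputes
```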
{
"alpha_fraction": 0.5802268981933594,
"alphanum_fraction": 0.5931928753852844,
"avg_line_length": 23.68000030517578,
"blob_id": "5b65ecbcc11908f2c44d72f10fca7a1f95b1d20d",
"content_id": "3c57cc1fa56c47fcdb9c16b8eef4dcd672b959c9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 627,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 25,
"path": "/sem08-q2-recebeNumAteDig0ERetornaMediaArit.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\nEscreva um programa que leia uma quantidade indefinida de números inteiros positivos terminada pelo \nnúmero 0 (zero). Ao final, o programa deve mostrar a média aritmética de todos os números lidos \n(excluindo o zero).\n'''\n\ndef media(a,b):\n return a / (b -1)\n\ndef main():\n total = 0\n cont = 0\n while True:\n num = int(input('Digite um número(0(zero) to stop!): '))\n total += num\n cont += 1\n if num == 0: break\n\n if cont == 1:\n pass\n else:\n print(f'A média aritmética dos números inseridos é: {media(total, cont)}.')\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.5075987577438354,
"alphanum_fraction": 0.5281155109405518,
"avg_line_length": 26.39583396911621,
"blob_id": "f9fdb82a052a77bfe01d78961e2d036d20dba7b0",
"content_id": "b2ff6dccfa89650df1d366ce69f1ed8152c8399b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1350,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 48,
"path": "/sem08-2-q4-menuFuncionalCardapio.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\nO cardápio de uma casa de lanches, especializada em sanduíches, é dado abaixo.\n[...]\nEscreva um programa que leia o código de vários itens comprados por um freguês e acumule o total da \ncompra. Ao finalizar com “X”, exiba o total a pagar.\n Observações:\n • Se for informada uma opção que não está no menu deve mostrar a mensagem “Opção \n inválida.”.\n • Enquanto o código não for 'X' o programa deve continuar lendo os itens.\n'''\n\ndef compra(opcao):\n if opcao == 'h':\n return 5.5\n elif opcao == 'c':\n return 6.8\n elif opcao == 'm':\n return 4.5\n elif opcao == 'a':\n return 7\n elif opcao == 'q':\n return 4\n else:\n print('Opção inválida.')\n return 0\n \ndef main():\n \n total = 0\n while True: \n print('''CÓDIGO PRODUTO PREÇO (R$)\nH Hamburger 5,50\nC Cheeseburger 6,80\nM Misto Quente 4,50\nA Americano 7,00\nQ Queijo Prato 4,00\nX PARA TOTAL DA CONTA''')\n opcao = input('Escolha: ').strip().lower()[0] \n if opcao == 'x' :\n print(f'Valor a pagar: R$ {total:.2f} reais.')\n break\n else:\n total += compra(opcao)\n print('\\n')\n \n\nif __name__ == '__main__':\n main()\n\n"
},
{
"alpha_fraction": 0.5081206560134888,
"alphanum_fraction": 0.5498839616775513,
"avg_line_length": 22.94444465637207,
"blob_id": "aae0a5565bfa6102f3e7118200113d133356a984",
"content_id": "78ba55ca8a7b7d759dde542587368666a8ae85b6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 435,
"license_type": "no_license",
"max_line_length": 101,
"num_lines": 18,
"path": "/sem07-q3-mediaAritDe100Nums.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#Escreva um programa que leia um conjunto 100 números inteiros e exiba o valor médio dos mesmos (com \n#duas casas decimais).\n\nimport random\n\ndef main():\n total = 0\n i = 0\n while i <= 99:#99\n #a = int(input())\n a = random.randrange(1, 100)\n print(a)\n total += a \n i += 1 \n print(\"\\nValor médio dos números inseridos: %.2f\"%(total/100))\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.6281167268753052,
"alphanum_fraction": 0.6366047859191895,
"avg_line_length": 28.453125,
"blob_id": "4d1054fb34adcb9c2f1bc71c8c9f76c5f4177f63",
"content_id": "d4a01fbdfd3192383ab3b48261c5363294ec0e68",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1893,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 64,
"path": "/sem11-q2-listas.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n2. Escreva um programa que leia um número n. Considere uma lista com n posições, e então:\n a) preencha com 0 (zero) e imprima a lista;\n b) preencha com os números de 1 a n e imprima a lista;\n c) preencha com valores inteiros lidos pelo teclado e imprima a lista;\n d) preencha na ordem inversa com valores inteiros lidos pelo teclado e imprima a lista; dica: use insert\npara sempre incluir os elementos no início da lista;\n'''\ndef preencheComZeros(num):\n lista = []\n cont = 0\n while cont < num:\n lista.append(0)\n cont += 1\n return lista\n\ndef oneToNterm(num):\n lista = []\n cont = 1\n while cont <= num:\n lista.append(cont)\n cont += 1\n return lista\n\ndef insere_c_teclado(num):\n lista = []\n cont = 1\n print('Preenchendo a lista:')\n while cont <= num: \n n_paraInserir =int(input('Digite um número: '))\n lista.append(n_paraInserir)\n cont += 1\n return lista\n\ndef inverteInsersao(num):\n lista = []\n cont = 0\n print('Preenchendo a lista para depois inverter: ')\n while cont < num:\n n_paraInserir =int(input('Digite um número: '))\n lista.insert(0,n_paraInserir)\n cont += 1\n return lista\n\ndef main():\n #entrada\n n_ezimoTermo = int(input('Tamanho da lista: '))\n\n #processamento\n lista_deZeros = preencheComZeros(n_ezimoTermo)\n lista_de1AteN_termo = oneToNterm(n_ezimoTermo)\n preenche_c_teclado = insere_c_teclado(n_ezimoTermo)\n recebe_na_ordemInversa = inverteInsersao(n_ezimoTermo)\n \n #saida\n print(f'Lista preenchida com zeros: {lista_deZeros}')\n print(f'Lista com os termos de 1 a N: {lista_de1AteN_termo}')\n print(f'Lista com os termos inseridos com o teclado: {preenche_c_teclado}')\n print(f'Lista anterior na ordem invertida: {recebe_na_ordemInversa}')\n \n \n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.46315789222717285,
"alphanum_fraction": 0.5052631497383118,
"avg_line_length": 16.799999237060547,
"blob_id": "227c6bee0f6b8c5f9d95c3467449b1cc582195a7",
"content_id": "5f5221806ac54d592adb22cd2b1a91edcf8fad8b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 95,
"license_type": "no_license",
"max_line_length": 26,
"num_lines": 5,
"path": "/desafio.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "\nresposta = input().upper()\nprint(resposta)\ni = 0\nfor i >= 0 , i < 9 , i++\n print(i+1)\n \n"
},
{
"alpha_fraction": 0.4620535671710968,
"alphanum_fraction": 0.47678571939468384,
"avg_line_length": 27.71794891357422,
"blob_id": "ba30e0dc7cf5e83c32270ba0f1f7619390732207",
"content_id": "4a4cd3be5332947120ef2379ded9e9f6dd384688",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2306,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 78,
"path": "/triangulo-1.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "# Escreva um programa que leia três lados para um triânculo\n# e mostre uma das mensagens:\n# - é triangulo equilátero\n# - é triangulo isósceles\n# - é triangulo escaleno\n# - não é triângulo\n\ndef e_triangulo(a, b, c):\n # Sem funções; Sem operadores lógicos\n # Testa se são os lados são válidos para um triângulo\n if (a < b + c):\n if (b < a + c):\n if (c < a + b):\n # É equilátero\n if (a == b):\n if (b == c):\n return 'é triangulo equilátero'\n\n #É isósceles\n if (a == b):\n return 'é triangulo isósceles'\n else:\n if (a == c):\n return 'é triangulo isósceles'\n else:\n if (b == c):\n return 'é triangulo isósceles'\n\n # É escaleno\n if (a != b):\n if (b != c):\n return 'é triangulo escaleno'\n else:\n return 'não é triângulo' \n else:\n return 'não é triângulo' \n else:\n return 'não é triângulo'\n \ndef main():\n # a = float(input('a: '))\n # b = float(input('b: '))\n # c = float(input('c: '))\n \n print('Teste de Equilátero')\n a, b, c = 2, 2, 2 # Equilátero\n print(e_triangulo(a, b, c))\n\n print('\\nTeste de Isósceles Caso 1')\n a, b, c = 3, 2, 2 # Isósceles Caso 1\n print(e_triangulo(a, b, c))\n\n print('\\nTeste de Isósceles Caso 2')\n a, b, c = 2, 3, 2 # Isósceles Caso 2\n print(e_triangulo(a, b, c))\n\n print('\\nTeste de Isósceles Caso 3')\n a, b, c = 2, 2, 3 # Isósceles Caso 3\n print(e_triangulo(a, b, c))\n\n print('\\nTeste de Escaleno')\n a, b, c = 3, 4, 5 # Escaleno\n print(e_triangulo(a, b, c))\n\n print('\\nTeste de Não é triângulo Caso 1')\n a, b, c = 9, 4, 5 # Não é triângulo\n print(e_triangulo(a, b, c))\n\n print('\\nTeste de Não é triângulo Caso 2')\n a, b, c = 3, 9, 5 # Não é triângulo\n print(e_triangulo(a, b, c))\n\n print('\\nTeste de Não é triângulo Caso 3')\n a, b, c = 3, 4, 9 # Não é triângulo\n print(e_triangulo(a, b, c))\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.7553017735481262,
"alphanum_fraction": 0.7683523893356323,
"avg_line_length": 75.625,
"blob_id": "c701fb7fd1b29dffd932e26003d7a1268992262f",
"content_id": "2c439f67905daf5fcceadb7f8636a557a8f066b6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 655,
"license_type": "no_license",
"max_line_length": 151,
"num_lines": 8,
"path": "/sem01-q1-precoComDesconto.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#variável “preco”, do tipo float, recebe o valor do preço a partir da leitura do teclado\npreco = float(input(\"Digite o preço: \"))\n#variável “preco_com_desconto” recebe o valor da variável “preco” multiplicado por 0.90\npreco_com_desconto = preco * 0.90\n#atualização do valor atribuído para a variável “preco_com_desconto”, por meio da função “round”,limitando a saída para 2 casas decimais após a virgula\npreco_com_desconto = round(preco_com_desconto, 2)\n#saída final, após o precessamento, do valor do preço com desconto guardado na variável “preco_com_desconto”\nprint(\"Preço com desconto: \", preco_com_desconto)\n"
},
{
"alpha_fraction": 0.45506420731544495,
"alphanum_fraction": 0.4579172730445862,
"avg_line_length": 21.612903594970703,
"blob_id": "48977f9b42bd0d548400bbc314d90dbff771e673",
"content_id": "641285c05400a635b251b349afc9574d46efd1ce",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 707,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 31,
"path": "/sem05-2-q1-retornaIdadeExata.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "def idade(d, m, a, dn, mn, an):\n if d < dn and m <= mn and a > an:\n return (a - an) - 1\n elif d >= dn and m >= mn and a > an:\n return a - an\n else:\n return 0\n \n \n \n\n\ndef main():\n \n try:\n dd = int(input('Que dia é hoje?\\n'))\n mm = int(input('Em que mês estamos?\\n'))\n yy = int(input('Em que ano estamos?\\n'))\n\n d_nasc = int(input('Em que dia você nasceu?\\n'))\n m_nasc = int(input('Em que mês você nasceu?\\n'))\n a_nasc = int(input('Em que ano você nasceu?\\n'))\n \n d = idade(dd, mm, yy, d_nasc, m_nasc, a_nasc)\n \n print(d)\n except:\n print()\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.34546196460723877,
"alphanum_fraction": 0.41823384165763855,
"avg_line_length": 22.27618980407715,
"blob_id": "393ab7303a143eb3f63fea469e54d669c749c184",
"content_id": "bff5c4219000ab2aec70c617bcbf708231be22f5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2466,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 105,
"path": "/sem05-q4-retornaSigno.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#Escreva um programa que leia a data de nascimento do usuário, e informa qual o seu signo. Considere exatamente: \n#Áries (21/03 a 19/04); Touro (20/04 a 20/05); Gêmeos (21/05 a 21/06); Câncer (22/06 a 22/07); Leão (23/07 a \n#22/08); Virgem (23/08 a 22/09); Libra (23/09 a 22/10); Escorpião (23/10 a 21/11); Sagitário (22/11 a 21/12); \n#Capricórnio (22/12 a 19/01); Aquário (20/01 a 18/02); Peixes (19/02 a 20/03);\n\ndef t_sig(a):\n if a == 1:\n return 'Aquário'\n elif a == 2:\n return 'Peixes'\n elif a == 3:\n return 'Áries'\n elif a == 4:\n return 'Touro'\n elif a == 5:\n return 'Gêmeos'\n elif a == 6:\n return 'Câncer'\n elif a == 7:\n return 'Leão'\n elif a == 8:\n return 'Virgem'\n elif a == 9:\n return 'Libra'\n elif a == 10:\n return 'Escorpião'\n elif a == 11:\n return 'Sagitário'\n else:\n return 'Capricórnio'\n \n\ndef sig(d, m ):\n if m == 1:\n if d >= 20:\n print(t_sig(1))\n else:\n print(t_sig(12))\n elif m == 2:\n if d <= 18:\n print(t_sig(1))\n else:\n print(t_sig(2)) \n elif m == 3:\n if d <= 20:\n print(t_sig(2))\n else:\n print(t_sig(3)) \n elif m == 4:\n if d <= 19:\n print(t_sig(3))\n else:\n print(t_sig(4))\n elif m == 5:\n if d <= 20:\n print(t_sig(4))\n else:\n print(t_sig(5))\n elif m == 6:\n if d <= 21:\n print(t_sig(5))\n else:\n print(t_sig(6))\n elif m == 7:\n if d <= 22:\n print(t_sig(6))\n else:\n print(t_sig(7))\n elif m == 8:\n if d <= 22:\n print(t_sig(7))\n else:\n print(t_sig(8))\n elif m == 9:\n if d <= 22:\n print(t_sig(8))\n else:\n print(t_sig(9))\n elif m == 10:\n if d <= 22:\n print(t_sig(9))\n else:\n print(t_sig(10))\n elif m == 11:\n if d <= 21:\n print(t_sig(10))\n else:\n print(t_sig(11))\n elif m == 12:\n if d <= 21:\n print(t_sig(11))\n else:\n print(t_sig(12)) \n\ndef main():\n \n dd = int(input('dia que você nasceu: \\n'))\n mm = int(input('mês que você nasceu: \\n'))\n \n\n \n sig(dd, mm)\n \nif __name__ == '__main__':\n main()\n\n\n"
},
{
"alpha_fraction": 0.5481220483779907,
"alphanum_fraction": 0.5563380122184753,
"avg_line_length": 25.625,
"blob_id": "656053af9b3f4f7501bb0b072ca4a040ff4cfd90",
"content_id": "60e424558480f78ec3a35e59044e4f276c52da9a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1720,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 64,
"path": "/sem15-code_clube-porta_da_fortuna-desafio02-consertando_a_entrada.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "from random import *\n\n\n\ndef desafio_consetando_a_entrada():\n \n #imprime as três portas e as instruções do jogo\n print('''Porta da Fortuna!\n=========\n\nExiste um super prêmio atrás de uma dessas 3 portas!\nAdivinhe qual é a porta certa para ganhar o prémio!\n _____ _____ _____\n| | | | | |\n| [1] | | [2] | | [3] |\n| o| | o| | o|\n|_____| |_____| |_____|\n''')\n\n score = 0\n\n #o usuário muda esta variável para terminar o jogo\n jogando = True\n \n #repetir, enquanto a variável 'jogando' estiver com valor \"True\" \n while jogando == True:\n\n print('\\nEscolha um porta (1, 2 ou 3):') \n #get the chosen door and store it as an integer (whole number)\n chosenDoor = input()\n chosenDoor = int(chosenDoor)\n\n #randomly choose the winning door number (between 1 and 3)\n winningDoor = randint(1,3)\n\n #show the player the winning and chosen door numbers\n print(\"A porta escolhida foi a\", chosenDoor)\n print(\"A pota certa é a\", winningDoor)\n\n #player wins if the chosen door and winning door number are the same\n if chosenDoor == winningDoor:\n print(\"Parabéns!\")\n score += 1\n else:\n print(\"Que peninha!\")\n\n #pergunte ao jogador se ele quer continuar jogando\n print(\"\\nVocê quer jogar de novo?(s/n)\")\n resposta = input()[0].lower()\n\n #termina o jogo se o jogador digitar 'n'\n if resposta == 'n':\n jogando = False\n\n print('Obrigado por jogar.') \n print(\"Sua pontuação final é\", score,'.')\n \n\ndef main():\n desafio_consetando_a_entrada()\n\n\nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.6169312000274658,
"alphanum_fraction": 0.6402116417884827,
"avg_line_length": 30.5,
"blob_id": "ace2a97a17fc466af1d08ef485a94a664d84c683",
"content_id": "ddd1c1a87208a408c4b1062fa722125e59cd43ca",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 970,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 30,
"path": "/sem07-2-q4t2-cancaoDosProgramadores.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n04. Modifique mais um vez a canção dos programadores, dessa vez, gerando a canção dos bons \nprogramadores, que resolvem 11 erros de cada vez e ao chegar a zero declaram que o software está \nestabilizado. Atenção para o exemplo a seguir, especialmente, os versos finais.\n 99 bugs no software, pegue onze deles e conserte...\n Tecle “Ctrl+F5”\n 88 bugs no software, pegue onze deles e conserte...\n Tecle “Ctrl+F5”\n 77 bugs no software, pegue onze deles e conserte...\n Tecle “Ctrl+F5”\n ...\n 11 bugs no software, pegue onze deles e conserte...\n Tecle “Ctrl+F5”\n Sem erros no software! Está estabilizado!\n'''\n\ndef cancao():\n i = 99\n for cont in range(9): \n print(f'{i} bugs no software, pegue onze deles e conserte...')\n print('Tecle \"Ctrl+F5\"')\n i -= 11\n \n print('Sem erros no software! Está estabilizado!')\n\ndef main():\n cancao()\n\nif __name__ == '__main__':\n main()\n"
},
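The countdown in `cancao()` above tracks two counters by hand; `range` with a negative step expresses the same nine verses directly. A minimal equivalent sketch (not from the repository):

```python
def cancao():
    # range yields 99, 88, ..., 11 — nine verses, eleven bugs fixed each time
    for bugs in range(99, 0, -11):
        print(f'{bugs} bugs no software, pegue onze deles e conserte...')
        print('Tecle "Ctrl+F5"')
    print('Sem erros no software! Está estabilizado!')
```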
{
"alpha_fraction": 0.5547300577163696,
"alphanum_fraction": 0.5710747838020325,
"avg_line_length": 26.2702693939209,
"blob_id": "f98b96d5b642d8e9d0b31932466c5718f484b46f",
"content_id": "48b39fe0f2225f7f3747dc0876968c992d9d4587",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2075,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 74,
"path": "/triangulo-3.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "# Escreva um programa que leia três lados para um triângulo\n# e mostre uma das mensagens:\n# - é triangulo equilátero\n# - é triangulo isósceles\n# - é triangulo escaleno\n# - não é triângulo\n\n\n# Com funções; Com operadores lógicos\n# Testa se são os lados são válidos para um triângulo\ndef lados_validos(a, b, c):\n return (a < b + c) and (b < a + c) and (c < a + b)\n\n# É equilátero\ndef e_equilatero(a, b, c):\n return lados_validos(a, b, c) and (a == b) and (b == c)\n\n#É isósceles\ndef e_isosceles(a, b, c):\n return lados_validos(a, b, c) and ((a == b) or (b == c) or (a == c))\n\n# É escaleno\ndef e_escaleno(a, b, c):\n return lados_validos(a, b, c) and (a != b) and (b != c)\n\ndef e_triangulo(a, b, c):\n if e_equilatero(a, b, c):\n return 'é triangulo equilátero'\n elif e_isosceles(a, b, c):\n return 'é triangulo isósceles'\n elif e_escaleno(a, b, c):\n return 'é triangulo escaleno'\n else:\n return 'não é triângulo' \n\ndef main():\n # a = float(input('a: '))\n # b = float(input('b: '))\n # c = float(input('c: '))\n \n print('Teste de Equilátero')\n a, b, c = 2, 2, 2 # Equilátero\n print(e_triangulo(a, b, c))\n\n print('\\nTeste de Isósceles Caso 1')\n a, b, c = 3, 2, 2 # Isósceles Caso 1\n print(e_triangulo(a, b, c))\n\n print('\\nTeste de Isósceles Caso 2')\n a, b, c = 2, 3, 2 # Isósceles Caso 2\n print(e_triangulo(a, b, c))\n\n print('\\nTeste de Isósceles Caso 3')\n a, b, c = 2, 2, 3 # Isósceles Caso 3\n print(e_triangulo(a, b, c))\n\n print('\\nTeste de Escaleno')\n a, b, c = 3, 4, 5 # Escaleno\n print(e_triangulo(a, b, c))\n\n print('\\nTeste de Não é triângulo Caso 1')\n a, b, c = 9, 4, 5 # Não é triângulo\n print(e_triangulo(a, b, c))\n\n print('\\nTeste de Não é triângulo Caso 2')\n a, b, c = 3, 9, 5 # Não é triângulo\n print(e_triangulo(a, b, c))\n\n print('\\nTeste de Não é triângulo Caso 3')\n a, b, c = 3, 4, 9 # Não é triângulo\n print(e_triangulo(a, b, c))\n\nif __name__ == '__main__':\n main()\n\n"
},
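Sorting the sides first removes the need to test every permutation in the classifier above: the triangle inequality only has to be checked against the largest side. A compact sketch (the `classifica` helper is hypothetical, not part of the file):

```python
def classifica(a, b, c):
    x, y, z = sorted((a, b, c))   # z is the largest side
    if z >= x + y:                # triangle inequality fails
        return 'não é triângulo'
    if x == z:                    # smallest equals largest => all equal
        return 'é triangulo equilátero'
    if x == y or y == z:
        return 'é triangulo isósceles'
    return 'é triangulo escaleno'

assert classifica(2, 2, 2) == 'é triangulo equilátero'
assert classifica(3, 2, 2) == 'é triangulo isósceles'
assert classifica(3, 4, 9) == 'não é triângulo'
```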
{
"alpha_fraction": 0.5919597744941711,
"alphanum_fraction": 0.602512538433075,
"avg_line_length": 29.615385055541992,
"blob_id": "3e2cf1a6b2583a200303a7496142df2cda4080bd",
"content_id": "b89f8d425a28b146e526d903cd634dd0681f84c9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2018,
"license_type": "no_license",
"max_line_length": 112,
"num_lines": 65,
"path": "/sem15-q1-dicionario-frequencia_relativa_de_cada_letra_no_texto.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n01.Escreva um programa que leia um texto e calcula a frequência relativa de cada letra no texto. Desconsidere a \ndiferença entre maiúsculas e minúsculas mas considere caracteres acentuados. Por exemplo, para a frase:\nSe, a princípio, a ideia não é absurda, então não há esperança \npara ela. (Albert Einstein)\nRetorna o dicionário:\n{'S': 4, 'E': 10, 'A': 15, 'P': 4, 'R': 5, 'I': 7, 'N': 7, 'C': \n2, 'O': 4, 'D': 2, 'B': 2, 'U': 1, 'T': 3, 'H': 1, 'L': 2}\n'''\n\ndef conta_caracteres(frase):\n aparece_caractere = {}\n \n for char in frase:#corre todos os caracteres da frase\n keys = list(aparece_caractere.keys())#atualiza a quantidade de chaves\n if char in keys:\n aparece_caractere[char] = aparece_caractere[char] + 1\n\n if char not in keys:\n \n aparece_caractere[char] = int(1)\n\n return aparece_caractere\n\n\ndef retira_acentuacao(sentence):\n for char in sentence:\n if char in 'áàâã':\n sentence = sentence.replace(char,'a')\n if char in 'éê':\n sentence = sentence.replace(char,'e')\n if char in 'í':\n sentence = sentence.replace(char,'i')\n if char in 'óôõ':\n sentence = sentence.replace(char,'o')\n if char in 'úü':\n sentence = sentence.replace(char,'u')\n if char in 'ç':\n sentence = sentence.replace(char,'c')\n\n return sentence.upper()\n\n \ndef retira_pontucao(sentence):\n #remove a pontuação da frase\n for char in ' ?!.,()-_\":':\n sentence = sentence.replace(char,'')\n\n sentence = retira_acentuacao(sentence.lower())\n \n return sentence\n\n\ndef main():\n #entrada\n frase = input('Digite uma frase: ').upper().strip()#caixa alta\n \n #processamento\n frase_sem_pontuacao_ou_acento = retira_pontucao(frase)\n caracteres = conta_caracteres(frase_sem_pontuacao_ou_acento)\n print('Frequência relativa de cada letra no texto: ')\n print(caracteres)\n\nif __name__ == \"__main__\":\n main()\n"
},
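The standard library can do most of the work in the record above: `unicodedata` strips accents generically (instead of per-character `replace` chains) and `collections.Counter` counts in one pass. A sketch under the same rules (letters only, case-insensitive), not part of the repository:

```python
import unicodedata
from collections import Counter

def frequencia(texto):
    # NFD decomposes 'á' into 'a' + combining accent; drop the marks (category 'Mn')
    sem_acento = ''.join(c for c in unicodedata.normalize('NFD', texto)
                         if unicodedata.category(c) != 'Mn')
    return Counter(c for c in sem_acento.upper() if c.isalpha())

print(frequencia('Se, a princípio, a ideia não é absurda...'))
```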
{
"alpha_fraction": 0.4688715934753418,
"alphanum_fraction": 0.5136186480522156,
"avg_line_length": 18,
"blob_id": "1b01eac9cc648920f7a277f2caddfb0068d3be9b",
"content_id": "9d1815b4cd5ca1a8c7036a9b5975b9b8c241f61d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 516,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 27,
"path": "/sem07-q5-recebe100NumsERetornaOMaior.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#05. Escreva um programa que leia um conjunto de 100 números inteiros positivos e determine o maior deles.\n\nimport random\n\ndef num_maior(a):\n global maior\n if a > maior:\n maior = a\n \ndef main():\n \n global maior\n maior = 0\n \n i = 0\n for cont in range(100):#100\n #a = int(input())\n a = random.randrange(1, 1000)\n print(f'Número({i+1}/100): {a}')\n num_maior(a)\n i += 1\n print(maior)\n\n \n \nif __name__ == '__main__':\n main()\n\n"
},
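The `global` variable in the record above can be avoided by keeping the running maximum local, or by leaning on the built-in `max`. A minimal sketch (not part of the file):

```python
import random

def maior_de(n=100):
    maior = 0  # inputs are positive, so 0 is a safe starting value
    for i in range(n):
        a = random.randrange(1, 1000)
        print(f'Número({i + 1}/{n}): {a}')
        maior = max(maior, a)
    return maior

print(maior_de())
```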
{
"alpha_fraction": 0.7160493731498718,
"alphanum_fraction": 0.7242798209190369,
"avg_line_length": 59.75,
"blob_id": "fc60b3a81f17fc1550f5604bfcdef0495677b9ea",
"content_id": "44e139a11b2f810940f8d44d5c1611e1ee8ddfa0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 247,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 4,
"path": "/sem02-q1-anterceSucessInteiros.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#a variável \"n\", do tipo int, recebe um valor a partir da leitura do teclado\nn = int(input('Digite um número: '))\n#impressão dos valores antecessores e sucessores do valor guardado na variável \"n\"\nprint(f'Antecessor: {n-1}; Sucessor: {n+1}.')\n"
},
{
"alpha_fraction": 0.7313432693481445,
"alphanum_fraction": 0.7405281066894531,
"avg_line_length": 50.235294342041016,
"blob_id": "b5ab9209aba0c9ebd13cd79def5924ed3e63c55f",
"content_id": "cbd8254efa3b4626b0a144eb7c3c3cba80b032e3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 889,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 17,
"path": "/sem03-q5-funcaoConverteMinutosEmHorasEMinutos.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#início de uma função nomeada \"minutos_para_horas\", com um argumetos\ndef minutos_para_horas(qtd_minutos):\n #variável \"horas\" recebe o valor passado como argumento e dividido,divisão inteira, por 60 \n horas = qtd_minutos // 60\n print(horas)\n #variável \"minutos\" recebe o valor passado como argumento e dividido, divisão modular, por 60 \n minutos = qtd_minutos % 60\n print(minutos)\n #retorno da função \n return f'{horas}h{minutos}min'\n\n#variável \"minutos\", recebe um valor a partir da leitura do teclado\nminutos = int(input('Quantidade de minutos: '))\n#variável \"horas\", o retorno da função \"minutos_para_horas\" com a passagem do valor de \"minutos\" como argumento\nhoras = minutos_para_horas(minutos)\n#imprimir na tela a mensagem a inserção do valor de \"horas\" concatenado conforme a formatação\nprint(f'{minutos} minutos são equivalentes a {horas}')\n"
},
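`divmod` computes the floor division and the remainder in one call, which is exactly the hours/minutes split performed above. A one-function sketch (not from the repository):

```python
def minutos_para_horas(qtd_minutos):
    horas, minutos = divmod(qtd_minutos, 60)  # quotient and remainder at once
    return f'{horas}h{minutos}min'

assert minutos_para_horas(135) == '2h15min'
```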
{
"alpha_fraction": 0.3480769097805023,
"alphanum_fraction": 0.42692306637763977,
"avg_line_length": 16.33333396911621,
"blob_id": "7f64427b03b38efee88356e56099ea93c3958364",
"content_id": "4b6e9eb16e9845ed9f4e15af6db8ddfbe0f3f703",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1046,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 60,
"path": "/sem05-q2-contaDigitParesDeNumEntre100e999.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "# Escreva um programa que leia um número inteiro entre 100 e 999, mostre quantos dígitos pares existem nesse \n# número. Por exemplo: 245 tem 2 dígitos pares; 135 tem 0 dígitos pares; 134 tem 1 dígito par.\n\ndef separa(a):#123\n b = a % 10\n #print(b)\n c = a // 10\n d = c % 10\n #print(d)\n e = c // 10\n f = e % 10\n #print(f)\n return b, d, f\n\ndef eh_par2(a, b, c):\n if a % 2 == 0:\n print(a)\n if b % 2 == 0:\n print(b)\n if c % 2== 0:\n print(c)\ndef eh_par(a, b, c):\n i = 0\n if a % 2 == 0:\n i += 1\n if b % 2 == 0:\n i += 1\n if c % 2== 0:\n i += 1 \n \n return i\n\ndef main():\n try:\n n = int(input('num: '))\n #n = int(input())\n if n >= 100 and n <= 999:\n n1, n2, n3 = separa(n)\n c = eh_par(n1, n2, n3)\n print(c)\n else:\n print()\n \n #eh_par2(n1, n2, n3)\n \n except:\n print()\n\nif __name__ == '__main__':\n main()\n\n'''\n123 L 10\n120 12\n=3\n20\n30\n30\n0\n'''\n"
},
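Treating the number as a string avoids the manual `//` and `%` digit extraction in the record above, and works for any number of digits. A sketch of the same even-digit count (not part of the file):

```python
def digitos_pares(n):
    # int(d) % 2 == 0 is True for even digits; sum() counts the matches
    return sum(1 for d in str(n) if int(d) % 2 == 0)

assert digitos_pares(245) == 2
assert digitos_pares(135) == 0
assert digitos_pares(134) == 1
```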
{
"alpha_fraction": 0.43729373812675476,
"alphanum_fraction": 0.49339935183525085,
"avg_line_length": 20.64285659790039,
"blob_id": "fb6dce7c230e80014fe0de7243a1281cca268826",
"content_id": "a890135889928f01340f5e0cf3a273755d2cb493",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 618,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 28,
"path": "/sem07-q4-seqcNumDe10em10Ate1000.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n04. Escreva um programa que gere a seguinte sequência: \n10, 20, 30, 40, ..., 990, 1000.\nConsidere a separação dos números por vírgula seguido de espaço em brando e o pontos no final da \nsequência.\n'''\n\ndef main():\n #contadores\n i = 0\n a = 0\n\n #gerando a sequência\n while i <= 99:\n a += 10\n if a < 1000:\n print(a, end = ', ') \n i += 1\n else:#colocando o '.' depois de 1000\n print(a, end = '.')\n i += 1 \n \nif __name__ == '__main__':\n main()\n\n#opçôes para solução:\n#vetor = list(range(5))\n#ii = []\n"
},
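`str.join` over a `range` produces the comma separators and lets the final period be appended once, with no counters to track. An equivalent sketch (not from the repository):

```python
def sequencia():
    # 10, 20, ..., 1000 joined with ', ', terminated by a period
    print(', '.join(str(n) for n in range(10, 1001, 10)) + '.')

sequencia()  # 10, 20, 30, ..., 990, 1000.
```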
{
"alpha_fraction": 0.5701492428779602,
"alphanum_fraction": 0.5895522236824036,
"avg_line_length": 29.454545974731445,
"blob_id": "79f27aea6cb9b8a89c2e078f7a21f60d460cbcae",
"content_id": "ce1be61262807d3cc4be3428db45dc58efa570bd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 685,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 22,
"path": "/sem04-2-q4-retornaBooleanoSeVogalOuNumero.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "#04. Escreva um programa que leia um caractere e mostra o valor booleano True (verdadeiro) se for uma LETRA (vogal \n#ou consoante) ou um NÚMERO (entre ‘0’ e ‘9’) ou valor booleano False (falso) caso contrário.\n\ndef entr():\n s = input('Digite um, e somente um, caractere: ').lower().strip()\n return ord(s)\n\ndef oQue(l):\n if (l >= 97 and l <= 122) or (l >= 48 and l <= 57):\n return True\n else:\n return False\n \ndef main():\n a = entr()\n if oQue(a) == True:\n print(f'O caractere {chr(a)} é uma letra ou um número.')\n else:\n print(f'O caractere {chr(a)} não é uma letra ou um número.')\n\nif __name__ == '__main__':\n main()\n"
},
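The `ord()` range checks in the record above are what `str.isalpha`/`str.isdigit` already implement (and they also accept accented letters). A sketch (hypothetical helper, not part of the file):

```python
def eh_letra_ou_numero(s):
    c = s.strip()[:1]   # keep only the first character
    return c.isalpha() or c.isdigit()

assert eh_letra_ou_numero('a') and eh_letra_ou_numero('7')
assert not eh_letra_ou_numero('?')
```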
{
"alpha_fraction": 0.5301587581634521,
"alphanum_fraction": 0.5460317730903625,
"avg_line_length": 25.25,
"blob_id": "ec944f640d5f631760e8b70787391d0a4054bc9f",
"content_id": "ca6215299e3ca06fa7bfd996d4fec3f6c84aed46",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 649,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 24,
"path": "/sem09-2-q4-ehPrimo.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n04. Um número é, por definição, primo se ele não tem divisores, exceto 1 e ele próprio. Escreva um programa \nque leia um número e determine se ele é ou não primo.\n'''\n\ndef eh_primo(n):\n if n == 0: return False\n i = n\n cont = 0\n while i > 0:\n if n % i == 0:\n cont += 1\n i -= 1 \n return cont <= 2\n\ndef main():\n n = int(input('Digite um número qualquer para saber se ele é um número primo: '))\n if eh_primo(n) == True:\n print(f'O número {n} é um número primo.')\n else:\n print(f'O número {n} não é um número primo.')\n \nif __name__ == '__main__':\n main()\n"
},
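The divisor loop in `eh_primo` above is O(n) and classifies 1 as prime; testing divisors only up to the square root is enough, because divisors come in pairs. A corrected sketch (not part of the file):

```python
def eh_primo(n):
    if n < 2:              # 0, 1 and negatives are not prime
        return False
    i = 2
    while i * i <= n:      # only need divisors up to sqrt(n)
        if n % i == 0:
            return False
        i += 1
    return True

assert eh_primo(2) and eh_primo(13)
assert not eh_primo(1) and not eh_primo(9)
```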
{
"alpha_fraction": 0.5931780338287354,
"alphanum_fraction": 0.6056572198867798,
"avg_line_length": 33.342857360839844,
"blob_id": "9777a8b00f41edf018619532b14510d28b45cf2d",
"content_id": "2204a8c17368f9342f728c651e512614339a4336",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1204,
"license_type": "no_license",
"max_line_length": 117,
"num_lines": 35,
"path": "/sem15-q2-dicionario-um_entrevistador_precisa_contar_o_ano_de_nascimento.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n02.Um entrevistador precisa saber o ano de nascimento em que 1000 (mil) pessoas nasceram e, no final, deseja saber a \nquantidade de pessoas que nasceram em cada ano. Crie um dicionário e, a cada valor lido, some 1 (um) na chave \ncorrespondente ao ano do dicionário. Mostre quantas pessoas nasceram em cada ano exibindo do mais antigo ao mais \nrecente.\n'''\n\ndef main():\n print(\"Entrevista com mil pessoas: \")\n natalidade_por_ano = {}\n for ano in range(1000):\n ano = int(input('Em que ano o entrevistado nasceu?\\n\\> '))\n keys = list(natalidade_por_ano.keys())#atualiza a quantidade de chaves\n if ano in keys:\n natalidade_por_ano[ano] = natalidade_por_ano[ano] + 1\n\n if ano not in keys:\n natalidade_por_ano[ano] = 1\n\n maior = menor = keys[0]\n for ano in keys:\n if ano > maior:\n maior = ano\n if ano < menor:\n menor = ano\n \n ano_inicial = menor\n ano_final = maior\n print('Resultado da pesquisa: ')\n while ano_inicial <= ano_final: \n print(f'{ano_inicial}: {natalidade_por_ano[ano_inicial]}')\n ano_inicial += 1\n \nif __name__ == '__main__':\n main()\n"
},
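`collections.Counter` plus `sorted` covers both the tallying and the oldest-to-newest output done manually above. A condensed sketch of the same survey loop (reading 5 answers instead of 1000 for brevity; not part of the file):

```python
from collections import Counter

def pesquisa(n=5):
    anos = Counter(int(input('Em que ano o entrevistado nasceu?\n> '))
                   for _ in range(n))
    for ano in sorted(anos):   # oldest to most recent
        print(f'{ano}: {anos[ano]}')
```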
{
"alpha_fraction": 0.5604963898658752,
"alphanum_fraction": 0.5667011141777039,
"avg_line_length": 28.303030014038086,
"blob_id": "ce6e3edb14c93d8d934db5d7142632b22b03cde0",
"content_id": "a608f2bb0647f0bf8185ec2c45955baef71a1084",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 979,
"license_type": "no_license",
"max_line_length": 118,
"num_lines": 33,
"path": "/sem05-q1-contaCaracteresDeNomes.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "# Escreva um programa que leia o nome e o estado civil de uma pessoa, considere apenas “C” para casado e “S” para \n# solteiro. Se a pessoa for casada, leia, também, o nome do cônjuge. Mostre quantos caracteres no total existem no(s) \n# nome(s) lido(s).\n\n'''\ndef main():\n nome = input('Seu nome: ').strip()\n est_civil = input('Seu estado civil:(S- solteiro / C - casado) ').strip().lower()\n if est_civil == 'c':\n nome2 = input('Nome do cônjuge: ').strip()\n nome = nome + nome2\n print(len(nome))\n\nif __name__ == '__main__':\n main()\n'''\n\ndef main():\n \n nome = input('Seu nome: ').strip()\n #nome = input()#.strip()\n\n est_civil = input('Seu estado civil:(S- solteiro / C - casado) ').strip().lower()\n #est_civil = int(input())\n if est_civil == 1:\n\n nome2 = input('Nome do cônjuge: ').strip()\n #nome2 = input()#.strip()\n nome = nome + nome2\n print(len(nome))\n \nif __name__ == '__main__':\n main()\n"
},
{
"alpha_fraction": 0.5482258796691895,
"alphanum_fraction": 0.5622189044952393,
"avg_line_length": 28.41176414489746,
"blob_id": "f60bf0c28f67ee6afa9a4981b2d14726d0df1aee",
"content_id": "e3bd312b705d7953c2f02f111bb141ae6d6fc4b8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2015,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 68,
"path": "/sem13-q1-arrays_maiorEMenorElementoEmMatrizQuadrada.py",
"repo_name": "AzzyOxx/pec-atividades",
"src_encoding": "UTF-8",
"text": "'''\n01.Faça um programa para ler uma matriz quadrada de ordem n e mostre uma tupla com a posição (linha e coluna) do \nmaior e menor elemento. O valore de n é inteiro, positivo e deve ser informados pelo usuário\n'''\n\ndef preenche_matriz(linhas, colunas):\n matriz = [] #lista vazia\n for lin in range(linhas):\n linha = [] # cada linha é uma lista (vetor)\n for col in range(colunas):\n n = int(input('Inserir número inteiro: '))\n linha.append(n)\n # insere a linha na matriz\n matriz.append(linha)\n return matriz\n\n'''\ndef imprime_matriz(matriz): \n for linha in matriz: \n print('|', end = '') \n for elemento in linha:\n print(f'{elemento:3d}', end = ' ') \n print('|')\n\n-- ou --\n\ndef imprime_matriz_indice(matriz):\n for i_linha in range(len(matriz)):\n print('|', end = '')\n for i_coluna in range(len(matriz[i_linha])):\n print(f'{matriz[i_linha][i_coluna]:3d}', end = ' ')\n print('|')\n'''\n\ndef imprime_maiorOUmenor_matriz(matriz):\n maior = menor = matriz[0][0], 1, 1\n line = col = 1\n for linha in matriz:\n col = 1\n for elemento in linha: \n if elemento > maior[0]:\n maior = elemento, line, col \n if elemento < menor[0]:\n menor = elemento, line, col\n col += 1\n line += 1\n maior = maior[1] , maior[2] \n menor = menor[1] , menor[2]\n '''\n maior = maior[1] -1, maior[2] -1\n menor = menor[1] -1, menor[2] -1\n '''\n return maior, menor\n \ndef main():\n #entrada de dados\n ordemDaMAtriz = int(input('Ordem da matriz quadrada: '))\n\n #processamento\n matriz = preenche_matriz( ordemDaMAtriz, ordemDaMAtriz )\n maiorEmenor = imprime_maiorOUmenor_matriz(matriz)\n\n #saída\n print('Posição do maior número inserido: ',maiorEmenor[0])\n print('Posição do menor número inserido: ',maiorEmenor[1])\n \nif __name__ == '__main__':\n main()\n\n"
}
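`enumerate` yields the row/column indices directly, avoiding the manual `line`/`col` counters used in the record above. A sketch returning the same (row, column) tuples (hypothetical helper, not part of the file):

```python
def posicoes_maior_menor(matriz):
    maior = menor = (matriz[0][0], 1, 1)
    for i, linha in enumerate(matriz, start=1):
        for j, valor in enumerate(linha, start=1):
            if valor > maior[0]:
                maior = (valor, i, j)
            if valor < menor[0]:
                menor = (valor, i, j)
    return (maior[1], maior[2]), (menor[1], menor[2])

assert posicoes_maior_menor([[1, 2], [3, 0]]) == ((2, 1), (2, 2))
```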
] | 127 |
nishgaba-ai/computer-vision
|
https://github.com/nishgaba-ai/computer-vision
|
f715bef728ae03c56e50e7d3b2fc7d025590495b
|
00bf3fff95c5b2f31eaca7a67b7e7e74f65a7595
|
de272f4dc1f4072da93cf57ab012eb8e9f74fac5
|
refs/heads/master
| 2022-12-16T04:21:40.104506 | 2019-07-22T09:01:15 | 2019-07-22T09:01:15 | 188,984,867 | 0 | 0 |
MIT
| 2019-05-28T08:15:08 | 2019-07-22T09:01:18 | 2022-12-08T05:50:53 |
Python
|
[
{
"alpha_fraction": 0.6528264284133911,
"alphanum_fraction": 0.6613306403160095,
"avg_line_length": 44.30232620239258,
"blob_id": "630a505924a84fa26ae2e7a09b5b7661c283ff65",
"content_id": "6dc30afd7f141552a9172b4157c0d637450c4fb5",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1999,
"license_type": "permissive",
"max_line_length": 130,
"num_lines": 43,
"path": "/src/threshold.py",
"repo_name": "nishgaba-ai/computer-vision",
"src_encoding": "UTF-8",
"text": "#Idea of thresholding is to hold pixels between two values 0 and 1\r\nimport cv2 as cv\r\n'''type:binary,binaryinv,threstrunc,threshtozero,threstozeroinv,adaptivethreshmeanc,adaptivethreshgaussian\r\nSimple Thesholding:\r\n cv.THRESH_BINARY\r\n cv.THRESH_BINARY_INV\r\n cv.THRESH_TRUNC\r\n cv.THRESH_TOZERO\r\n cv.THRESH_TOZERO_INV\r\nAdaptive Thresholding:\r\n cv.ADAPTIVE_THRESH_MEAN_C \r\n cv.ADAPTIVE_THRESH_GAUSSIAN_C \r\n '''\r\nclass threshold:\r\n def __init__(self,path,vtype='binary',thres=127,maxval=255,blockSize=11,C=2):\r\n self.vtype=vtype\r\n self.path=path\r\n self.thres=thres\r\n self.maxval=maxval\r\n self.blockSize=blockSize\r\n self.C=C\r\n def thresholdimage(self):\r\n img=cv.imread(self.path)\r\n img_grey = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\r\n if self.vtype is 'binary':\r\n return cv.threshold(img,self.thres,self.maxval,cv.THRESH_BINARY)\r\n elif self.vtype is'binaryinv':\r\n return cv.threshold(img,self.thres,self.maxval,cv.THRESH_BINARY_INV)\r\n elif self.vtype is 'threstrunc':\r\n return cv.threshold(img,self.thres,self.maxval,cv.THRESH_TRUNC)\r\n elif self.vtype=='threshtozero':\r\n return cv.threshold(img,self.thres,self.maxval,cv.THRESH_TOZERO)\r\n elif self.vtype=='threstozeroinv':\r\n return cv.threshold(img,self.thres,self.maxval,cv.THRESH_TOZERO_INV)\r\n elif self.vtype=='adaptivethreshmeanc':\r\n ret,th1 = cv.threshold(img,self.thres,self.maxval,cv.THRESH_BINARY)\r\n return cv.adaptiveThreshold(img_grey,self.maxval,cv.ADAPTIVE_THRESH_MEAN_C,cv.THRESH_BINARY,self.blockSize,self.C)\r\n \r\n elif self.vtype=='adaptivethreshgaussian':\r\n ret,th1 = cv.threshold(img,self.thres,self.maxval,cv.THRESH_BINARY)\r\n return cv.adaptiveThreshold(img_grey,self.maxval,cv.ADAPTIVE_THRESH_GAUSSIAN_C,cv.THRESH_BINARY,self.blockSize,self.C)\r\n else:\r\n return None\r\n "
},
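The repository does not show the class above being used; a hypothetical usage sketch (the image path is a placeholder), matching the constructor signature:

```python
# Hypothetical usage — 'photo.jpg' is a placeholder path
t = threshold('photo.jpg', vtype='adaptivethreshgaussian', blockSize=11, C=2)
result = t.thresholdimage()
# Simple modes return (retval, image); adaptive modes return the image directly
```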
{
"alpha_fraction": 0.6014568209648132,
"alphanum_fraction": 0.6077002882957458,
"avg_line_length": 23.564102172851562,
"blob_id": "dd66ff1fe5331f23197f1d730c908816b64c871c",
"content_id": "fa02591c28bce7e7ed7b22ad6b01d537b2c1c2d9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 961,
"license_type": "permissive",
"max_line_length": 90,
"num_lines": 39,
"path": "/src/utils.py",
"repo_name": "nishgaba-ai/computer-vision",
"src_encoding": "UTF-8",
"text": "import numpy as np\n\ndef euclideanDistance(src, targ):\n '''\n Calculates euclidean distance between two arrays\n '''\n try:\n return np.linalg.norm(a-b)\n\n except:\n return \"Error while calculating the euclidean distances for the above data types\"\n\n\ndef isStringPalindrome(data):\n '''\n Checks if input string or corresponding string from input is a palindrome\n '''\n \n try:\n # Convert input type to string\n data = str(data)\n return True if data==data[::-1] else False\n\n except:\n print(\"Error while processing, please check the input is correct\")\n\n\ndef isPrime(n):\n '''\n Checks if a given number is prime or not\n\n Source: GeeksforGeeks\n '''\n try:\n n = int(n)\n return all([(n % j) for j in range(2, int(n**0.5)+1)]) and n>1\n except:\n print(\"Error Occured While Checking for prime, please check the input is correct\")\n return False\n\n\n\n"
},
{
"alpha_fraction": 0.6887486577033997,
"alphanum_fraction": 0.6955835819244385,
"avg_line_length": 58.709678649902344,
"blob_id": "1d0fb0f7e859faad1c028daf42d01852c5815de9",
"content_id": "85b1440f04bb86f27c1b516739b7f1bea7bc29ff",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1902,
"license_type": "permissive",
"max_line_length": 154,
"num_lines": 31,
"path": "/src/ImageDenoising.py",
"repo_name": "nishgaba-ai/computer-vision",
"src_encoding": "UTF-8",
"text": "import cv2 as cv\r\nimport numpy as np\r\nclass ImageDenoising:\r\n def __init__(self,path,variation='MD',dst=None,templateWindowSize=7,searchWindowSize=21,h=3):\r\n '''variation can have two values:\r\n -'MD'-cv.fastNlMeansDenoising() - works with a single grayscale images\r\n -'MDC'-cv.fastNlMeansDenoisingColored() - works with a color image.\r\n '''\r\n self.variation=variation\r\n self.path=path\r\n self.dst=dst\r\n self.templateWindowSize=templateWindowSize\r\n self.searchWindowSize=searchWindowSize\r\n self.h=h\r\n self.hForColorComponents=self.h\r\n \r\n def denoising(self): \r\n img=cv.imread(self.path)\r\n ''' Common-arguments-\r\n src-Input 8-bit 1-channel, 2-channel, 3-channel or 4-channel image.\r\n dst-Output image with the same size and type as src .\r\n hForColorComponents : same as h, but for color images only. (normally same as h) \r\n templateWindowSize-Size in pixels of the template patch that is used to compute weights. Should be odd. Recommended value 7 pixels\r\n searchWindowSize-Size in pixels of the window that is used to compute weighted average for given pixel. \r\n Should be odd. Affect performance linearly: greater searchWindowsSize - greater denoising time. Recommended value 21 pixels\r\n h-Parameter regulating filter strength. Big h value perfectly removes noise but also removes image details, smaller h value preserves details \r\n but also preserves some noise'''\r\n if self.variation is 'MD': #works with a single grayscale images\r\n return cv.fastNlMeansDenoising(img,self.dst,self.templateWindowSize,self.searchWindowSize,self.h) \r\n elif self.variation is 'MDC':\r\n return cv.fastNlMeansDenoisingColored(img,self.dst,self.templateWindowSize,self.hForColorComponents,self.searchWindowSize,self.h)\r\n \r\n \r\n "
},
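A hypothetical usage sketch for the class above (the path and parameter values are assumptions; 'MDC' expects a colour image):

```python
# Hypothetical usage — 'noisy.jpg' is a placeholder path
d = ImageDenoising('noisy.jpg', variation='MDC', h=10)
clean = d.denoising()
```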
{
"alpha_fraction": 0.6136886477470398,
"alphanum_fraction": 0.6146472096443176,
"avg_line_length": 31.481250762939453,
"blob_id": "b3b881045fc0e2a6a35c98e199326afda1e3ee6a",
"content_id": "4ac068b90723b82f6c0e1f9d4c7cfbf1613ac526",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5216,
"license_type": "permissive",
"max_line_length": 141,
"num_lines": 160,
"path": "/src/networkRequests.py",
"repo_name": "nishgaba-ai/computer-vision",
"src_encoding": "UTF-8",
"text": "import requests\nimport html\nimport google.cloud.storage\n\n\nclass getRequest:\n '''\n Class to send GET request\n '''\n\n def __init__(self, GET_URL = None, GET_PARAMS = None):\n if GET_PARAMS is not None:\n setParams(GET_PARAMS)\n else:\n self.GET_PARAMS = GET_PARAMS\n\n if GET_URL is not None:\n setUrl(GET_URL)\n else:\n self.GET_URL = None\n\n def setUrl(self, GET_URL):\n self.GET_URL = GET_URL\n\n def setParams(self, GET_PARAMS):\n self.GET_PARAMS = dict(GET_PARAMS)\n\n def makeRequest(self):\n self.request = requests.get(url = self.GET_URL, params = self.GET_PARAMS)\n try:\n self.response = self.request.json()\n except:\n self.response = \"Error while extracting JSON response for the GET request, please check 'request' attribute for further details\"\n\n\n\n\n\nclass postRequest:\n '''\n Class to send POST request\n '''\n\n def __init__(self, POST_URL = None, POST_PARAMS = None):\n if POST_PARAMS is not None:\n setParams(POST_PARAMS)\n else:\n self.POST_PARAMS = POST_PARAMS\n\n if POST_URL is not None:\n setUrl(POST_URL)\n else:\n self.POST_URL = None\n\n def setUrl(self, POST_URL):\n self.POST_URL = POST_URL\n\n def setParams(self, POST_PARAMS):\n self.POST_PARAMS = dict(POST_PARAMS)\n\n def makeRequest(self):\n self.request = requests.post(url = self.POST_URL, data = self.POST_PARAMS)\n try:\n self.response = self.request.json()\n except:\n self.response = \"Error while extracting JSON response for the POST request, please check 'request' attribute for further details\"\n\n\n\n\nclass uploadImageGC:\n '''\n Uploads Image to Google Cloud\n '''\n\n def __init__(self, service_account_json_file=None, bucket=None, source_file_path = None, destination_file_path = None):\n self.service_account_json_file = service_account_json_file\n self.bucket = bucket\n self.storage_client = None\n self.source_file_path = source_file_path\n self.destination_file_path = destination_file_path\n self.UPLOAD_URL = None\n\n if self.service_account_json_file is not None:\n makeStorageClient(self.service_account_json_file)\n\n def makeStorageClient(self, service_account_json_file):\n self.storage_client = google.cloud.storage.Client.from_service_account_json(service_account_json_emp)\n if self.bucket is not None:\n getBucket(self.bucket)\n\n\n def getBucket(self, bucket):\n if self.storage_client is not None:\n self.bucket = self.storage_client.get_bucket(bucket)\n if self.source_file_path is not None and self.destination_file_path is not None:\n uploadImage(self.source_file_path, self.destination_file_path)\n \n else:\n print(\"Storage client is currently None, bucket cannot be extracted for None client\")\n\n def uploadImage(self, source_file_path, destination_file_path):\n\n try:\n blob = self.bucket.blob(destination_file_path)\n blob.upload_from_filename(source_file_path)\n blob.make_public()\n\n self.UPLOAD_URL = html.unescape(self.bucket.blob(destination_file_path).public_url)\n\n except Exception as e:\n print(\"Error Uploading Image to Google Cloud\")\n print(e)\n\n\nclass downloadImageS3:\n '''\n Downloads Image from S3 Bucket\n '''\n \n def __init__(self, aws_access_key_id=None, aws_secret_access_key=None, bucket_name = None, object_name = None, file_name = None):\n \n self.aws_access_key_id = aws_access_key_id\n self.aws_secret_access_key = aws_access_key_id\n self.client = None\n self.BUCKET_NAME = bucket_name\n self.OBJECT_NAME = object_name\n self.FILE_NAME = file_name\n createClient()\n\n def setKeyID(self, aws_access_key_id):\n self.aws_access_key_id = aws_access_key_id\n \n def 
setAccessKey(self, aws_secret_access_key):\n self.aws_secret_access_key = aws_secret_access_key\n\n def setBucket(self, bucket_name):\n self.BUCKET_NAME = bucket_name\n \n def setObject(self, object_name):\n self.OBJECT_NAME = object_name\n \n def setFile(self, file_name):\n self.FILE_NAME = file_name\n\n def createClient(self):\n if self.aws_access_key_id is not None and self.aws_secret_access_key is not None:\n self.client = boto3.client('s3', aws_access_key_id = self.aws_access_key_id, aws_secret_access_key = self.aws_secret_access_key)\n\n def download(self):\n try:\n if self.BUCKET_NAME is not None and self.OBJECT_NAME is not None and self.FILE_NAME is not None:\n with open(self.FILE_NAME, 'wb') as f:\n self.client.download_fileobj(self.BUCKET_NAME, self.OBJECT_NAME, f)\n else:\n print(\"Error while downloading from the bucket, one of the Bucket, Object or File name is None\")\n \n except Exception as e:\n print(\"Exception occured while downloading from S3\")\n print(e)\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"
},
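A hypothetical usage sketch for the `getRequest` class above (the URL and parameters are placeholders; httpbin.org is used only as a public echo service):

```python
# Hypothetical usage — endpoint and params are placeholders
req = getRequest()
req.setUrl('https://httpbin.org/get')
req.setParams({'q': 'test'})
req.makeRequest()
print(req.response)
```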
{
"alpha_fraction": 0.6236559152603149,
"alphanum_fraction": 0.6322580575942993,
"avg_line_length": 19.04347801208496,
"blob_id": "ad4bdb59af538856e122ec43673973ead3a1dcb6",
"content_id": "3de4e23df5100c2ae926eba2e08e05d52a9f36b2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 465,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 23,
"path": "/src/loadImage.py",
"repo_name": "nishgaba-ai/computer-vision",
"src_encoding": "UTF-8",
"text": "import cv2\nfrom PIL import Image\nimport numpy as np\n\n\ndef as_bgr(img_path):\n '''\n Loads Imge as CV2 BGR format (numpy array as of OpenCV 4)\n '''\n img = cv2.imread(img_path)\n return img\n\ndef as_rgb(img_path):\n '''\n Loads Image as RGB format using pillow\n\n Return a numpy array\n '''\n\n # NOTE: As this uses pillow format, PNG is loaded with RGBA format by default\n img = Image.open(img_path)\n arr = np.array(img)\n return arr\n\n\n\n\n"
},
{
"alpha_fraction": 0.5636461973190308,
"alphanum_fraction": 0.5711973905563354,
"avg_line_length": 23.986486434936523,
"blob_id": "fe62855debd8d3a7f91cb84c1e455f56c71ff47a",
"content_id": "9d7abcdba1f721d848a7f3ac4ce0ba087da3b0dd",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1854,
"license_type": "permissive",
"max_line_length": 107,
"num_lines": 74,
"path": "/src/loadVideo.py",
"repo_name": "nishgaba-ai/computer-vision",
"src_encoding": "UTF-8",
"text": "import cv2\nimport numpy as np\nfrom imutils.video import VideoStream\n\n\nclass fromCamera:\n '''\n Loads from camera from an external source\n '''\n def __init__(self, src=0, res=None):\n self.src = src\n self.res = res\n self.capture = cv2.VideoCapture(self.src)\n self.ret = False\n self.frame = None\n\n if self.res is not None:\n setResolution(self.res)\n\n \n def setResolution(self, res):\n self.res = res\n self.capture.set(cv2.CAP_PROP_FRAME_WIDTH, self.res[0])\n self.capture.set(cv2.CAP_PROP_FRAME_HEIGHT, self.res[1])\n \n def readFrame(self):\n ret, frame = self.capture.read()\n self.ret = ret\n self.frame = frame\n\n\n\nclass fromVideo:\n '''\n Loads from a video path into the camera\n '''\n\n def __init__(self, path=None, res=None):\n self.path = path\n self.res = res\n self.capture = cv2.VideoCapture(self.path)\n self.ret = False\n self.frame = None\n\n if self.res is not None:\n setResolution(self.res)\n\n \n def setResolution(self, res):\n self.res = res\n self.capture.set(cv2.CAP_PROP_FRAME_WIDTH, self.res[0])\n self.capture.set(cv2.CAP_PROP_FRAME_HEIGHT, self.res[1])\n \n def readFrame(self):\n ret, frame = self.capture.read()\n self.ret = ret\n self.frame = frame\n\n\nclass fromPiCamera:\n '''\n Loads the video source from Pi Camera\n '''\n def __init__(self, res=None):\n self.res = res\n self.frame = None\n\n if self.res is not None:\n self.capture = VideoStream(usePiCamera = True, resolution = (self.res[0], self.res[1])).start()\n else:\n self.capture = VideoStream(usePiCamera = True).start()\n \n def readFrame(self):\n self.frame = self.capture.read()\n\n\n\n\n\n"
},
{
"alpha_fraction": 0.671838641166687,
"alphanum_fraction": 0.6733902096748352,
"avg_line_length": 45,
"blob_id": "98c0037cb501912603fcaafb57dd4134b991c882",
"content_id": "722cca095152d4f8991eb7a3ef67d1c67379935a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1289,
"license_type": "permissive",
"max_line_length": 152,
"num_lines": 28,
"path": "/docs/index.md",
"repo_name": "nishgaba-ai/computer-vision",
"src_encoding": "UTF-8",
"text": "# Welcome to Computer Vision Pip Package Docs\n\nFor full documentation visit [https://github.com/nishgaba-ai/computer-vision/blob/master/docs/index.md](https://github.com/nishgaba-ai/computer-vision).\n\n## Modules\n\n* **'loadImage'** \n **as_bgr()** # Loads Imge as CV2 BGR format (numpy array as of OpenCV 4) \n **as_rgb()** # Loads Image as RGB format using pillow \n* **'loadVideo'** \n **fromCamera** # Loads from camera from an external source \n **fromVideo** # Loads from a video path into the camera \n **fromPiCamera** # Loads the video source from Pi Camera \n* **'threshold'** \n **thresholdimage** # reads a single image and threshold the image \n* **'ImageDenoising'** \n **denoising** # reads a single image and remove the noise from image \n## Project layout\n\n src/\n loadImage.py # Contains modules for Loading Images in differnet formats\n loadVideo.py # Contains modules for Loading Video using different sources\n utils.py # Contains modules for off the hand utilities such as Euclidean Distance, etc.\n networkRequests.py # Contains modules for handling network requests such as GET, POST\n\n mkdocs.yml # The configuration file.\n docs/\n .. index.md # The documentation homepage.\n\n"
}
] | 7 |
tartinesKiller/SFC-i18n-extractor
|
https://github.com/tartinesKiller/SFC-i18n-extractor
|
53b037eb93e3a5acb819512412b10a9b6197598d
|
092ac8f5693e626805499106b9e6420bdf2f5e47
|
b766450f49b3635bbc6ce63cb458e5a95b1958e9
|
refs/heads/master
| 2021-08-23T14:34:12.467113 | 2017-12-05T07:48:26 | 2017-12-05T07:48:26 | 113,067,055 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.4654088020324707,
"alphanum_fraction": 0.4767295718193054,
"avg_line_length": 15.91489315032959,
"blob_id": "5dc759bc73e587754db8b363e4d0c887414b10fa",
"content_id": "3287565eb05a253fe28e58a3bace72dd5877def4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 795,
"license_type": "no_license",
"max_line_length": 122,
"num_lines": 47,
"path": "/README.md",
"repo_name": "tartinesKiller/SFC-i18n-extractor",
"src_encoding": "UTF-8",
"text": "# SFC-i18n-extractor\nSimple Python script to extract i18n blocks from vuejs's single file component, in order to put them all in a single json.\n\n# Usage\n```bash\npython3 main.py path/to/src/folder > path/to/translations.json\n```\n\n# Example\nIf both Hello.vue and Cheese.vue contains translations blocks like so:\n```html\n...\n</template>\n<i18n>\n{\n \"en\": {\n \"hello\": \"Hello!\"\n },\n \"fr\": {\n \"hello\": \"Bonjour !\"\n }\n}\n</i18n>\n<script>\n...\n```\nIt will be converted like that: \n```json\n{\n \"en\": {\n \"Hello.vue\": {\n \"hello\": \"Hello!\"\n },\n \"Cheese.vue\": {\n ...\n } \n },\n \"fr\": {\n \"Hello.vue\": {\n \"hello\": \"Bonjour !\"\n },\n \"Cheese.vue\": {\n ...\n } \n }\n}\n```\n"
},
{
"alpha_fraction": 0.5442779064178467,
"alphanum_fraction": 0.5681198835372925,
"avg_line_length": 28.3799991607666,
"blob_id": "5df223033ef738e24fb0cd1cc925481ecff84b23",
"content_id": "b7e22c3ecda94385b90976b1f27b4ba65a167e86",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1468,
"license_type": "no_license",
"max_line_length": 92,
"num_lines": 50,
"path": "/main.py",
"repo_name": "tartinesKiller/SFC-i18n-extractor",
"src_encoding": "UTF-8",
"text": "import json\nimport xml.etree.ElementTree\nimport sys\nimport os\nimport glob\n\ndef get_i18n_block(filepath):\n res = \"\"\n with open(filepath) as in_fh:\n while True:\n line = in_fh.readline()\n if not line: break\n\n if line.startswith('<i18n'):\n while True:\n i18n_line = in_fh.readline()\n if i18n_line.startswith(\"</\"):\n return res\n\n res += i18n_line\n\ndef add_sfc_i18n_to_translations(sfc_i18n_json, json_output, prefix):\n for lang in sfc_i18n_json:\n if not lang in json_output:\n json_output[lang] = dict()\n if prefix in json_output[lang]:\n raise ValueError(\"component %s already exists in %s\" % (prefix, lang))\n json_output[lang][prefix] = sfc_i18n_json[lang]\n\ndef extract_from_file(filepath, output_json):\n i18n_str = get_i18n_block(filepath)\n if not i18n_str:\n return\n i18n_json = json.loads(i18n_str)\n add_sfc_i18n_to_translations(i18n_json, output_json, os.path.basename(filepath))\n\ndef main():\n if len(sys.argv) != 2:\n print(\"NOOOOOOOOO!\")\n exit(1)\n else:\n new_json = dict()\n for filename in glob.iglob(os.path.join(sys.argv[1], \"./**/*.vue\"), recursive=True):\n extract_from_file(filename, new_json)\n res_str = json.dumps(new_json, ensure_ascii=False)\n print(res_str)\n\n\nif __name__ == \"__main__\":\n main()"
}
] | 2 |
pieleric/multi-prop-router
|
https://github.com/pieleric/multi-prop-router
|
156c1546a9f4e67bfe807be1322d44ca92543fb3
|
b678521cf21a15f3fc76bae0da2f84852cd074b4
|
461413172947e84fed8b4ac2982af1f5facc5df1
|
refs/heads/master
| 2020-04-06T11:18:39.790169 | 2018-11-17T09:47:19 | 2018-11-17T09:47:19 | 157,411,973 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5971566438674927,
"alphanum_fraction": 0.6174662709236145,
"avg_line_length": 37.110877990722656,
"blob_id": "5d16e8332ade501c171a8b18cd0452d9e177d2f8",
"content_id": "a5582a9505be8ce587b9daf732694feec71088dd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 18234,
"license_type": "no_license",
"max_line_length": 272,
"num_lines": 478,
"path": "/src/mprouter/__init__.py",
"repo_name": "pieleric/multi-prop-router",
"src_encoding": "UTF-8",
"text": "import mapbox\nfrom mapbox import Directions, Geocoder\nimport json\nimport requests\nimport time\nimport calendar\nimport logging\nimport os\nfrom geopy import Point\nfrom geopy.distance import distance as geodistance\n\nMONOTCH_KEY = open(\"monotch.key\").readline().strip()\nos.environ[\"MAPBOX_ACCESS_TOKEN\"] = open(\"mapbox.key\").readline().strip()\n\ntry:\n with open(\"monotch_parking_details.json\") as f:\n MONOTCH_CACHE_PARKING_DETAILS = json.load(f)\nexcept Exception:\n MONOTCH_CACHE_PARKING_DETAILS = {}\n\nMAX_PARKING_DISTANCE = 4000 # m\nPARKING_TO_PT_TIME = 3 * 60 # s\n\n# http://www.carbonindependent.org/sources_bus.html\nCO2_PER_KM_CAR = 170 # g/km\nCO2_PER_KM_BUS = 90 # g/km\nCO2_PER_KM_TRAM = 0 # g/km\n\n# 1.5 (€/l) x 0.07 (l/km)\nPRICE_PER_KM_CAR = 0.1 # €\n\n# Note: we express all coordinates in longitude (float), latitude (float), as in GeoJSON\n\nclass RouteSummary():\n def __init__(self, profile, distance, duration, price=0, co2=None, legs=None, depart_time=None, nbChanges=0, origin_id=None, destination_id=None, url=None):\n # TODO: distance is not important?\n self.profile = profile # \"car\", \"bike\", \"foot\" \"taxi\", \"public-transport\"...\n self.distance = distance # m\n self.depart_time = depart_time\n self.duration = duration # s\n self.price = price # €\n self.co2 = co2\n #TODO define more properties to the route summary: \"leisure\"/agreability, \"co2\"....\n # => just automatically computed from the 3 inputs?\n # TODO: add some \"legs\" information to be able to display some kind of basic info about the trip?\n self.legs = legs\n self.nbChanges = nbChanges\n self.origin_id = origin_id\n self.destination_id = destination_id\n self.url = url\n \n def to_struct(self):\n struct = {\"duration\": self.duration,\n \"price\": self.price,\n \"co2\": self.co2,\n }\n if self.co2 is not None:\n struct[\"co2\"]= self.co2\n if self.url:\n struct[\"url\"]= self.url\n return struct\n\nclass PRRouteSummary():\n def __init__(self, duration, price, car, parking, pt):\n self.duration = duration # s\n self.price = price # €\n self.car = car # s\n self.pt = pt\n self.parking = parking # a Parking\n self.co2_savings = None\n self.price_savings = None\n self.duration_savings = None\n \n @property\n def co2(self):\n return self.car.co2 + self.pt.co2\n\n def __str__(self):\n return \"PR Journey of %d m @ %f €, parking at %s, then doing: %s\" % (self.duration / 60, self.price, self.parking.name, self.pt.legs)\n\n def to_struct(self):\n struct = {\"duration_car\": self.car.duration,\n \"price_car\": self.car.price + self.parking.price,\n \"url_car\": self.car.url,\n \"duration_pt\": self.pt.duration,\n \"price_pt\": self.pt.price,\n \"url_pt\": self.pt.url,\n \"co2\": self.co2\n }\n for attr in (\"co2_savings\", \"price_savings\", \"duration_savings\"):\n av = getattr(self, attr)\n if av is not None:\n struct[attr] = av\n return struct\n\n# TODO: for now, we assume the price is fixed (because we can always get some arrangment with every parking company ;-) )\n# ideally, it could also be per hour\nclass Parking():\n def __init__(self, coordinates, name, pid, price, address):\n self.coordinates = coordinates\n self.name = name\n self.id = pid\n self.price = price # €\n self.address = address\n\n def __str__(self):\n return \"Parking %s (%s) at %s, price = %f €\" % (self.name, self.id, self.coordinates, self.price) \n\n\n\ndef get_bbox(position, radius):\n \"\"\"\n return (4 float): long west, lat north, long east, lat south\n \"\"\"\n nw = 
geodistance(meters=radius*1.4).destination(Point(position[1], position[0]), 225)\n se = geodistance(meters=radius*1.4).destination(Point(position[1], position[0]), 45)\n return nw[1], nw[0], se[1], se[0]\n\n\n\ndef get_distance(a, b):\n \"\"\"\n origin (float, float): coordinates longitude, latitude, eg: (-122.7282, 45.5801)\n destination (float, float): coordinates\n return (float): in m\n \"\"\"\n return geodistance(Point(a[1], a[0]), Point(b[1], b[0])).meters\n\n\n# TODO: also take the end time? At least to check that the parking is opened when coming back,\n# that there is still public transport, and to compute the price\ndef pr_route(origin, destination, depart_time):\n \"\"\"\n origin (float, float): coordinates longitude, latitude, eg: (-122.7282, 45.5801)\n destination (float, float): coordinates\n depart_time (float): s from epoch\n return (list of PRRouteSummary): the cheapest/quickest\n \"\"\"\n\n # Find all the parking \"near\" the destinations (aka around the city)\n try:\n # FIXME: once the API is enabled again -> remove the \"cache_\" \n pks_dest = cache_monotch_list_parkings(destination, MAX_PARKING_DISTANCE)\n except Exception:\n logging.exception(\"Failed to get parkings for location %s\", destination)\n raise\n\n logging.debug(\"got %d parkings\", len(pks_dest))\n\n # Compute for each parking the route\n full_journeys = []\n for p in pks_dest:\n try:\n # TODO; find the closest parking from the destination (and if it's not too far, use it to report the price with only car)\n # TODO: remove parkings which are really too close? Or just special case on foot?\n a_to_p = mapbox_route(origin, p.coordinates, \"car\")\n a_to_p.url = create_gmap_url(origin, p.coordinates, \"car\")\n except Exception:\n logging.exception(\"Failed to get routing for A %s to %s\", origin, p)\n continue\n\n depart_time_p = depart_time + a_to_p.duration + PARKING_TO_PT_TIME\n try:\n p_to_b = nl9292_route(p.coordinates, destination, depart_time_p)\n p_to_b.url = create_nl9292_url(p_to_b.origin_id, p_to_b.destination_id)\n except Exception:\n # Can happen if it's too close\n logging.exception(\"Failed to get routing for P %s to %s\", p, destination)\n continue\n\n total_dur = a_to_p.duration + PARKING_TO_PT_TIME + p_to_b.duration\n total_price = a_to_p.price + p.price + p_to_b.price\n # When to leave at the latest (computed backwards based on the public transport)\n a_to_p.depart_time = p_to_b.depart_time - PARKING_TO_PT_TIME - a_to_p.duration\n logging.debug(\"Car journey: %s\", a_to_p.url)\n logging.debug(\"Public transport journey: %s\", p_to_b.url)\n j = PRRouteSummary(total_dur, total_price, a_to_p, p, p_to_b)\n full_journeys.append(j)\n\n logging.debug(\"Got %d journeys\", len(full_journeys))\n \n # Pick the 2 cheapest journeys\n cheapest_journeys = sorted(full_journeys, key=lambda j: j.price)\n best_journeys = set(cheapest_journeys[:2])\n \n # Pick the 2 quickest journeys\n quickest_journeys = sorted(full_journeys, key=lambda j: j.duration)\n best_journeys |= set(quickest_journeys[:2])\n\n logging.debug(\"Selected %d journeys\", len(best_journeys))\n\n return best_journeys\n\n\ndef pr_route_address(origin_add, destination_add, depart_time):\n \"\"\"\n almost same as pr_route, but takes addresses, instead of coordinates, as input\n Also runs the car only route\n return RouteSummary, list of PRRouteSummary: car only, best P+R routes\n \"\"\"\n origin = mapbox_geocoder_fw(origin_add)\n destination = mapbox_geocoder_fw(destination_add)\n j_car = mapbox_route(origin, destination, \"car\")\n try:\n 
pk_closest = cache_monotch_list_parkings(destination, None)[0]\n except Exception:\n logging.exception(\"Failed to get parkings for location %s\", destination)\n raise\n j_car.price += pk_closest.price\n j_car.duration += PARKING_TO_PT_TIME\n\n js_pr = pr_route(origin, destination, depart_time)\n # Compute savings\n for j in js_pr:\n j.co2_savings = j_car.co2 - j.co2\n j.price_savings = j_car.price - j.price\n j.duration_savings = j_car.duration - j.duration\n\n return j_car, js_pr\n\n\ndef create_gmap_url(origin, destination, profile=\"car\"):\n \"\"\"\n origin (float, float): coordinates longitude, latitude, eg: (-122.7282, 45.5801)\n destination (float, float): coordinates\n return (str): the url to open the routing in google map\n \"\"\"\n # cf https://developers.google.com/maps/documentation/urls/ios-urlscheme\n gmprofile = {\"car\": \"driving\", \"pt\": \"transit\", \"bike\": \"bicycling\", \"foot\": \"walking\"}[profile]\n return (\"https://www.google.com/maps/?\" + \n \"saddr=@%f,%f\" % (origin[1], origin[0]) +\n \"&daddr=@%f,%f\" % (destination[1], destination[0]) +\n \"&directionsmode=%s\" % gmprofile)\n\ndef create_nl9292_url(origin_id, destination_id):\n \"\"\"\n origin_id (str): nl9292 POI ID\n destination_id (str): nl9292 POI ID\n return (str): the url to open the routing in 9292\n \"\"\"\n # cf https://9292.nl/zakelijk/reisadvies-via-je-eigen-website\n return (\"https://9292.nl/?\" + \n \"van=%s\" % origin_id +\n \"&naar=%s\" % destination_id)\n\ndef cache_monotch_list_parkings(position, radius=None):\n \"\"\"\n origin (float, float): coordinates longitude, latitude, eg: (-122.7282, 45.5801)\n radius (float or None): max distance from the position, if None, just return the closest parking\n return (list of Parkings)\n \"\"\"\n f = open(\"monotch_parkings.json\")\n r = json.load(f)\n\n pks = []\n for pj in r:\n pid = pj[\"id\"]\n \n loc = float(pj[\"location\"][\"lng\"]), float(pj[\"location\"][\"lat\"])\n if radius is not None and get_distance(position, loc) > radius:\n logging.debug(\"Skipping parking %s which is too far\", pid)\n continue\n \n if pid in MONOTCH_CACHE_PARKING_DETAILS:\n p_details = monotch_get_parking_details(pid)\n else:\n logging.debug(\"Simulating non cached parking %s\", pid)\n p_details = {\"overview_city\": \"Den Haag\",\n \"rate_day\": \"1000\",\n \"name\": \"\"}\n\n if \"rate_day\" in pj:\n try:\n price = (int(pj[\"rate_day\"]) / 100) \n price /= 2 # asssume we can get discount for 12h\n except Exception:\n logging.exception(\"Failed to read rate day for parking %s\", pid)\n price = 5\n # TODO: for a specific time slot? cf p_details[\"rates\"]\n \n else:\n price = 0\n \n # Note: the address is not always present. 
Ideally, we'd just fill-up by reverse geocoding.\n full_address = p_details.get(\"address\", \"\") + \" \" + p_details[\"overview_city\"]\n p = Parking(loc, p_details.get(\"name\", \"\"), pj[\"id\"], price, full_address)\n pks.append(p)\n\n if radius is None:\n # pick the closest one\n pk = min(pks, key=lambda p: get_distance(position, p.coordinates))\n return [pk]\n\n return pks\n\n\nMONOTCH_URI_BASE = \"https://api.monotch.com/PrettigParkeren/v6/\"\nMONOTCH_USABLE_PARKINGS = \"parking_unknown;parking_garage;parking_area;parking_pr;parking_valet;parking_book\"\ndef monotch_list_parkings(position, radius):\n \"\"\"\n origin (float, float): coordinates longitude, latitude, eg: (-122.7282, 45.5801)\n radius (float): max distance from the position\n return (list of Parkings)\n \"\"\"\n # It doesn't take a \"radius\", but a bounding box\n bbox = get_bbox(position, radius)\n uri = (MONOTCH_URI_BASE + \"list?\" + \"w=%f&n=%f&e=%f&s=%f\" % bbox +\n \"&types=\" + MONOTCH_USABLE_PARKINGS +\n \"&api_key=\" + MONOTCH_KEY\n )\n logging.debug(\"Contacting uri: %s\", uri)\n response = requests.get(uri)\n while response.status_code == 403:\n time.sleep(1)\n logging.debug(\"retrying a bit later\")\n response = requests.get(uri)\n logging.debug(\"Got response: %s\", response.content)\n r = response.json()\n\n pks = []\n for pj in r:\n loc = float(pj[\"location\"][\"lng\"]), float(pj[\"location\"][\"lat\"])\n p_details = monotch_get_parking_details(pj[\"id\"])\n if \"rate_day\" in pj:\n price = (int(pj[\"rate_day\"]) / 100) \n price /= 2 # asssume we can get discount for 12h\n # TODO: for a specific time slot? cf p_details[\"rates\"]\n \n else:\n price = 0\n \n # Note: the address is not always present. Ideally, we'd just fill-up by reverse geocoding.\n full_address = p_details.get(\"address\", \"\") + \" \" + p_details[\"overview_city\"]\n p = Parking(loc, p_details[\"name\"], pj[\"id\"], price, full_address)\n pks.append(p)\n\n return pks\n\n\ndef monotch_get_parking_details(parking_id):\n \"\"\"\n return (str): structure json-like from the monotoch API\n \"\"\"\n # Cache, because monotch limits the number of requests per sec (and anyway, it's static data)\n if parking_id in MONOTCH_CACHE_PARKING_DETAILS:\n return MONOTCH_CACHE_PARKING_DETAILS[parking_id]\n\n # https://api.monotch.com/PrettigParkeren/v6/detail?id=parking_1557&includeRates=1&api_key=hp8cq2h6sy2me5hn4nekgnme\n # https://api.monotch.com/PrettigParkeren/v6/rates?eid=parking_1731&api_key=hp8cq2h6sy2me5hn4nekgnme\n uri = (MONOTCH_URI_BASE + \"detail?\" + \"id=%s\" % parking_id +\n \"&includeRates=1\" + \n \"&api_key=\" + MONOTCH_KEY\n )\n logging.debug(\"Contacting uri: %s\", uri)\n response = requests.get(uri)\n while response.status_code == 403:\n time.sleep(1)\n logging.debug(\"retrying a bit later\")\n response = requests.get(uri)\n r = response.json()\n return r\n \n\n# Note:: Mapbox v5 doesn't support the time of departue/arrival\n\n\n# Mapbox:\ndef mapbox_route(origin, destination, profile):\n \"\"\"\n origin (float, float): coordinates longitude, latitude, eg: (-122.7282, 45.5801)\n destination (float, float): coordinates\n profile (str): \"car\", \"bike\", or \"foot\"\n return RouteSummary\n \"\"\"\n mbprofile = {\"car\": \"mapbox/driving-traffic\", \"bike\": \"mapbox/cycling\", \"foot\": \"mapbox/walking\"}[profile]\n\n service = Directions()\n response = service.directions([origin, destination], mbprofile)\n # TODO: check it went fine\n\n r = response.json()\n logging.debug(response.json())\n # TODO: r = response.json()\n # Get the 
most recommended route\n route = r[\"routes\"][0]\n # To get the whole geometry:\n # driving_routes = response.geojson()\n\n if profile == \"car\":\n dist_km = route[\"distance\"] / 1000\n price = PRICE_PER_KM_CAR * dist_km\n co2 = CO2_PER_KM_CAR * dist_km\n else:\n price = 0\n co2 = 0\n return RouteSummary(profile, route[\"distance\"], route[\"duration\"], price, co2)\n\ndef mapbox_geocoder_fw(address):\n \"\"\"\n address (str): \n return (float, float): longitude, latitude\n \"\"\"\n geocoder = Geocoder()\n # TODO: add some proximity (from the user)?\n response = geocoder.forward(address)\n \n r = response.json()\n logging.debug(\"Rettrieved potential locations for %s: %s\", address, r)\n \n coord = r[\"features\"][0]['center']\n return float(coord[0]), float(coord[1])\n\n\n# cf https://github.com/aitorvs/9292-api-spec/blob/master/docs/resources/journeys.md\nNL_9292_URI_BASE = \"http://api.9292.nl/0.1/\"\ndef nl9292_route(origin, destination, depart_time):\n \"\"\"\n origin (float, float): coordinates longitude, latitude, eg: (-122.7282, 45.5801)\n destination (float, float): coordinates\n return RouteSummary\n \"\"\"\n #curl -v \"http://api.9292.nl/0.1/journeys?before=1&sequence=1&byFerry=true&bySubway=true&byBus=true&byTram=true&byTrain=true&lang=nl-NL&from=station-amsterdam-centraal&dateTime=2018-11-21T1754&searchType=departure&interchangeTime=standard&after=5&to=station-eindhoven\"\n\n # need to convert a longitue/lattitude to a \"location id\"\n origin_id = nl9292_get_location_id(origin)\n destination_id = nl9292_get_location_id(destination)\n\n uri = (NL_9292_URI_BASE + \"/journeys?before=1&sequence=1&byFerry=true&bySubway=true&byBus=true&byTram=true&byTrain=true&lang=nl-NL\" +\n \"&from=\" + origin_id +\n \"&dateTime=\" + time.strftime(\"%Y-%m-%dT%H%M\", time.gmtime(depart_time)) + # now in yyyy-MM-ddTHHmm\n \"&searchType=departure&interchangeTime=standard&after=5\" +\n \"&to=\" + destination_id\n )\n response = requests.get(uri)\n r = response.json()\n logging.debug(\"Got response %s\", r)\n # We pick the first journey we find\n # TODO: be more picky: less changes + earliest one after the departure time\n j = r[\"journeys\"][0]\n\n departure = nl9292_time_to_epoch(j[\"departure\"])\n arrival = nl9292_time_to_epoch(j[\"arrival\"])\n duration = arrival - departure\n try:\n if j[\"fareInfo\"][\"fullPriceCents\"] is None:\n # Can happen if only walking\n price = 0\n else:\n price = j[\"fareInfo\"][\"fullPriceCents\"] * 0.01 # €\n except Exception:\n logging.exception(\"Failed to compute price\")\n price = 0\n legs = j[\"legs\"]\n nbChanges = j[\"numberOfChanges\"]\n co2 = 0\n # TODO: add for each leg of the journey which uses bus, the co2\n logging.debug(\"Found pt journey starting at %f, lasting %d m\", departure, duration / 60)\n return RouteSummary(\"public-transport\", None, duration, price, co2, legs, departure, nbChanges, origin_id, destination_id)\n\ndef nl9292_time_to_epoch(t):\n \"\"\"\n t (str): time in the format yyyy-MM-ddTHH:mm\n returns (float): seconds since epoch\n \"\"\"\n return calendar.timegm(time.strptime(t, \"%Y-%m-%dT%H:%M\"))\n\ndef nl9292_get_location_id(coordinates):\n \"\"\"\n origin (float, float): coordinates longitude, latitude\n returns the closest location id for the given coordinates\n \"\"\"\n # cf https://github.com/aitorvs/9292-api-spec/blob/master/docs/resources/locations.md\n # ex: \"http://api.9292.nl/0.1/locations?lang=nl-NL&latlong=52.352812,4.948491\"\n uri = (NL_9292_URI_BASE + \"locations?lang=nl-NL&latlong=%f,%f\" % (coordinates[1], 
coordinates[0]))\n response = requests.get(uri)\n r = response.json()\n logging.debug(\"Got response %s\", r)\n return r[\"locations\"][0][\"id\"]\n\n"
},
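The `monotch_list_parkings` function above turns a centre point plus radius into the `w/n/e/s` bounding box the API expects via `get_bbox`, which is not shown in this excerpt. Below is a minimal sketch of how such a box can be derived, assuming a spherical-Earth approximation; the function name and the metres-per-degree constant are illustrative, not the project's actual implementation.

```python
import math

def approx_bbox(position, radius_m):
    """Return an approximate (w, n, e, s) box around (lon, lat), radius in metres."""
    lon, lat = position
    dlat = radius_m / 111320.0  # ~metres per one degree of latitude
    # a degree of longitude shrinks with the cosine of the latitude
    dlon = radius_m / (111320.0 * math.cos(math.radians(lat)))
    return (lon - dlon, lat + dlat, lon + dlon, lat - dlat)

# ~10 km box around the TU Delft coordinates used in the tests
print(approx_bbox((4.37212, 52.00234), 10000))
```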
{
"alpha_fraction": 0.6091448068618774,
"alphanum_fraction": 0.6506600379943848,
"avg_line_length": 42.55833435058594,
"blob_id": "dc56cd75149e0f28149b59eaeec73521fc5b7753",
"content_id": "f137b7697cbc07708abe75c5d05f8e4ec94f73b0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5229,
"license_type": "no_license",
"max_line_length": 111,
"num_lines": 120,
"path": "/src/mprouter/test/mprouter_test.py",
"repo_name": "pieleric/multi-prop-router",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n\nimport logging\nimport unittest\nimport mprouter\n\nlogging.getLogger().setLevel(logging.DEBUG)\n\nlonglat_tudelft = (4.37212, 52.00234)\nadd_tudelft = \"TU aula, delft\"\nlonglat_kijkduin = (4.22200, 52.06965)\nadd_kijkduin = \"deltaplein, den haag\"\nlonglat_denhaag = (4.31527, 52.08040) # Mauritshuis\nadd_denhaag = \"Mauritshuis, Den Haag\"\n\n\nclass TestMapBox(unittest.TestCase):\n def test_route(self):\n sumcar = mprouter.mapbox_route(longlat_tudelft, longlat_kijkduin, \"car\")\n logging.debug(\"car summary: %s\", sumcar)\n sumbike = mprouter.mapbox_route(longlat_tudelft, longlat_kijkduin, \"bike\")\n sumfoot = mprouter.mapbox_route(longlat_tudelft, longlat_kijkduin, \"foot\")\n assert sumcar.duration < sumbike.duration < sumfoot.duration\n # In NL, on foot can be further than by bike\n assert sumcar.distance >= sumbike.distance #>= sumfoot.distance\n assert sumcar.price > 0\n assert sumcar.co2 > 0\n \n def test_geocoder_fw(self):\n dh = mprouter.mapbox_geocoder_fw(add_denhaag)\n self.assertAlmostEqual(dh[0], longlat_denhaag[0], delta=0.01)\n self.assertAlmostEqual(dh[1], longlat_denhaag[1], delta=0.01)\n \n dh = mprouter.mapbox_geocoder_fw(add_kijkduin)\n self.assertAlmostEqual(dh[0], longlat_kijkduin[0], delta=0.01)\n self.assertAlmostEqual(dh[1], longlat_kijkduin[1], delta=0.01)\n\n\nclass Test9292(unittest.TestCase):\n def test_route(self):\n sumpt = mprouter.nl9292_route(longlat_tudelft, longlat_kijkduin, 1542387791)\n logging.debug(\"9292 summary: %s\", sumpt)\n assert sumpt.duration > 600 # at leat 10 min\n assert sumpt.price > 3 # €\n\n def test_location_id(self):\n tudelft_id = mprouter.nl9292_get_location_id(longlat_tudelft)\n logging.debug(\"Got TUDelft ID = %s\", tudelft_id)\n assert \"delft\" in tudelft_id\n\n\nclass TestMonotch(unittest.TestCase):\n def test_parking(self):\n pks1km = mprouter.monotch_list_parkings(longlat_kijkduin, 1000)\n logging.debug(\"Got parkings: %s\", pks1km)\n pks10km = mprouter.monotch_list_parkings(longlat_kijkduin, 10000)\n logging.debug(\"Got parkings: %s\", pks10km)\n for p in pks10km:\n print(p)\n self.assertGreaterEqual(len(pks10km), len(pks1km))\n\n def test_closest_parking(self):\n pk_kijkduin = mprouter.cache_monotch_list_parkings(longlat_kijkduin)[0]\n print(pk_kijkduin)\n pk_denhaag = mprouter.cache_monotch_list_parkings(longlat_denhaag)[0]\n dist = mprouter.get_distance(pk_denhaag.coordinates, longlat_denhaag)\n print(pk_denhaag)\n self.assertLessEqual(dist, 1000)\n\n\nclass TestPRRoute(unittest.TestCase):\n def test_pr_route(self):\n departt = 1542387791 # 2018-11-16 @ 16:30\n #journeys = mprouter.pr_route(longlat_tudelft, longlat_kijkduin, departt)\n journeys = mprouter.pr_route(longlat_tudelft, longlat_denhaag, departt)\n logging.debug(\"Got journeys: %s\", journeys)\n for j in journeys:\n print(j.to_struct())\n print(\"https://www.openstreetmap.org/directions?engine=osrm_car&route=%f,%f;%f,%f\" % \n (longlat_tudelft[1], longlat_tudelft[0], j.parking.coordinates[1], j.parking.coordinates[0]))\n self.assertGreaterEqual(len(journeys), 2)\n for j in journeys:\n self.assertGreaterEqual(j.duration, j.car.duration + j.pt.duration)\n self.assertGreater(j.car.depart_time, departt - 600)\n\n def test_pr_route_night(self):\n departt = 1542420000 # 2018-11-17 @ 03:00\n journeys = mprouter.pr_route(longlat_tudelft, longlat_kijkduin, departt)\n #journeys = mprouter.pr_route(longlat_tudelft, longlat_denhaag, departt)\n logging.debug(\"Got journeys: %s\", journeys)\n for j in journeys:\n print(j.to_struct())\n 
print(\"https://www.openstreetmap.org/directions?engine=osrm_car&route=%f,%f;%f,%f\" % \n (longlat_tudelft[1], longlat_tudelft[0], j.parking.coordinates[1], j.parking.coordinates[0]))\n self.assertGreaterEqual(len(journeys), 2)\n for j in journeys:\n self.assertGreaterEqual(j.duration, j.car.duration + j.pt.duration)\n self.assertGreater(j.car.depart_time, departt - 600)\n\n def test_pr_route_add(self):\n departt = 1542387791 # 2018-11-16 @ 16:30\n car_only, journeys = mprouter.pr_route_address(add_tudelft, add_kijkduin, departt)\n logging.debug(\"Just by car: %s\", car_only.to_struct())\n logging.debug(\"Got journeys: %s\", journeys)\n for j in journeys:\n print(j.to_struct())\n print(\"https://www.openstreetmap.org/directions?engine=osrm_car&route=%f,%f;%f,%f\" % \n (longlat_tudelft[1], longlat_tudelft[0], j.parking.coordinates[1], j.parking.coordinates[0]))\n self.assertGreaterEqual(len(journeys), 2)\n for j in journeys:\n self.assertGreaterEqual(j.duration, j.car.duration + j.pt.duration)\n self.assertGreater(j.car.depart_time, departt - 600)\n\n def test_bbox(self):\n bbox = mprouter.get_bbox(longlat_tudelft, 10000)\n self.assertEqual(len(bbox), 4)\n\n\nif __name__ == \"__main__\":\n unittest.main()\n"
},
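The tests above hard-code departure times as epoch seconds (e.g. `1542387791`), while `nl9292_time_to_epoch` goes the other way with `calendar.timegm`. The code thereby treats the 9292 timestamps as UTC rather than local time, which is why `timegm`/`gmtime` are used instead of `time.mktime`/`time.localtime`. A small round-trip sketch (the helper names are illustrative):

```python
import calendar
import time

def utc_string_to_epoch(t):
    # same approach as nl9292_time_to_epoch: parse as UTC, not local time
    return calendar.timegm(time.strptime(t, "%Y-%m-%dT%H:%M"))

def epoch_to_utc_string(epoch):
    return time.strftime("%Y-%m-%dT%H:%M", time.gmtime(epoch))

departt = 1542387791                      # the departure time used in the tests
s = epoch_to_utc_string(departt)          # '2018-11-16T16:23' (UTC)
assert utc_string_to_epoch(s) == departt - departt % 60  # minute resolution
print(s)
```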
{
"alpha_fraction": 0.6128071546554565,
"alphanum_fraction": 0.6485480070114136,
"avg_line_length": 26.40816307067871,
"blob_id": "28e11e4edb1bab4fe28bebb4193fdbcff221427c",
"content_id": "14ceec21f1dcdea424ae7df5656a9f0eaee6d364",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1343,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 49,
"path": "/cache_parking_details.py",
"repo_name": "pieleric/multi-prop-router",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n\nimport logging\nimport requests\nimport json\nimport mprouter\nfrom mprouter import MONOTCH_URI_BASE, MONOTCH_USABLE_PARKINGS, MONOTCH_KEY\n\nlogging.getLogger().setLevel(logging.DEBUG)\n\n#BBOX = 4.3193, 52.1527,4.4529, 51.9396\nBBOX = mprouter.get_bbox((4.37212, 52.00234), 30000)\n\nuri = (MONOTCH_URI_BASE + \"list?\" + \"w=%f&n=%f&e=%f&s=%f\" % BBOX +\n \"&types=\" + MONOTCH_USABLE_PARKINGS +\n \"&api_key=\" + MONOTCH_KEY\n )\nlogging.debug(\"Contacting uri: %s\", uri)\nresponse = requests.get(uri)\nwhile response.status_code == 403:\n time.sleep(1)\n logging.debug(\"retrying a bit later\")\n response = requests.get(uri)\nlogging.debug(\"Got response: %s\", response.content)\nr = response.json()\n\njson_sum = json.dumps(r, sort_keys=True, indent=4)\nf = open(\"monotch_parkings_2.json\", \"w+\")\nf.write(json_sum)\n\npks = dict(mprouter.MONOTCH_CACHE_PARKING_DETAILS)\ntry:\n for pj in r:\n pid = pj[\"id\"]\n if pid in pks:\n continue\n try:\n p_details = mprouter.monotch_get_parking_details(pid)\n except Exception:\n logging.exception(\"Failed to get %s\", pid)\n continue\n pks[pid] = p_details\nexcept KeyboardInterrupt:\n pass\n\nfulls = json.dumps(pks, sort_keys=True, indent=4)\nf = open(\"monotch_parking_details.json\", \"w+\")\nf.write(fulls)\nf.close()\n"
},
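Both this script and mprouter.py retry a 403 (the Monotch rate limit) in an open-ended `while` loop, which hangs forever if the key is simply rejected. A minimal alternative sketch with a bounded number of attempts and exponential backoff; `get_with_retry` is an illustrative name, not part of the project:

```python
import logging
import time

import requests

def get_with_retry(uri, max_tries=5, delay_s=1.0):
    """GET `uri`, retrying on 403 (rate limiting) at most `max_tries` times."""
    for attempt in range(max_tries):
        response = requests.get(uri)
        if response.status_code != 403:
            return response
        logging.debug("Got 403, retrying in %.1f s (attempt %d)", delay_s, attempt + 1)
        time.sleep(delay_s)
        delay_s *= 2  # exponential backoff
    response.raise_for_status()  # still 403 after max_tries: raise instead of looping
```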
{
"alpha_fraction": 0.7448015213012695,
"alphanum_fraction": 0.775047242641449,
"avg_line_length": 28.38888931274414,
"blob_id": "8922d8b7cca7879f5ed3f72e1631d689e83490df",
"content_id": "dcf09339a0b0dca9efed150abdc62bb7ffeef9c0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 529,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 18,
"path": "/install.sh",
"repo_name": "pieleric/multi-prop-router",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n# Set up the needed dependencies on Ubuntu 16.04/18.04\n\n# Mapbox SDK (for Python)\nsudo apt install python3-pip\n#pip3 install boto3 iso3166 python-dateutil requests polyline uritemplate cachecontrol\n#mkdir ~/development/\n#cd ~/development/\n#git clone https://github.com/mapbox/mapbox-sdk-py.git\npip3 install mapbox requests geopy\n\n\n\n\nexport PYTHONPATH=~/development/mapbox-sdk-py/:~/development/multi-prop-router/src/\n\n# This is the \"public\" token to access the Mapbox API\nexport MAPBOX_ACCESS_TOKEN=$(cat mapbox.key)\n"
}
] | 4 |
MaheshSuranga/Overseer-Engine
|
https://github.com/MaheshSuranga/Overseer-Engine
|
a13601c2d73833a673a3f6f8f547ae01b5ce4034
|
7e4ffb190a0675dfc9c635fd3aa7f2944e9ffdeb
|
2f072348fa09e210735d0b1228db83174678b40c
|
refs/heads/master
| 2020-09-29T18:08:38.998225 | 2020-01-01T16:51:52 | 2020-01-01T16:51:52 | 227,090,308 | 2 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5614009499549866,
"alphanum_fraction": 0.585089385509491,
"avg_line_length": 33.75757598876953,
"blob_id": "6ffa662fe98ab6cd6fa59d9852cb8278ea6e4059",
"content_id": "b12ca376d155a1eeb60910301b66b37ed24be72b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6881,
"license_type": "no_license",
"max_line_length": 93,
"num_lines": 198,
"path": "/core/recognize_video.py",
"repo_name": "MaheshSuranga/Overseer-Engine",
"src_encoding": "UTF-8",
"text": "from imutils.video import VideoStream\nfrom imutils.video import FileVideoStream\nfrom imutils.video import FPS\nimport json\nimport numpy as np\nimport argparse\nimport imutils\nimport pickle\nimport time\nimport cv2\nimport os\nimport socket\nimport threading\nimport utils.imageEnhancer as image_enhancer\n\nFD_FOLDER = 'face_detection_model'\nEMBEDDINGS_MODEL = 'openface_nn4.small2.v1.t7'\nRECOGNIZER = 'output/recognizer.pickle'\nLABEL_ENCODER = 'output/le.pickle'\n\nvs = None\noutputFrame = None\nlock = threading.Lock()\ncurrent = None\nusers = {}\n\n# sender = imagezmq.ImageSender(connect_to=\"tcp://{}:5555\".format(\n# \t\"192.168.43.132\"))\n\n# get the host name, initialize the video stream, and allow the\n# camera sensor to warmup\n\ndef recognize(inp_confidence, vid_file):\n # rpiName = socket.gethostname()\n # print(rpiName + \"*************\")\n global vs,outputFrame, lock, current, users\n\n # load our serialized face detector from disk\n print(\"[INFO] loading face detector...\")\n protoPath = os.path.sep.join([FD_FOLDER, \"deploy.prototxt\"])\n modelPath = os.path.sep.join([FD_FOLDER,\n \"res10_300x300_ssd_iter_140000.caffemodel\"])\n detector = cv2.dnn.readNetFromCaffe(protoPath, modelPath)\n\n # load our serialized face embedding model from disk\n print(\"[INFO] loading face recognizer...\")\n embedder = cv2.dnn.readNetFromTorch(EMBEDDINGS_MODEL)\n\n # load the actual face recognition model along with the label encoder\n recognizer = pickle.loads(open(RECOGNIZER, \"rb\").read())\n le = pickle.loads(open(LABEL_ENCODER, \"rb\").read())\n\n # initialize the video stream, then allow the camera sensor to warm up\n print(\"[INFO] starting video stream...\")\n # vs = VideoStream(src=0).start()\n vs = FileVideoStream(vid_file).start()\n time.sleep(2.0)\n\n # start the FPS throughput estimator\n fps = FPS().start()\n users = {}\n # loop over frames from the video file stream\n while True:\n # grab the frame from the threaded video stream\n frame = vs.read()\n frame = image_enhancer.image_enhance(frame)\n frame = image_enhancer.image_sharpen(frame)\n # sender.send_image(rpiName, cv2.resize(frame, (640,320)))\n \n # resize the frame to have a width of 600 pixels (while\n # maintaining the aspect ratio), and then grab the image\n # dimensions\n frame = imutils.resize(frame)\n (h, w) = frame.shape[:2]\n # construct a blob from the image\n imageBlob = cv2.dnn.blobFromImage(\n cv2.resize(frame, (720, 1280)), 1.0, (800,800),\n (104.0, 177.0, 123.0), swapRB=False, crop=False)\n\n # apply OpenCV's deep learning-based face detector to localize\n # faces in the input image\n detector.setInput(imageBlob)\n detections = detector.forward()\n\n # loop over the detections\n for i in range(0, detections.shape[2]):\n # extract the confidence (i.e., probability) associated with\n # the prediction\n confidence = detections[0, 0, i, 2]\n\n # filter out weak detections\n if confidence > float(inp_confidence):\n # compute the (x, y)-coordinates of the bounding box for\n # the face\n box = detections[0, 0, i, 3:7] * np.array([w, h, w, h])\n (startX, startY, endX, endY) = box.astype(\"int\")\n\n # extract the face ROI\n face = frame[startY:endY, startX:endX]\n (fH, fW) = face.shape[:2]\n\n # ensure the face width and height are sufficiently large\n if fW < 20 or fH < 20:\n continue\n\n # construct a blob for the face ROI, then pass the blob\n # through our face embedding model to obtain the 128-d\n # quantification of the face\n faceBlob = cv2.dnn.blobFromImage(face, 1.0 / 255,\n (96, 96), (0, 0, 
0), swapRB=True, crop=False)\n                embedder.setInput(faceBlob)\n                vec = embedder.forward()\n                \n                # face = cv2.resize(face, (160,160), interpolation = cv2.INTER_AREA)\n                # sample = np.expand_dims(face, axis=0)\n                # vec = model.predict(sample)\n\n                # perform classification to recognize the face\n                preds = recognizer.predict_proba(vec)[0]\n                j = np.argmax(preds)\n                proba = preds[j]\n                name = le.classes_[j]\n\n                current = name\n                if not current in users:\n                    users[current] = 1 * proba\n                else:\n                    users[current] = users[current] + 1 * proba\n                # draw the bounding box of the face along with the\n                # associated probability\n                text = \"{}: {:.2f}%\".format(name, proba * 100)\n                y = startY - 10 if startY - 10 > 10 else startY + 10\n                cv2.rectangle(frame, (startX, startY), (endX, endY),\n                    (0, 0, 255), 2)\n                cv2.putText(frame, text, (startX, y),\n                    cv2.FONT_HERSHEY_SIMPLEX, 0.45, (0, 0, 255), 2)\n\n        # update the FPS counter\n        fps.update()\n\n        with lock:\n            outputFrame = frame.copy()\n        # cv2.imshow(\"Frame\", frame)\n        key = cv2.waitKey(1) & 0xFF\n\n        # if the `q` key was pressed, break from the loop\n        if key == ord(\"q\"):\n            break\n        # show the output frame\n        # cv2.imshow(\"Frame\", frame)\n        # key = cv2.waitKey(1) & 0xFF\n\n        # # if the `q` key was pressed, break from the loop\n        # if key == ord(\"q\"):\n        #     break\n\n    # stop the timer and display FPS information\n    fps.stop()\n    print(\"[INFO] elapsed time: {:.2f}\".format(fps.elapsed()))\n    print(\"[INFO] approx. FPS: {:.2f}\".format(fps.fps()))\n\n    # do a bit of cleanup\n    cv2.destroyAllWindows()\n    vs.stop()\n\ndef generate():\n\t# grab global references to the output frame and lock variables\n\tglobal outputFrame, lock\n\n\t# loop over frames from the output stream\n\twhile True:\n\t\t# wait until the lock is acquired\n\t\twith lock:\n\t\t\t# check if the output frame is available, otherwise skip\n\t\t\t# the iteration of the loop\n\t\t\tif outputFrame is None:\n\t\t\t\tcontinue\n\n\t\t\t# encode the frame in JPEG format\n\t\t\t(flag, encodedImage) = cv2.imencode(\".jpg\", outputFrame)\n\n\t\t\t# ensure the frame was successfully encoded\n\t\t\tif not flag:\n\t\t\t\tcontinue\n\n\t\t# yield the output frame in the byte format\n\t\tyield(b'--frame\\r\\n' b'Content-Type: image/jpeg\\r\\n\\r\\n' + \n\t\t\tbytearray(encodedImage) + b'\\r\\n')\n\ndef current_identification():\n    global current\n    print(current)\n    # `current` is still None before the first face has been recognized\n    yield \"data: \" + (current or \"\") + \"\\n\\n\"\n\ndef all_count():\n    global users\n    print(users)\n    yield \"data: \" + json.dumps(users) + \"\\n\\n\""
},
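The recognition loop above keeps a running score per person: every detection adds the classifier's probability for that name to `users[name]`, so an identity seen often and with high confidence accumulates the largest score. The tallying logic in isolation, as a dependency-free sketch (not the project's code):

```python
def update_scores(scores, name, proba):
    # one detection of `name` with classifier confidence `proba`
    scores[name] = scores.get(name, 0.0) + proba
    return scores

def most_likely(scores):
    return max(scores, key=scores.get) if scores else None

scores = {}
for name, proba in [("alice", 0.9), ("bob", 0.6), ("alice", 0.8)]:
    update_scores(scores, name, proba)

print(scores)               # {'alice': 1.7, 'bob': 0.6}
print(most_likely(scores))  # alice
```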
{
"alpha_fraction": 0.5289542078971863,
"alphanum_fraction": 0.556611955165863,
"avg_line_length": 31.16666603088379,
"blob_id": "fcf4174c8a8beb02e100f7a193d675a14c15aaee",
"content_id": "a65b8643681fd6b49fe2cc0e8ddad73a2e54f554",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1157,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 36,
"path": "/utils/imageEnhancer.py",
"repo_name": "MaheshSuranga/Overseer-Engine",
"src_encoding": "UTF-8",
"text": "import cv2\n\nclahe = cv2.createCLAHE(clipLimit=3.0, tileGridSize=(8,8))\n\ndef image_enhance(img):\n #-----Converting image to LAB Color model----------------------------------- \n lab= cv2.cvtColor(img, cv2.COLOR_BGR2LAB)\n # cv2.imshow(\"lab\",lab)\n\n #-----Splitting the LAB image to different channels-------------------------\n l, a, b = cv2.split(lab)\n # cv2.imshow('l_channel', l)\n # cv2.imshow('a_channel', a)\n # cv2.imshow('b_channel', b)\n\n #-----Applying CLAHE to L-channel-------------------------------------------\n\n cl = clahe.apply(l)\n # cv2.imshow('CLAHE output', cl)\n\n #-----Merge the CLAHE enhanced L-channel with the a and b channel-----------\n limg = cv2.merge((cl,a,b))\n # cv2.imshow('limg', limg)\n\n #-----Converting image from LAB Color model to RGB model--------------------\n final = cv2.cvtColor(limg, cv2.COLOR_LAB2BGR)\n return final\n\ndef image_sharpen(img):\n # Load the image\n # image = cv2.imread(\"35.0.jpg\")\n # Blur the image\n gauss = cv2.GaussianBlur(img, (7,7), 0)\n # Apply Unsharp masking\n unsharp_image = cv2.addWeighted(img, 2, gauss, -1, 0)\n return unsharp_image"
},
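The enhancer applies CLAHE on the L channel of LAB (contrast) and then unsharp masking (edges). Here are the same two steps end to end on a synthetic image, so the snippet runs without any file on disk; it assumes `opencv-python` and `numpy` are installed:

```python
import cv2
import numpy as np

# a dim, low-contrast test image (BGR, uint8)
img = np.full((100, 100, 3), 60, dtype=np.uint8)
img[25:75, 25:75] = 90

# CLAHE on the lightness channel only, so colours are preserved
clahe = cv2.createCLAHE(clipLimit=3.0, tileGridSize=(8, 8))
l, a, b = cv2.split(cv2.cvtColor(img, cv2.COLOR_BGR2LAB))
enhanced = cv2.cvtColor(cv2.merge((clahe.apply(l), a, b)), cv2.COLOR_LAB2BGR)

# unsharp masking: 2*image - 1*blurred amplifies edges
gauss = cv2.GaussianBlur(enhanced, (7, 7), 0)
sharpened = cv2.addWeighted(enhanced, 2, gauss, -1, 0)

print(img.mean(), enhanced.mean(), sharpened.mean())
```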
{
"alpha_fraction": 0.5244177579879761,
"alphanum_fraction": 0.5432006120681763,
"avg_line_length": 25.639999389648438,
"blob_id": "8baff471a7bb2f8718fc7fe22cb5dea10de067d2",
"content_id": "64b58a96606e7cca74a8ef7ff32458e60f99d35d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1331,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 50,
"path": "/utils/videoSender.py",
"repo_name": "MaheshSuranga/Overseer-Engine",
"src_encoding": "UTF-8",
"text": "import socket\nimport io\n\nSERVER_IP = '10.10.24.129'\nreply_gait = \"\"\n\ndef send_train_video(empName, filename):\n soc = socket.socket()\n soc.connect((SERVER_IP,8080))\n\n print('waiting for connection...')\n\n soc.send(empName.encode()) \n reply = ''\n reply = soc.recv(1024).decode()\n print (\"reply: \"+reply)\n\n if(reply == \"OK\"):\n with soc:\n # filename = input('enter filename to send: ')\n # with open(filename, 'rb') as file:\n # sendfile = file.read()\n bin_file = io.BytesIO(filename.read())\n soc.sendall(bin_file.read())\n print('file sent')\n\ndef send_surveillance_video(filename):\n global reply_gait\n reply_gait = ''\n soc = socket.socket()\n soc.connect((SERVER_IP,8081))\n\n print('waiting for connection...')\n\n with soc:\n # filename = vidFile\n with open(filename, 'rb') as file:\n bin_file = io.BytesIO(file.read())\n # sendfile = file.read()\n soc.send(str(file.tell()).encode())\n soc.sendall(bin_file.read())\n print('file sent')\n reply = ''\n reply = soc.recv(1024).decode()\n reply_gait = reply\n print (\"reply: \"+reply)\n\ndef gait_server_reply():\n global reply_gait\n yield \"data: \" + reply_gait + \"\\n\\n\""
},
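In `send_surveillance_video` the file size (`str(file.tell())`) and the payload go onto the same TCP stream with nothing separating them; since TCP has no message boundaries, the receiver cannot reliably tell where the size ends and the data begins. A common fix is a fixed-width length prefix. A minimal sketch, with a `socketpair` standing in for the real client/server pair (the helper names are illustrative):

```python
import socket
import struct

def send_blob(sock, payload: bytes):
    # 8-byte big-endian length, then the payload itself
    sock.sendall(struct.pack("!Q", len(payload)) + payload)

def recv_exact(sock, n: int) -> bytes:
    buf = b""
    while len(buf) < n:
        chunk = sock.recv(n - len(buf))
        if not chunk:
            raise ConnectionError("peer closed early")
        buf += chunk
    return buf

def recv_blob(sock) -> bytes:
    (n,) = struct.unpack("!Q", recv_exact(sock, 8))
    return recv_exact(sock, n)

a, b = socket.socketpair()
send_blob(a, b"fake video bytes")
print(recv_blob(b))  # b'fake video bytes'
```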
{
"alpha_fraction": 0.5699334144592285,
"alphanum_fraction": 0.5880114436149597,
"avg_line_length": 24.634145736694336,
"blob_id": "bc12a670418cd33bbf769f31ffc8dafee22ca8c9",
"content_id": "8dc4dea1e97347e4213c65b3926dc58e3efb72fc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1051,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 41,
"path": "/utils/frameDivider.py",
"repo_name": "MaheshSuranga/Overseer-Engine",
"src_encoding": "UTF-8",
"text": "import cv2\nimport numpy as np\nimport os\n\nfpsVideo=30\n\n\n# Playing video from file:\n# vidcap = cv2.VideoCapture('Asiri1.mp4')\ndef vid_to_images(vid_file, save_folder, fpsOutput=15):\n r=fpsVideo/fpsOutput\n vidcap = cv2.VideoCapture(vid_file)\n try:\n if not os.path.exists(save_folder):\n os.makedirs(save_folder)\n except OSError:\n print ('Error: Creating directory of data')\n \n success,image = vidcap.read()\n count = 0\n success = True\n\n while(success):\n # Capture frame-by-frame\n success,image = vidcap.read()\n\n # Saves image of the current frame in jpg file\n if(count%r==0):\n name = str(count/r) + '.jpg'\n print ('Creating...' + name)\n cv2.imwrite(os.path.join(save_folder, name), image)\n\n if cv2.waitKey(10) == 27: # exit if Escape is hit\n break\n\n # To stop duplicate images\n count += 1\n\n # When everything done, release the capture\n vidcap.release()\n cv2.destroyAllWindows()\n"
},
{
"alpha_fraction": 0.6288036108016968,
"alphanum_fraction": 0.6325836181640625,
"avg_line_length": 35.496551513671875,
"blob_id": "9df3bfa8ed83e5f48192ce8ad301901e24a4b782",
"content_id": "f54236e89cc36fff065bbe5893594247332495ec",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5291,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 145,
"path": "/app.py",
"repo_name": "MaheshSuranga/Overseer-Engine",
"src_encoding": "UTF-8",
"text": "from flask import Flask, render_template, request, Response, redirect, url_for, flash\nfrom werkzeug import secure_filename\nimport os\nimport utils.videoSender as video_sender\nimport utils.frameDivider as frame_divider\nimport core.extract_embeddings as embeddings_extractor\nimport core.train_model as classfier\nimport core.recognize_video as recognizer\nfrom flask_toastr import Toastr\nimport io\nimport pandas as pd\nimport threading\n\napp = Flask(__name__)\ntoastr = Toastr(app)\napp.secret_key = b'_5#y2L\"F4Q8z\\n\\xec]/'\n\nUPLOAD_FOLDER = 'dataset'\napp.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER\nALLOWED_EXTENSIONS = {'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'} \n\[email protected]('/')\ndef index():\n return render_template('home.html')\n\ndef allowed_file(filename):\n return '.' in filename and \\\n filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS\n\[email protected]('/upload/',methods = ['GET','POST'])\ndef upload_file():\n if request.method =='POST':\n empName = request.form['name']\n fileList = request.files.getlist('file[]')\n\n vidFile = request.files['file']\n vidFileName = 'video.mp4'\n vidFile.save(vidFileName)\n vidFile.stream.seek(0)\n # myfile = 'video.mp4'\n\n saveLocation = os.path.sep.join([app.config['UPLOAD_FOLDER'],empName])\n frame_divider.vid_to_images(vidFileName, saveLocation, 15)\n\n if fileList and len(fileList) != 1:\n print(fileList, len(fileList))\n if not (os.path.exists(saveLocation)):\n os.mkdir(saveLocation)\n for f in fileList:\n filename = secure_filename(f.filename.split('/')[1])\n f.save(os.path.join(saveLocation,filename))\n \n \n video_sender.send_train_video(empName, vidFile)\n flash(u'New employee was successfully registered!', 'success')\n return redirect(url_for('index'))\n # if 'file[]' not in request.files:\n # flash('No file part')\n # return \"No file part\"\n # file = request.files['file']\n # if file.filename == '':\n # print(\"********\")\n # flash('No selected file')\n # return \"No file selected\"\n # if file and allowed_file(file.filename):\n # flash(\"file attached\")\n # filename = secure_filename(file.filename)\n # file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))\n # return 'Home'\n # return redirect(url_for('uploaded_file',\n # filename=filename))\n # print(request.files.popitem)\n \n # return request\n\n return \"render_template('file_upload.html')\"\[email protected]('/extraction/',methods = ['GET','POST'])\ndef extraction():\n if request.method == 'POST':\n confidence = request.form['range']\n num_of_embeddings = embeddings_extractor.extract_face_embeddings(confidence)\n flash(u'{} facial embeddings were extracted!'.format(num_of_embeddings), 'success')\n return render_template('home.html', number=str(num_of_embeddings))\n\[email protected]('/train/',methods = ['GET','POST'])\ndef train():\n if request.method == 'POST':\n message = classfier.train_classifier()\n flash(u'The classifier was successfully trained!', 'success')\n return render_template('home.html', message=message)\n\[email protected]('/surveillance/',methods = ['GET','POST'])\ndef surveillance():\n if request.method == 'POST':\n confidence = request.form['range1']\n vidFile = request.files['file1']\n filename = 'surveillance.mp4'\n vidFile.save(filename)\n\n vidFile.stream.seek(0)\n myfile = 'surveillance.mp4'\n # vidFile = vidFile.read()\n # vidFile = io.BytesIO(vidFile)\n # vidFile = vidFile.read()\n # vidFile = float(vidFile.read())\n # vidFile = int.from_bytes(vidFile, \"little\")\n\n # recognizer.recognize(confidence, myfile)\n\n 
t1 = threading.Thread(target=recognizer.recognize, args=(confidence, myfile))\n t2 = threading.Thread(target=video_sender.send_surveillance_video, args=[myfile])\n t1.daemon = True\n t2.daemon = True\n t1.start()\n t2.start() \n return render_template('video.html')\n\[email protected](\"/video_feed\")\ndef video_feed():\n\t# return the response generated along with the specific media\n\t# type (mime type)\n\treturn Response(recognizer.generate(),\n\t\tmimetype = \"multipart/x-mixed-replace; boundary=frame\")\n\[email protected](\"/current_identification/\")\ndef current_identification():\n\t# return the response generated along with the specific media\n\t# type (mime type)\n if request.headers.get('accept') == 'text/event-stream':\n return Response(recognizer.current_identification(), mimetype ='text/event-stream')\n\[email protected](\"/all_count/\")\ndef all_count():\n\t# return the response generated along with the specific media\n\t# type (mime type)\n if request.headers.get('accept') == 'text/event-stream':\n return Response(recognizer.all_count(), mimetype ='text/event-stream')\n\[email protected](\"/gait_reply/\")\ndef gait_reply():\n if request.headers.get('accept') == 'text/event-stream':\n return Response(video_sender.gait_server_reply(), mimetype ='text/event-stream')\n\nif __name__ == '__main__':\n app.run(debug=True, threaded=True)"
}
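The `/current_identification/`, `/all_count/` and `/gait_reply/` routes stream Server-Sent Events, which a browser would consume with `EventSource`. A sketch of reading the same stream from Python with `requests`; the host and port assume the default local `app.run()`:

```python
import requests

with requests.get(
    "http://127.0.0.1:5000/all_count/",
    headers={"accept": "text/event-stream"},  # the route checks this header exactly
    stream=True,
) as resp:
    for line in resp.iter_lines(decode_unicode=True):
        if line and line.startswith("data: "):
            print("payload:", line[len("data: "):])
            break
```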
] | 5 |
simonovoleg/2good2share
|
https://github.com/simonovoleg/2good2share
|
ff229cc6e61ebdfc0cc8d72e5d6df1e1a024998b
|
af2c48226afb652351e087a94a60028b6295649d
|
705a2f3a7487d609fc41a38ab1c7059b6de365f3
|
refs/heads/master
| 2021-05-26T16:06:14.986622 | 2020-04-08T15:52:08 | 2020-04-08T15:52:08 | 254,130,792 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6472622752189636,
"alphanum_fraction": 0.6472622752189636,
"avg_line_length": 50.05882263183594,
"blob_id": "f2eea8e3968204f91b02ab39d0ea3d85452f2ddf",
"content_id": "db41936df962dd3d3dae1825668af27716e45e72",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1735,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 34,
"path": "/app/serializers.py",
"repo_name": "simonovoleg/2good2share",
"src_encoding": "UTF-8",
"text": "from rest_framework import serializers\n\nfrom app.models import Campaign\n\n# Serializers define the API representation.\nclass CategorySerializer(serializers.ModelSerializer):\n class Meta:\n model = Campaign\n fields = [ \n 'campaign_id', 'url', 'auto_fb_post_mode', 'collected_date', 'category_id', 'category', \n 'currencycode', 'current_amount', 'goal', 'donators', 'days_active', 'days_created', 'title',\n 'description', 'default_url', 'has_beneficiary', 'media_type', 'project_type',\n 'turn_off_donations', 'user_id', 'user_first_name', 'user_last_name', 'user_facebook_id',\n 'user_profile_url', 'visible_in_search', 'status', 'deactivated', 'state', 'is_launched',\n 'campaign_image_url', 'created_at', 'launch_date', 'campaign_hearts', 'social_share_total',\n 'social_share_last_update', 'location_city', 'location_country', 'location_zip', 'is_charity',\n 'charity_valid', 'charity_npo_id', 'charity_name', 'velocity'\n ]\n\n\n\n\n\n\n'''\n'campaign_id', 'url', 'auto_fb_post_mode', 'collected_date', 'category_id', 'category', \n'currencycode', 'current_amount', 'goal', 'donators', 'days_active', 'days_created', 'title',\n'description', 'default_url', 'has_beneficiary', 'media_type', 'project_type',\n'turn_off_donations', 'user_id', 'user_first_name', 'user_last_name', 'user_facebook_id',\n'user_profile_url', 'visible_in_search', 'status', 'deactivated', 'state', 'is_launched',\n'campaign_image_url', 'created_at', 'launch_date', 'campaign_hearts', 'social_share_total',\n'social_share_last_update', 'location_city', 'location_country', 'location_zip', 'is_charity',\n'charity_valid', 'charity_npo_id', 'charity_name', 'velocity'\n'''"
},
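The serializer above only defines the API representation; to expose it over HTTP it still has to be attached to a view. A hedged sketch of the usual read-only wiring with Django REST framework — the viewset name and URL prefix are assumptions, not part of this repository:

```python
from rest_framework import routers, viewsets

from app.models import Campaign
from app.serializers import CategorySerializer

class CampaignViewSet(viewsets.ReadOnlyModelViewSet):
    # list + retrieve endpoints for all campaigns
    queryset = Campaign.objects.all()
    serializer_class = CategorySerializer

router = routers.DefaultRouter()
router.register(r"campaigns", CampaignViewSet)
# in urls.py: path("api/", include(router.urls))
```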
{
"alpha_fraction": 0.7043795585632324,
"alphanum_fraction": 0.7043795585632324,
"avg_line_length": 37.279998779296875,
"blob_id": "c54842cc9b6588ad01bf74738a0a3cdf6a3a1e6b",
"content_id": "f5df77af781433dd26a97d936f7fdfb8c1294422",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1918,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 50,
"path": "/app/models.py",
"repo_name": "simonovoleg/2good2share",
"src_encoding": "UTF-8",
"text": "from django.db import models\n#from api.fields import JSONField\n\n# Create your models here.\n\nclass Campaign(models.Model):\n #category = models.ForeignKey(Category, on_delete=models.PROTECT)\n campaign_id = models.IntegerField(primary_key=True)\n url = models.TextField()\n auto_fb_post_mode = models.BooleanField()\n collected_date = models.TextField()\n category_id = models.IntegerField()\n category = models.TextField()\n currencycode = models.TextField()\n current_amount = models.IntegerField()\n goal = models.IntegerField()\n donators = models.IntegerField()\n days_active = models.IntegerField()\n days_created = models.IntegerField()\n title = models.TextField()\n description = models.TextField()\n default_url = models.TextField()\n has_beneficiary = models.BooleanField()\n media_type = models.IntegerField()\n project_type = models.IntegerField()\n turn_off_donations = models.BooleanField()\n user_id = models.IntegerField()\n user_first_name = models.TextField()\n user_last_name = models.TextField()\n user_facebook_id = models.TextField()\n user_profile_url = models.TextField()\n visible_in_search = models.BooleanField()\n status = models.IntegerField()\n deactivated = models.BooleanField()\n state = models.TextField()\n is_launched = models.BooleanField()\n campaign_image_url = models.TextField()\n created_at = models.TextField()\n launch_date = models.TextField()\n campaign_hearts = models.IntegerField()\n social_share_total = models.IntegerField()\n social_share_last_update = models.TextField()\n location_city = models.TextField()\n location_country = models.TextField()\n location_zip = models.TextField()\n is_charity = models.BooleanField()\n charity_valid = models.BooleanField()\n charity_npo_id = models.IntegerField()\n charity_name = models.TextField()\n velocity = models.IntegerField()\n "
}
] | 2 |
anandhere8/Book-Store
|
https://github.com/anandhere8/Book-Store
|
7b48a12db72e0c81bfa8cef50d2ca44cf7d9960b
|
946507bc0fcdfed60ce0e0c4371b8a7211a0890b
|
7f34b06d51a6d7858e830bca3f52c50887b94c73
|
refs/heads/master
| 2020-04-16T04:18:00.193215 | 2019-01-13T13:56:01 | 2019-01-13T13:56:01 | 165,261,629 | 2 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.597966730594635,
"alphanum_fraction": 0.6155267953872681,
"avg_line_length": 22.521739959716797,
"blob_id": "051a57cea6ba2b9e7855a9bb4d36f0875dfeed85",
"content_id": "c232367daab59e415f8ddb49c61f59c709fd0720",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1082,
"license_type": "permissive",
"max_line_length": 164,
"num_lines": 46,
"path": "/README.md",
"repo_name": "anandhere8/Book-Store",
"src_encoding": "UTF-8",
"text": "# Book-Store\n\n\nThis code is a nice simple application for keeping the records of books. Further code can be easily manipulated for the use of one individual person or organisation\n \n## How to use?\n### You must have installed following things\n> 1. Python\n> 2. Pip3\n> 3. Tkinter Lib\n> 4. Sqlite3 Lib\n\n## 1.Installing python\n ### For Debian based System\n ```\n sudo apt-get install python3\n ```\n ### For Arch based system\n ```\n sudo pacman -S python3\n ```\n## 2.Installing pip3\n ### For Debian based System\n ```\n sudo apt-get -y install python3-pip\n ```\n ### For Arch based system\n ```\n sudo pacman -S python3-pip\n ```\n## 3,4. For libs\n ```\n pip install (name of lib) \n ```\n \n## After installing all of the above. Open terminal or cmd .\n ```\n git clone https://github.com/anandhere8/Book-Store.git\n cd book-store/\n ```\n #### run the bookstore.py via python3\n ``` python3 bookstore.py ```\n ```\n \n # Output\n \n"
},
{
"alpha_fraction": 0.594394326210022,
"alphanum_fraction": 0.6221005320549011,
"avg_line_length": 26.972972869873047,
"blob_id": "dc838ca9b727262657160e5ab7ec7aab20e7500f",
"content_id": "4f4234f64505962859c5b24fb1695506772ad84c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3104,
"license_type": "permissive",
"max_line_length": 81,
"num_lines": 111,
"path": "/bookstore.py",
"repo_name": "anandhere8/Book-Store",
"src_encoding": "UTF-8",
"text": "from tkinter import *\nimport backend\n\nwin=Tk()\n\nwin.title(\"BOOK STORE\")\n\ntitletext=Label(win,text=\"Title\")\nyeartext=Label(win,text=\"Year\")\nauthortext=Label(win,text=\"Author\")\nisbntext=Label(win,text=\"ISBN\")\ntitletext.grid(row=0,column=0)\nyeartext.grid(row=1,column=0)\nauthortext.grid(row=0,column=2)\nisbntext.grid(row=1,column=2)\n\n\n############ SELECTING_CURSOR ##################\n\ndef get_select(event):\n index=t1.curselection()[0]\n selected_tuple=t1.get(index)\n global id\n id=selected_tuple[0]\n titlebox.delete(0,END)\n titlebox.insert(END,selected_tuple[1])\n authorbox.delete(0,END)\n authorbox.insert(END, selected_tuple[2])\n yearbox.delete(0,END)\n yearbox.insert(END, selected_tuple[3])\n isbnbox.delete(0,END)\n isbnbox.insert(END, selected_tuple[4])\n\n################################################\n\n ################### VARIABLES #################\ntitle=StringVar()\nauthor=StringVar()\nisbn=StringVar()\nyear=StringVar()\n################################################\n\n\n####################EXECUTABLE_FUNCTIONS########\n\ndef view_button():\n t1.delete(0,END)\n for row in backend.view():\n t1.insert(END,row)\n\ndef search_button():\n t1.delete(0,END)\n for row in backend.search(title.get(),author.get(),year.get(),isbn.get()):\n t1.insert(END,row)\n view_button()\n\ndef entry_button():\n backend.entry(title.get(),author.get(),year.get(),isbn.get())\n t1.delete(0,END)\n t1.insert(END,title.get(),author.get(),year.get(),isbn.get())\n view_button()\n\ndef update_button():\n backend.update(id,title.get(),author.get(),year.get(),isbn.get())\n view_button()\n\ndef delete_button():\n backend.delete(id)\n view_button()\n\n#################################################\n\n#_______________________ENTRY BOXES _______________\ntitlebox=Entry(win,textvariable=title)\nyearbox=Entry(win,textvariable=year)\nauthorbox=Entry(win,textvariable=author)\nisbnbox=Entry(win,textvariable=isbn)\ntitlebox.grid(row=0,column=1)\nyearbox.grid(row=1,column=1)\nauthorbox.grid(row=0,column=3)\nisbnbox.grid(row=1,column=3)\n\n#__________________________________________________\n\n\n#________________________BUTTONS_______________________\n\nview=Button(win,text=\"View All\",height=1,width=12,command=view_button)\nsearch=Button(win,text=\"Search Entry\",height=1,width=12,command=search_button)\nadd=Button(win,text=\"Add Entry\",height=1,width=12,command=entry_button)\nupdate=Button(win,text=\"Update Selected\",height=1,width=12,command=update_button)\ndelete=Button(win,text=\"Delete Selected\",height=1,width=12,command=delete_button)\nclose=Button(win,text=\"Close\",height=1,width=12,command=win.destroy)\nview.grid(row=2,column=3)\nsearch.grid(row=3,column=3)\nadd.grid(row=4,column=3)\nupdate.grid(row=5,column=3)\ndelete.grid(row=6,column=3)\nclose.grid(row=7,column=3)\n\n#__________________________________________________\n\nt1=Listbox(win,height=9,width=35)\nt1.grid(row=2,column=0,columnspan=2,rowspan=6)\ns1=Scrollbar(win)\ns1.grid(column=2,row=2,rowspan=6)\nt1.configure(yscrollcommand=s1.set)\ns1.configure(command=t1.yview)\nt1.bind('<<ListboxSelect>>',get_select)\n\nwin.mainloop()"
},
{
"alpha_fraction": 0.5881890058517456,
"alphanum_fraction": 0.5937007665634155,
"avg_line_length": 25.39583396911621,
"blob_id": "bfa1e742d2df06895aec9fb6884e10de39af2b93",
"content_id": "89e45091166588d6ef89ac253eee2b6378c26129",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1270,
"license_type": "permissive",
"max_line_length": 125,
"num_lines": 48,
"path": "/backend.py",
"repo_name": "anandhere8/Book-Store",
"src_encoding": "UTF-8",
"text": "import sqlite3\ndef connect():\n conn=sqlite3.connect(\"book.db\")\n cur=conn.cursor()\n cur.execute(\"CREATE TABLE IF NOT EXISTS book (Id INTEGER PRIMARY KEY,Title text,Author text,Year INTEGER,ISBN INTEGER)\")\n conn.commit()\n conn.close()\n\ndef view():\n conn = sqlite3.connect(\"book.db\")\n cur = conn.cursor()\n cur.execute(\"SELECT * FROM book \")\n all=cur.fetchall()\n conn.close()\n return all\n\n\n\ndef entry(t,a,y,i):\n conn = sqlite3.connect(\"book.db\")\n cur = conn.cursor()\n cur.execute(\"INSERT INTO book VALUES(NULL,?,?,?,?)\",(t,a,y,i))\n conn.commit()\n conn.close()\n\ndef search(t=\"\",a=\"\",y=\"\",i=\"\"):\n conn = sqlite3.connect(\"book.db\")\n cur = conn.cursor()\n cur.execute(\"SELECT * FROM book WHERE Title=? OR Author=? OR Year=? OR ISBN=?\",(t,a,y,i))\n all = cur.fetchall()\n conn.close()\n return all\n\ndef delete(id):\n conn = sqlite3.connect(\"book.db\")\n cur = conn.cursor()\n cur.execute(\"DELETE FROM book WHERE Id=? \",(id,))\n conn.commit()\n conn.close()\n\ndef update(id,t,a,y,i):\n conn = sqlite3.connect(\"book.db\")\n cur = conn.cursor()\n cur.execute(\"update book set Title=?,Author=?,Year=?,ISBN=? where id=?\",(t,a,y,i,id))\n conn.commit()\n conn.close()\n\nconnect()\n\n\n\n"
}
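All queries in backend.py go through "?" placeholders, which is what keeps user input (titles, authors) from being interpreted as SQL. The same pattern, runnable against an in-memory database:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
cur = conn.cursor()
cur.execute("CREATE TABLE book (Id INTEGER PRIMARY KEY, Title text, "
            "Author text, Year INTEGER, ISBN INTEGER)")
# the tuple values are bound by sqlite3, never spliced into the SQL string
cur.execute("INSERT INTO book VALUES(NULL,?,?,?,?)", ("Dune", "Herbert", 1965, 1234))
cur.execute("SELECT * FROM book WHERE Author=?", ("Herbert",))
print(cur.fetchall())  # [(1, 'Dune', 'Herbert', 1965, 1234)]
conn.close()
```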
] | 3 |
AlexTail/python-tasks
|
https://github.com/AlexTail/python-tasks
|
059b656d425faf8ecde7061a8fa890125250bb12
|
6b118f3c4881a00be3b8b50697c8d91b36248312
|
a93ef4e57ac42f1fdc9ba09096a6c1403db71516
|
refs/heads/master
| 2020-05-30T06:04:13.764267 | 2019-09-16T12:42:45 | 2019-09-16T12:42:45 | 189,557,492 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5130434632301331,
"alphanum_fraction": 0.5681159496307373,
"avg_line_length": 20.59375,
"blob_id": "db4793745b0f3b18c3eb7ee2da60715ae63f8c95",
"content_id": "3f1eaa1bd480a2aeeb33bb2087c9224eae950b83",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 711,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 32,
"path": "/task-pro-2.py",
"repo_name": "AlexTail/python-tasks",
"src_encoding": "UTF-8",
"text": "\"\"\"\n\nRu: Двоичный (бинарный) поиск\n\nEng: Binary Search\n\n\"\"\"\n\n\ndef binarysearch(mylist, iskat, start, stop):\n if start > stop:\n return False\n else:\n mid = (start + stop) // 2\n if iskat == mylist[mid]:\n return mid\n elif iskat < mylist[mid]:\n return binarysearch(mylist, iskat, start, mid - 1)\n else:\n return binarysearch(mylist, iskat, mid + 1, stop)\n\nmylist = [10, 12, 13, 15, 20, 24, 27, 33, 42, 51, 57, 68, 70, 77, 79, 81]\niskat = 77\nstart = 0\nstop =len(mylist)\n\nx = binarysearch(mylist, iskat, start, stop)\n\nif x == False:\n print(\"Item\", iskat, \"Not Found!\")\nelse:\n print(\"Item\", iskat, \"Found at index:\", x)"
},
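The same search can be written iteratively, which avoids Python's recursion limit on very large lists, and the result can be cross-checked against the standard library's `bisect`:

```python
import bisect

def binary_search(items, target):
    lo, hi = 0, len(items) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        if items[mid] == target:
            return mid
        if target < items[mid]:
            hi = mid - 1
        else:
            lo = mid + 1
    return None  # not found

data = [10, 12, 13, 15, 20, 24, 27, 33, 42, 51, 57, 68, 70, 77, 79, 81]
assert binary_search(data, 77) == bisect.bisect_left(data, 77) == 13
assert binary_search(data, 11) is None
print("ok")
```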
{
"alpha_fraction": 0.7950819730758667,
"alphanum_fraction": 0.7950819730758667,
"avg_line_length": 39.66666793823242,
"blob_id": "8c2d734d306ee1ddc249a19fa09d1c0f91e8811a",
"content_id": "8acfbfe44b4d893285acc951abcb34ca744dae96",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 162,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 3,
"path": "/README.md",
"repo_name": "AlexTail/python-tasks",
"src_encoding": "UTF-8",
"text": "RU: В данном разделе находятся задачи на Python с решением.\n\nEN: This section contains tasks on Python with the solution.\n"
},
{
"alpha_fraction": 0.44655171036720276,
"alphanum_fraction": 0.4683907926082611,
"avg_line_length": 21.597402572631836,
"blob_id": "0095b05ec202acc4c2fb1b0e12e43577f634a2d7",
"content_id": "8dda737315023a09e3277c166ec9af384cc0f722",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2001,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 77,
"path": "/task-2.py",
"repo_name": "AlexTail/python-tasks",
"src_encoding": "UTF-8",
"text": "\"\"\"\nИмеется положительное число. Необходимо найти сумму его цифр. Если\nпередается не число, вывести 'Это не число!'\n\"\"\"\n\n# Если some_number == 45\n# то 9\n\n# Если some_number == 425\n# то 11\n\n# Если some_number == 1231421\n# то 14\n\n# Если some_number == {'key' : 'value'}\n# то 'Это не число!'\n\n\n\n# Решение № 1 ----------------------------------------------------------\nnum = input(\"1) Введите число: \")\n\nif num.isnumeric():\n print('1) Cумма чисел: ', sum(map(int, num)))\nelse:\n print(\"1) Это не число!\")\nprint('\\n')\n\n\n\n# Решение № 2 ----------------------------------------------------------\n\nsome_number = 197\n\ndef sum_of_digits(number: int):\n if str(number).isnumeric():\n print(type(number))\n digit_list = map(int, str(number))\n print('2) Cумма чисел: ', sum(digit_list))\n else:\n print(\"2) Это не число!\")\n \nsum_of_digits(some_number)\nprint('\\n')\n\n\n\n# Решение № 3 ----------------------------------------------------------\n\nsome_num = 197\n\ndef sum_of_digits(number: int):\n if str(number).isnumeric():\n print('3) Cумма чисел: ', sum([int(i) for i in str(number)]))\n else:\n print(\"3) Это не число!\")\nsum_of_digits(some_num)\nprint('\\n')\n\n\n\n# Решение № 4 ----------------------------------------------------------\n\nsome_number = input(\"4) Введите число: \")\n\ndef sum_of_numbers(number: str):\n if number.isnumeric():\n len_of_numbers = len(number)\n result = 0\n for n in range(len_of_numbers):\n result += int(number[n])\n print('4) Сумма чисел =', result)\n else:\n print(\"4) Это не число!\")\n\n\nsum_of_numbers(some_number)\n"
},
{
"alpha_fraction": 0.36975857615470886,
"alphanum_fraction": 0.4459974467754364,
"avg_line_length": 11.709677696228027,
"blob_id": "b938f64e676c235fc4f6706a725f60a72ef4a279",
"content_id": "879773ff33c7d57a0ed2a000882d8eef34e66b41",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 813,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 62,
"path": "/task-pro-1.py",
"repo_name": "AlexTail/python-tasks",
"src_encoding": "UTF-8",
"text": "\"\"\"\n\nРекурсия: Сумма Чисел, Факториал, Фибоначчи\n\nRecursion Functions\n\n1) privet n times\n2) Sum 1 + 2 + 3 + 4 + 5 = 15\n3) Factorial 5! = 1 * 2 * 3 * 4 * 5 = 120\n4) Fibonacci 0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55\n\n\"\"\"\n\n\n\ndef privet(x):\n \"\"\" 1) \"\"\"\n if x == 0:\n return\n else:\n print('Hello World', x)\n privet(x-1)\n\nprivet(10)\n\n\n\ndef sum(x):\n \"\"\" 2) \"\"\"\n if x == 0:\n return 0\n elif x == 1:\n return 1\n else:\n return x + sum(x-1)\n\nz = sum(5)\nprint(z)\n\n\n\ndef factorial(x):\n \"\"\" 3) \"\"\"\n if x == 0:\n return 1\n else:\n return x * factorial(x-1)\n\nprint(factorial(5))\n\n\n\ndef fi(x):\n \"\"\" 4) \"\"\"\n if x == 0:\n return 0\n elif x == 1:\n return 1\n else:\n return fi(x-1) + fi(x-2)\n\nprint(fi(7))"
},
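The naive `fi(x)` above recomputes the same subproblems exponentially often, which is why it is only called with a small argument. Memoising the results makes it linear in `n`; `functools.lru_cache` does that in one line:

```python
from functools import lru_cache

@lru_cache(maxsize=None)
def fib(n):
    # each fib(k) is computed once and then served from the cache
    if n < 2:
        return n
    return fib(n - 1) + fib(n - 2)

print([fib(i) for i in range(11)])  # [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
print(fib(200))                     # instant, unlike the naive version
```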
{
"alpha_fraction": 0.5079726576805115,
"alphanum_fraction": 0.5649202466011047,
"avg_line_length": 19.952381134033203,
"blob_id": "f22aad3f92a44fac8fb3ad10f235911dd7083a79",
"content_id": "f00b5942c752eede3fef6c047c52ffbefa58f0b9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 439,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 21,
"path": "/task-pro-3.py",
"repo_name": "AlexTail/python-tasks",
"src_encoding": "UTF-8",
"text": "\"\"\"\n\nRu: Сортировка Пузырьком \n\nEng: Bubble sort\n\n\"\"\"\n\noldlist = [10, 5, 92, 85, 24, 23, 8, 203, 14, 67]\n\ndef bubble_sort(mylist):\n last_item = len(mylist) - 1\n for z in range(0, last_item):\n for x in range(0, last_item-z):\n if mylist[x] > mylist[x+1]:\n mylist[x], mylist[x+1] = mylist[x+1], mylist[x]\n return mylist\n\nprint('Oldlist =', oldlist)\nnewlist = bubble_sort(oldlist).copy()\nprint('Newlist =', newlist)"
},
{
"alpha_fraction": 0.465568870306015,
"alphanum_fraction": 0.5359281301498413,
"avg_line_length": 20.45161247253418,
"blob_id": "62d198692a8f48d12a0359c0955584cfdbef38c4",
"content_id": "6a683c1c2a266559d0098bcb74c1c7e92ae1a386",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 809,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 31,
"path": "/task-3.py",
"repo_name": "AlexTail/python-tasks",
"src_encoding": "UTF-8",
"text": "\"\"\"\nДано целое число some_number (от 1 до 1000), которое вводится с клавиатуры. Вернуть\nнаибольшее число, содержащее ровно some_number цифр.\n\"\"\"\n\n\n# if some_number == 3\n# return 999\n\n# if some_number == 5\n# return 99999\n\n# if some_number == 1\n# return 9\n\n# if some_number == 12\n# return 999999999999\n\n\n# Решение №1 -----------------------------------------------------------\n\nprint(int(\"9\"*int(input('1) Введите целое число до 1000: '))))\n\n\n\n# Решение №2 -----------------------------------------------------------\n\ndef max_number(some_number: int) -> int:\n return 10 ** some_number - 1\n\nprint(max_number(int(input('2) Введите целое число до 1000: '))))\n\n\n\n"
},
{
"alpha_fraction": 0.5621734857559204,
"alphanum_fraction": 0.57523512840271,
"avg_line_length": 28.015151977539062,
"blob_id": "2f1c2688bbfcbab926d0253ef7420d4392947118",
"content_id": "f0734d87201a971ed0cc3aeee430464ec4f2e6e9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1957,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 66,
"path": "/task-pro-5.py",
"repo_name": "AlexTail/python-tasks",
"src_encoding": "UTF-8",
"text": "\"\"\"\n\nRu: скрипт удаления старых файлов и пустых директорий\n\nEng: script to delete old files and empty directories\n\n\"\"\"\n\nimport os\nimport time\n\nDAYS = 5\nFOLDERS = [\"C:\\/testDelete\\/12\"]\n\nprint(FOLDERS)\n\nTOTAL_DELETED_SIZE = 0\nTOTAL_DELETED_FILE = 0\nTOTAL_DELETED_DIRS = 0\n\nnowTime = time.time() \noldTime = nowTime - 60*60*24*DAYS \n\ndef delete_old_files(folder):\n \"\"\"Delete files older than X DAYS\"\"\"\n global TOTAL_DELETED_FILE\n global TOTAL_DELETED_SIZE\n for path, dirs, files in os.walk(folder): \n for file in files: \n fileName = os.path.join(path, file)\n fileTime = os.path.getmtime(fileName)\n if fileTime < oldTime:\n sizeFile = os.path.getsize(fileName)\n TOTAL_DELETED_SIZE += sizeFile \n TOTAL_DELETED_FILE += 1 \n print(\"Deleting file: \" + str(fileName))\n os.remove(fileName) \n\ndef delete_empty_dir(folder):\n global TOTAL_DELETED_DIRS\n empty_folders_in_this_run = 0\n for path, dirs, files in os.walk(folder):\n if (not dirs) and (not files):\n TOTAL_DELETED_DIRS += 1\n empty_folders_in_this_run += 1\n print(\"Deleting empty dir: \" + str(path))\n os.rmdir(path)\n if empty_folders_in_this_run > 0:\n delete_empty_dir(folder)\n\n\nstarttime = time.asctime() \n\nfor folder in FOLDERS:\n delete_old_files(folder) \n delete_empty_dir(folder) \n\nfinishtime = time.asctime()\n\nprint(\"--- --- --- --- --- --- --- --- --- --- ---\")\nprint(\"Start time: \" + str(starttime))\nprint(\"Total deleted size: \" + str(int(TOTAL_DELETED_SIZE/1024/1024)) + \"Mb\")\nprint(\"Total deleted files: \" + str(TOTAL_DELETED_FILE))\nprint(\"Total deleted empty folders: \" + str(TOTAL_DELETED_DIRS))\nprint(\"Finish time: \" + str(finishtime))\nprint(\"--- --- --- --- --- --- --- --- --- --- ---\")"
},
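A dry-run variant of the same age scan, using `pathlib` instead of `os.walk`: it only reports what would be deleted, which is a safer first step before running a destructive script like the one above (the folder is an illustrative placeholder):

```python
import time
from pathlib import Path

DAYS = 5
cutoff = time.time() - 60 * 60 * 24 * DAYS  # anything modified before this is "old"

def old_files(folder):
    for p in Path(folder).rglob("*"):
        if p.is_file() and p.stat().st_mtime < cutoff:
            yield p

for p in old_files("."):
    print("would delete:", p)
```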
{
"alpha_fraction": 0.5245746970176697,
"alphanum_fraction": 0.5463138222694397,
"avg_line_length": 23.627906799316406,
"blob_id": "b9020a99dff31d0aa3c2719283a12de6209e7491",
"content_id": "f83cd2324ed3e8c239ea46aa0d606e95eb640a9f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1080,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 43,
"path": "/task-pro-4.py",
"repo_name": "AlexTail/python-tasks",
"src_encoding": "UTF-8",
"text": "\"\"\"\n\nRu: Скрипт очистки лог-файлов\n\nEng: Script cleaning log-files\n\n\"\"\"\n\n#!/bin/pithon3\n\n\n\nimport shutil\nimport os\nimport sys\n\n# 4.py mylog.txt 10 5\n\nif(len(sys.argv) < 4):\n print(\"Missing arguments! Usage is script.py 10 5\")\n exit(1) \n\nfile_name = sys.argv[1]\nlimitsize = int(sys.argv[2])\nlogsnumber = int(sys.argv[3])\n\nif(os.path.isfile(file_name) == True):\n logfile_size = os.stat(file_name).st_size\n logfile_size = logfile_size / 1024 \n\n if(logfile_size >= limitsize):\n if(logsnumber > 0):\n for currentFileNum in range(logsnumber, 1, -1):\n src = file_name + \"_\" + str(currentFileNum-1)\n dst = file_name + \"_\" + str(currentFileNum)\n if(os.path.isfile(src) == True):\n shutil.copyfile(src, dst)\n print(\"Copied: \" + src + \" to \" + dst)\n\n shutil.copyfile(file_name, file_name + \"_1\")\n print(\"Copied: \" + file_name + \" to \" + file_name + \"_1\")\n myfile = open(file_name, 'w') \n myfile.close()"
},
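For logs produced via the `logging` module, the standard library already ships this size-triggered rotation with numbered backups, so a hand-rolled copier is only needed for files written by other programs:

```python
import logging
from logging.handlers import RotatingFileHandler

logger = logging.getLogger("app")
# rotate once mylog.txt exceeds 10 KiB, keep mylog.txt.1 .. mylog.txt.5
handler = RotatingFileHandler("mylog.txt", maxBytes=10 * 1024, backupCount=5)
logger.addHandler(handler)
logger.warning("hello")
```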
{
"alpha_fraction": 0.6612716913223267,
"alphanum_fraction": 0.6890173554420471,
"avg_line_length": 25.090909957885742,
"blob_id": "912892c85700397ca3cd2d6693405ea04d46159e",
"content_id": "e7bd11655ad308be5be02b3aa444d40a6e45c0bf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1056,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 33,
"path": "/task-1.py",
"repo_name": "AlexTail/python-tasks",
"src_encoding": "UTF-8",
"text": "\"\"\"\nДан массив целых чисел. Написать функцию, которая определяет, содержит\nли массив какие-либо дубликаты. Возвращает True если содержит дубликаты,\nи возвращает False, если не содержит. В массиве могут быть данные False или True.\n\"\"\"\n\n# Если\n# contains_duplicates = [1, 2, 3, 1]\n# то\n# contains_duplicates(example_list) = True\n\n# Если\n# contains_duplicates = [1, 2, 3, 1, False, False]\n# то\n# contains_duplicates(example_list) = True\n\n# Если\n# contains_duplicates = [1, 23, 213, 125152, True]\n# то\n# contains_duplicates(example_list) = False\n\n# Если\n# contains_duplicates = []\n# то\n# contains_duplicates(example_list) = False\n\ndef contains_duplicates(example_list: list) -> bool:\n id_list = [id(i) for i in example_list]\n return len(set(id_list)) != len(example_list)\n\nexample_list = [1, 2, 3, 1]\n\nprint(contains_duplicates(example_list)) \n\n"
}
] | 9 |
minh-le1994/MinhLe
|
https://github.com/minh-le1994/MinhLe
|
8390a85c014421922cc0374475ce72a634e269a8
|
b9392b1cffe41486a9f296d83f11123c5bded8cb
|
6675cc79f2bad3cf8d4bbb4517c4660a14dcd425
|
refs/heads/master
| 2022-12-11T23:10:14.188875 | 2020-07-12T16:00:43 | 2020-07-12T16:00:43 | 250,595,997 | 0 | 0 | null | 2020-03-27T17:11:56 | 2020-07-12T16:00:56 | 2022-09-23T22:39:29 |
HTML
|
[
{
"alpha_fraction": 0.7461072206497192,
"alphanum_fraction": 0.763076901435852,
"avg_line_length": 46.043861389160156,
"blob_id": "88990f3936f6cf35c98b23ce44806776834fa951",
"content_id": "f1e76dc393db2a5bba74c3efccdf1ab72e060e72",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "RMarkdown",
"length_bytes": 10725,
"license_type": "no_license",
"max_line_length": 519,
"num_lines": 228,
"path": "/University/Introduction Data Engineering/Abgabe/Allgemeine_Aufgaben_Gruppe_25.Rmd",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "---\ntitle: \"General Tasks\"\noutput: html_document\n---\nTo solve the general tasks following packages were needed:\n```{r, eval = FALSE}\n#In case you don't have the following packages installed, run the follwoing code:\ninstall.packages(\"tidyverse\")\ninstall.packages(\"knitr\")\n\n```\n\n```{r, message = FALSE, warning = FALSE}\nlibrary(tidyverse)\nlibrary(knitr)\n```\n# Task 1\nFirst, we have to load the two required datasets. Before we can merge the two tables, we check if the columns that carry the same title also contain the same data. After we have done that, we can join the tables.\nOur goal is to calculate the delay in delivery (\"Lieferverzug\"). In order to do that we have to subtract the production date from the goods-in date. Once we have this information, we can identify the distribution, calculate the maximum, minimum and average delay and plot the distribution.\n\nBefore answering this task we first need to create the dataset for the logistics delay: \"Logistikverzug\". The dataset was created with the following code:\n\n```{r, warning = FALSE}\n#Load the documents\nkomponent <- read.csv(\"Data/Logistikverzug/komponente_k7.csv\", header=TRUE, sep=\";\", stringsAsFactors = FALSE)\nlogistik <- read.csv(\"Data/Logistikverzug/Logistikverzug_K7.csv\", header=TRUE, sep=\";\", stringsAsFactors = FALSE)\n\n#Join the data\nlogistik_komponent <- left_join(logistik,komponent,by=c(\"X\",\"IDNummer\",\"Werksnummer\",\"Herstellernummer\"),suffix = c(\".1\", \".2\"))\n\n#Change the format of the Data Wareneingang and Produktionsdatum to a date attribute\nlogistik_komponent$Wareneingang<-as.Date(logistik_komponent$Wareneingang,\"%d.%m.%Y\")\nlogistik_komponent$Produktionsdatum<-as.Date(logistik_komponent$Produktionsdatum,\"%Y-%m-%d\")\n\nlogistikverzug <- mutate(logistik_komponent, Logistikverzug = Wareneingang - Produktionsdatum) %>%\n select(IDNummer, Logistikverzug)\n```\n\nThe dataset used for solving this task can be seen in the following:\n```{r, results = 'asis', echo = FALSE}\n\nkable(logistikverzug[1:6, ], caption = \"Logistic Delay of the Component K7\")\n\n```\n\n\na) **The distribution of the \"Logistics Delay\"**\n\nThe distribution of the logistic delay was tested visually with the help of a Q-Q-Plot. Based on the following histogram we assuming that the logistic delay is a normal distribution.\n\n```{r}\nhist(as.numeric(logistikverzug$Logistikverzug), xlab = \"Delay\", main = \"Histogramm of Logistical Delay\")\n```\n\nBased on the assumption we tested if the delay is a normal distribution. To do that we plotted a Q-Q-Plot to decide if our assumption is right. The Q-Q Plot compares the data of the logistical delay with the theoretical quantiles. If the empiric and the theoretical quantiles of the respective distribution are close to each other the empiric data is distributed like the assumed distribution. Close to each other means here that the data is close to the diagonal you can see in the plot below.\n\n```{r}\nx <- scale(logistikverzug$Logistikverzug)\nqqnorm(x)\nqqline(x)\n```\n\nIn this case you can see that the data is quite closed to the diagonal which shows the theoretical quantiles. 
**The delay is normally distributed**.\n\n\nb) **Minimum and maximum of the difference between outgoing and incoming goods**\n\nHere we will identify the minimum and maximum value of the logistic delay.\n\n```{r}\n#Identify the minimum and maximum delay of the goods\nmin(logistikverzug$Logistikverzug)\nmax(logistikverzug$Logistikverzug)\n\n```\n\nThe maximum time difference between in- and outgoing goods is **13 days**.\nThe minimum time difference between in- and outgoing goods is **2 days**.\n\n\nc) **The mean of the \"Logistics Delay\"**\n\nHere we will identify the mean of the logistic delay.\n\n```{r}\nmean(logistikverzug$Logistikverzug)\n```\n\nThe average logistical delay of the component K7 is **5.080437 days**. \n\n\nd) **Plot of the \"Logistics Delay\"**\n\nIn the following you can find the distribution of the logistic delay as a histogram.\n\n```{r}\nhist(as.numeric(logistikverzug$Logistikverzug), xlab = \"Delay\", main = \"Histogram of Logistical Delay\")\n```\n\n# Task 2\nThe concept is called a relational database system.\n\nThe import of small tables is relatively easy with the standard procedures of R. The import of large tables, however, typically results in very long import durations and a high working memory consumption. In accordance with the relational database system it is, therefore, advisable to only import data segments that are necessary for the requested analysis.\n\n# Task 3\nWe are looking for the number of cars which are registered in Dortmund and have the K7 component built into them. For that, we first import the datasets which we need to extract this information.\n\nImport the relevant datasets:\n\n```{r, results = 'hide', message = FALSE, warning = FALSE}\n#Import the relation tables to connect them with the cars\nrelation_oem1_11 <- read_csv2(\"Data/Fahrzeug/Bestandteile_Fahrzeuge_OEM1_Typ11.csv\")\nrelation_oem1_12 <- read_csv2(\"Data/Fahrzeug/Bestandteile_Fahrzeuge_OEM1_Typ12.csv\")\nrelation_oem2_21 <- read_csv2(\"Data/Fahrzeug/Bestandteile_Fahrzeuge_OEM2_Typ21.csv\")\nrelation_oem2_22 <- read_csv2(\"Data/Fahrzeug/Bestandteile_Fahrzeuge_OEM2_Typ22.csv\")\n\nk7 <- as_tibble(read.table(\"Data/Komponente/Komponente_K7.txt\", sep = \"\\t\", stringsAsFactors = FALSE))\n\nzulassung <- read_csv2(\"Data/Zulassungen/Zulassungen_alle_Fahrzeuge.csv\")\n\n```\n\nThe datasets will be reduced to the relevant information in order to join them afterward. In all tables we just need the ID numbers of the component or the car. An exception is the dataset containing the registrations. In this dataset, we will also keep the location of the registration to solve this task. 
# Task 3\nWe are looking for the number of cars which are registered in Dortmund and have the K7 component built into them. For that, we first import the datasets which we need to extract this information.\n\nImport the relevant datasets:\n\n```{r, results = 'hide', message = FALSE, warning = FALSE}\n#Import the relation tables to connect them with the cars\nrelation_oem1_11 <- read_csv2(\"Data/Fahrzeug/Bestandteile_Fahrzeuge_OEM1_Typ11.csv\")\nrelation_oem1_12 <- read_csv2(\"Data/Fahrzeug/Bestandteile_Fahrzeuge_OEM1_Typ12.csv\")\nrelation_oem2_21 <- read_csv2(\"Data/Fahrzeug/Bestandteile_Fahrzeuge_OEM2_Typ21.csv\")\nrelation_oem2_22 <- read_csv2(\"Data/Fahrzeug/Bestandteile_Fahrzeuge_OEM2_Typ22.csv\")\n\nk7 <- as_tibble(read.table(\"Data/Komponente/Komponente_K7.txt\", sep = \"\\t\", stringsAsFactors = FALSE))\n\nzulassung <- read_csv2(\"Data/Zulassungen/Zulassungen_alle_Fahrzeuge.csv\")\n\n```\n\nThe datasets will be reduced to the relevant information so that they can be joined afterward. In all tables we only need the ID numbers of the component or the car. An exception is the dataset containing the registrations. In this dataset, we will also keep the location of the registration to solve this task. Because we only need this kind of data and the columns are fine in their structure, a full clean-up of the datasets will not be made.\n\n```{r, results = 'hide'}\n\nrelation_oem1_11_clean <- select(relation_oem1_11, ID_Karosserie, ID_Fahrzeug)\nrelation_oem1_12_clean <- select(relation_oem1_12, ID_Karosserie, ID_Fahrzeug)\nrelation_oem2_21_clean <- select(relation_oem2_21, ID_Karosserie, ID_Fahrzeug)\nrelation_oem2_22_clean <- select(relation_oem2_22, ID_Karosserie, ID_Fahrzeug)\n\nk7_clean <- select(k7, ID_Karosserie)\n\n#change the name to make it uniform with the relation datasets for joining later\nzulassung_clean <- select(zulassung, IDNummer, Gemeinden)\ncolnames(zulassung_clean)[1] <- \"ID_Fahrzeug\"\n\n```\n\nAfter that, the data was joined together so that we could identify the relevant observations.\n\n```{r, results = 'hide', message = FALSE}\n\nrelation_total <- rbind(relation_oem1_11_clean, relation_oem1_12_clean, relation_oem2_21_clean, relation_oem2_22_clean)\nfinal_data <- left_join(k7_clean, relation_total, by = \"ID_Karosserie\") %>%\n left_join(zulassung_clean, by = \"ID_Fahrzeug\")\n\n```\n\nThe next step is to identify all the cars registered in Dortmund. For that, we filtered for the cars which are registered in Dortmund. Afterward, the data was summarised to show the number of cars registered in Dortmund with the K7 component.\n\n```{r}\ncars_k7_dortmund <- filter(final_data, Gemeinden == \"DORTMUND\") %>%\n summarise(Amount_Cars_in_Dortmund = n())\ncars_k7_dortmund\n\n```\n\nThe number of cars with the component K7 registered in Dortmund is **69**.\n\n# Task 4\n\nIn this task, we should find out the data types of the attributes of the table \"Zulassungen_alle_Fahrzeuge\". First of all, the table will be imported. The attributes describe the columns of a dataset. Depending on how you import the data, the data type of the attributes can differ. We focus here on the import with the `readr` package. The dataset was imported with `read_csv2`, as the file uses \";\" as separator (and \",\" as decimal mark).\n\n```{r, message = FALSE, warning = FALSE}\n\nzulassung <- read_csv2(\"Data/Zulassungen/Zulassungen_alle_Fahrzeuge.csv\")\n```\n\nTo identify the data type of the attributes, the function `class` was used.\n```{r}\n#check the type of the data in every column\n\n#X1 is \"numeric\"\nclass(zulassung$X1)\n\n#IDNummer, Gemeinden are \"character\"\nclass(zulassung$IDNummer)\nclass(zulassung$Gemeinden)\n\n#Zulassung is a \"date - format\"\nclass(zulassung$Zulassung)\n\n```\n\nBased on the results of the code above, the data types of the attributes are the following:\n\n- X1: `numeric`\n- IDNummer: `character`\n- Gemeinden: `character`\n- Zulassung: `Date`
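\nInstead of checking each column individually, the classes of all attributes could also be retrieved in one call (a small sketch, equivalent to the individual calls above):\n\n```{r, eval = FALSE}\n#Hypothetical shortcut: query the class of every column at once\nsapply(zulassung, class)\n```\n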
# Task 5\nA backup on a server is effectively a prerequisite for an effective publication of the material. Using a server database has several advantages:\n\na) Availability and access\nFor the results to be comprehensible to others, they must have access to the underlying data at will. To ensure this, a personal computer is completely unsuitable. A server, on the other hand, is usually always available.\n\nb) Amount of data\nThe data on which the analysis is based is very extensive and requires a lot of time and bandwidth just to read and process. If only the required data is read on the server, a lot of time and computing capacity is saved.\n\nc) Security\nIf the data is stored on a server, access to it can generally be controlled better. In addition, the hardware of the servers also allows redundant storage, which offers higher security against technical failures.\n\nIf the dataset were saved on one's own local computer, the application could not be run from any other computer: the application could not access the dataset and therefore would not work. \n\n# Task 6\nFor this task, we need to import some datasets. Based on the ID \"K1BE2-104-1041-32050\" we already know that the car has to come from OEM 2, because they are the producer having the engine type K1BE2 built into their cars. Based on this information, we will import the two relation tables for the components of OEM 2. The relation tables are enough because they already include the information about the ID of the car. To find out where the car is registered, we also need to import the registration data.\n\n```{r, results = 'hide', warning= FALSE, message= FALSE}\nrelation_oem2_21 <- read_csv2(\"Data/Fahrzeug/Bestandteile_Fahrzeuge_OEM2_Typ21.csv\")\nrelation_oem2_22 <- read_csv2(\"Data/Fahrzeug/Bestandteile_Fahrzeuge_OEM2_Typ22.csv\")\nzulassung <- read_csv2(\"Data/Zulassungen/Zulassungen_alle_Fahrzeuge.csv\")\n```\n\n```{r, results = 'hide'}\nrelation_oem2 <- rbind(relation_oem2_21, relation_oem2_22)\nresult <- filter(relation_oem2, ID_Motor == \"K1BE2-104-1041-32050\")\nresult$ID_Fahrzeug\n```\n\nThe engine is related to the car with the **ID = 21-2-21-51526**.\nUnder the assumption that this variable corresponds to \"IDNummer\", we filter the table \"zulassung\" to find the corresponding registration location. The result of this filtering brings us to the location the car is registered in. The car is registered in **Leipzig**.\n\n```{r}\nlocation <- filter(zulassung, IDNummer == \"21-2-21-51526\")[[3]]\nlocation\n```"
},
{
"alpha_fraction": 0.5558856129646301,
"alphanum_fraction": 0.5577422976493835,
"avg_line_length": 21.450000762939453,
"blob_id": "23786560bed6e974da5a4ca9c20f730a7d0b314e",
"content_id": "f5b59b9f6518af6c92c309afa45e3ed78d50eca3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 2694,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 120,
"path": "/University/DQE/modules/predefined/module_data_name_text_input.R",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "module_data_name_text_input_ui <- function(\n id\n) {\n ns <- shiny::NS(id)\n \n tagList(\n shiny::uiOutput(\n outputId = ns(\"text_input\")\n ),\n shiny::uiOutput(\n outputId = ns(\"name_error\")\n )\n )\n}\n\nmodule_data_name_text_input <- function(\n input, output, session, .values, .label = NULL, .value = \"\", \n .allow_reset = TRUE, .reset = NULL, .update_value = NULL\n) {\n \n ns <- session$ns\n \n valid_characters <- c(letters, LETTERS, 0:9, \"\", \"-\")\n \n rvs <- shiny::reactiveValues(\n name_error = FALSE,\n # The value is retrieved from the ui the first time the input renders\n value = NULL\n )\n \n .group_name_error <- list(\n is_null = \"Name muss mindestens aus einem Zeichen bestehen\",\n invalid_character = \"Name darf nur A-Z, a-z, 0-9 und '-' enthalten.\"\n )\n \n output$text_input <- shiny::renderUI({\n label <- handle_fun(.label)\n \n if (.allow_reset) {\n label <- div(\n label,\n actionButton(\n inputId = ns(\"reset\"),\n label = \"Zurücksetzen\"\n )\n )\n }\n \n shiny::textInput(\n inputId = ns(\"name_text\"),\n label = label,\n value = handle_fun(.value)\n )\n })\n \n output$name_error <- shiny::renderUI({\n error <- FALSE\n \n if (purrr::is_null(input$name_text) || \n input$name_text == \"\") {\n error <- TRUE\n error_type <- \"is_null\"\n } else if (!all(stringr::str_split(input$name_text, \"\")[[1]] %in% \n valid_characters)) {\n error <- TRUE\n error_type <- \"invalid_character\"\n }\n \n if (error) {\n rvs$name_error <- TRUE\n # Specific error message dependent on error type\n return(.group_name_error[[error_type]])\n } else {\n rvs$name_error <- FALSE\n return(NULL)\n }\n })\n \n shiny::observeEvent(input$reset, {\n updateTextInput(\n session = session,\n inputId = \"name_text\",\n value = handle_fun(.value)\n )\n })\n \n if (!purrr::is_null(.reset)) {\n shiny::observeEvent(.reset(), {\n shiny::updateTextInput(\n session = session,\n inputId = \"name_text\",\n value = rvs$value\n )\n })\n }\n \n # name returns the name only if it is allowed otherwise it stops with req,\n # whereas null_name returns NULL if an error occured, so that it is useable\n # in for example the fallback function\n name <- shiny::reactive({\n req(!rvs$name_error)\n input$name_text\n })\n \n null_name <- shiny::reactive({\n if (rvs$name_error) {\n return(NULL)\n } else {\n return(input$name_text)\n }\n })\n \n return_list <- list(\n name = name,\n null_name = null_name,\n error = shiny::reactive({rvs$name_error})\n )\n \n return(return_list)\n}"
},
{
"alpha_fraction": 0.4920486509799957,
"alphanum_fraction": 0.4939195513725281,
"avg_line_length": 17.771930694580078,
"blob_id": "cb26abfe110b3c877f502dcb7208a3ef859de7f5",
"content_id": "dfaa5d8214326d41acd95a50a9b2043b3bae0025",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 1069,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 57,
"path": "/University/DQE/modules/predefined/utils.R",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "`%_%` <- function(x, y) {\n paste(x, y, sep = \"_\")\n}\n\nhandle_fun <- function(x) {\n if (is.function(x)) {\n return(x())\n } else {\n return(x)\n }\n}\n\nactionButtonQW <- function(\n inputId, label, icon = NULL, style = \"material-flat\", color = \"default\",\n size = \"xs\", block = FALSE, no_outline = TRUE, tooltip = NULL,\n dropdown = FALSE\n) {\n if (dropdown) {\n ui <- div(\n style = \"margin: 0px 2px\",\n shinyWidgets::actionBttn(\n inputId = inputId,\n label = label,\n icon = icon,\n style = style,\n color = color,\n size = size,\n block = block,\n no_outline = no_outline\n )\n )\n } else {\n ui <- shinyWidgets::actionBttn(\n inputId = inputId,\n label = label,\n icon = icon,\n style = style,\n color = color,\n size = size,\n block = block,\n no_outline = no_outline\n )\n }\n \n if (!is.null(tooltip)) {\n ui <- tagList(\n ui,\n shinyBS::bsTooltip(\n id = inputId,\n title = tooltip,\n placement = \"top\"\n )\n )\n }\n \n ui\n}"
},
{
"alpha_fraction": 0.6441631317138672,
"alphanum_fraction": 0.6765119433403015,
"avg_line_length": 20.57575798034668,
"blob_id": "3e662929860180ea01b274c35c8c55c011b8d354",
"content_id": "068fd78446fee0e3108dc1820bab1ad03b942093",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 711,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 33,
"path": "/Spotify/SQL/CreateTables.sql",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "CREATE DATABASE spotify;\n\nCREATE TABLE Spotify_History(\n\tID int NOT NULL auto_increment,\n\tTime datetime UNIQUE,\n Song_Name varchar(100),\n Spotify_ID varchar(100),\n Spotify_URI varchar(100),\n Popularity int,\n Object_Type varchar(50),\n primary key(ID)\n);\n\nCREATE TABLE Song_Data(\n\tSpotify_ID varchar(100) UNIQUE,\n Spotify_URI varchar(100),\n Artist varchar(100),\n Album varchar (100),\n Duration int,\n Acousticness float,\n Danceability float,\n Energy float,\n Instrumentalness float,\n key_spotify int, \n Liveness float,\n Loudness float,\n Mode int,\n Speechiness float,\n Tempo float,\n Time_Signature int,\n Valence float,\n primary key(Spotify_ID)\n);"
},
{
"alpha_fraction": 0.539429247379303,
"alphanum_fraction": 0.5424976944923401,
"avg_line_length": 39.197532653808594,
"blob_id": "e027550eb52f6081caf748a2685cb128bb560bb6",
"content_id": "5dad493d3968494c1f544392291e5fcfceb00bc0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3259,
"license_type": "no_license",
"max_line_length": 132,
"num_lines": 81,
"path": "/Spotify/.ipynb_checkpoints/extractdata-checkpoint.py",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "import requests\nimport pandas as pd\nimport json\nfrom authorisation import SpotifyAuthorisation\n\nclass DataExtracter():\n \n \"\"\"\n Class to extract the data from the Spotify Web Api regarding the last 50 songs played and the audio features of this songs. The \n class is initiated with the token needed to access the spotify web api\n \"\"\"\n \n def __init__(self, token): \n self.base_query = \"https://api.spotify.com/v1\"\n self.token = token\n\n #Read last fifty songs\n def get_recent_songs(self):\n \"\"\"\n Sends a request to the spotify web api and returns the last 50 played songs from the respective user.\n \"\"\"\n endpoint = \"/me/player/recently-played\"\n params = {\"limit\": 50}\n header = {\"Authorization\": \"Bearer {}\".format(self.token)}\n\n response = requests.get(\"{}{}\".format(self.base_query, endpoint),\n params = params,\n headers = header\n )\n print(\"Song History Request Status: {}\".format(response.status_code))\n return response\n \n def get_song_properties(self, spotify_ids:list):\n \"\"\"\n Returns the song audio features given an list of spotify ids\n \"\"\"\n endpoint = \"audio-features\"\n response = requests.get(\"{}/{}\".format(self.base_query, endpoint), \n params = {\"ids\": \",\".join(spotify_ids)}, \n headers = {\"Authorization\": \"Bearer {}\".format(self.token)})\n \n print(\"Song Properties Request Status: {}\".format(response.status_code))\n return response\n\n def extract_data(self):\n \"\"\"\n Extract the recently last 50 songs and the audio features to return it as a pandas DataFrame\n \"\"\"\n response = self.get_recent_songs()\n dic = {\"timestamp\": [], \"name\": [], \"id\": [], \"uri\": [], \"popularity\": [], \"object_type\": [], \"artist\": [], \"album\": []}\n\n for element in response.json()[\"items\"]:\n dic[\"timestamp\"].append(element[\"played_at\"])\n dic[\"name\"].append(element[\"track\"][\"name\"])\n dic[\"id\"].append(element[\"track\"][\"id\"])\n dic[\"uri\"].append(element[\"track\"][\"uri\"])\n dic[\"object_type\"].append(element[\"context\"][\"type\"])\n dic[\"popularity\"].append(element[\"track\"][\"popularity\"])\n dic[\"album\"].append(\",\".join([artist[\"name\"] for artist in element[\"track\"][\"artists\"]]))\n dic[\"artist\"].append(element[\"track\"][\"album\"][\"name\"])\n \n \n keys = [\"danceability\", \"energy\", \"key\", \"loudness\", \"mode\", \"speechiness\", \"acousticness\", \"instrumentalness\", \"liveness\", \n \"valence\", \"tempo\", \"duration_ms\", \"time_signature\", \"id\", \"uri\"]\n \n response = self.get_song_properties(dic[\"id\"])\n \n for key in keys:\n dic[key] = []\n \n for element in response.json()[\"audio_features\"]:\n print(element)\n for key in keys:\n try:\n dic[key].append(element[key])\n except: \n dic[key].append(0)\n \n self.song_data = pd.DataFrame(dic)\n \n return self.song_data "
},
{
"alpha_fraction": 0.8161764740943909,
"alphanum_fraction": 0.8161764740943909,
"avg_line_length": 44.33333206176758,
"blob_id": "d4f6ef5947d3f8c750c70bcc81614780c76c6d71",
"content_id": "fc3397527320ea8c47e0b94ba423b415e46d2f0d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 136,
"license_type": "no_license",
"max_line_length": 124,
"num_lines": 3,
"path": "/Kaggle/README.md",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "## Kaggle\n\nThis is a repository of personal Kaggle Notebooks from competition or related to specific datasets datasets on the platform.\n"
},
{
"alpha_fraction": 0.8285714387893677,
"alphanum_fraction": 0.8285714387893677,
"avg_line_length": 104,
"blob_id": "0a6b376449fe332655c21c5b1ca2e0e993a329b9",
"content_id": "bb91f07037ecf8d5582406847143a93df05f9e8f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 105,
"license_type": "no_license",
"max_line_length": 104,
"num_lines": 1,
"path": "/University/README.md",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "This repository consists of code in the programming language R, that are related to University projects.\n"
},
{
"alpha_fraction": 0.5739333629608154,
"alphanum_fraction": 0.5765634179115295,
"avg_line_length": 26.158729553222656,
"blob_id": "e4a4bba09c18690d4e4ca53232a64c187460f045",
"content_id": "39a95367e9bf8492bccbb8e848a6acde89e586c7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 3422,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 126,
"path": "/University/DQE/modules/predefined/ObjectStorage.R",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "#' ObjectStorage\n#'\n#' R6Class for storing similar R6 objects.\n#'\n#' @section Usage:\n#' \\preformatted{storage = ObjectStorage$new(allowed_classes = NULL)\n#'\n#' storage$add_object(object)\n#'\n#' storage$get_object(name)\n#' }\n#'\n#' @section Methods:\n#' \\describe{\n#' \\item{\\code{new(allowed_classes = NULL)}}{Initialize the storage.\n#' \\describe{\n#' \\item{\\code{allowed_classes}}{Character vector. \\code{class(object)} has to\n#' return at least one of these classes for being added to the storage.}\n#' }\n#' }\n#' \\item{\\code{add_object(object)}}{Add an object to the storage.\n#' \\describe{\n#' \\item{\\code{object}}{R6 object with public method \\code{get_name()}.}\n#' }\n#' }\n#' \\item{\\code{get_names()}}{Get the names of the stored objects as a\n#' character vector.\n#' }\n#' \\item{\\code{get_object(name, lazy = FALSE)}}{Get an object from the storage\n#' with \\code{object$get_name() == name}.\n#' \\describe{\n#' \\item{\\code{name}}{Name of an R6 object.}\n#' \\item{\\code{lazy}}{If \\code{\\link[base:logical]{TRUE}}, allow a name\n#' which is not present in the names of the storage for a short time.\n#' }\n#' }\n#' }\n#' \\item{\\code{get_objects(names)}}{Get a list of objects from the storage\n#' with \\code{object$get_name() \\%in\\% names}.\n#' \\describe{\n#' \\item{\\code{names}}{Character vector. Each element has to be a name of\n#' an object in the storage.\n#' }\n#' }\n#' }\n#' }\n#'\n#' @name ObjectStorage\nNULL\n\n#' @export\nObjectStorage <- R6::R6Class(\n classname = \"ObjectStorage\",\n public = list(\n initialize = function(allowed_classes = NULL) {\n private$storage <- shiny::reactiveVal(list())\n\n private$length <- shiny::reactive({\n length(private$storage())\n })\n\n private$storage_names <- shiny::reactive({\n map_chr(private$storage(), function(object) {\n object$get_name()\n })\n })\n\n private$allowed_classes <- allowed_classes\n\n invisible(self)\n },\n\n add_object = function(object) {\n if (!exists(\n x = \"get_name\",\n where = object\n )) {\n stop(\n \"ObjectStorage: object has to have a method with name \\\"get_name\\\"\"\n )\n }\n if (!is.null(private$allowed_classes)) {\n stopifnot(any(private$allowed_classes %in% class(object)))\n }\n storage <- private$storage()\n storage[[private$length() + 1]] <- object\n private$storage(storage)\n invisible(self)\n },\n\n get_length = function() {\n length(private$storage())\n },\n\n get_names = function() {\n private$storage_names()\n },\n\n get_object = function(name) {\n index <- which(private$storage_names() == name)\n if (length(index) != 1) {\n stop(paste0(\"There are either no or multiple objects with name \", name))\n }\n private$storage()[[index]]\n },\n\n get_objects = function(names) {\n if (!(all(names %in% self$get_names()))) {\n stop(\"ObjectStorage: Not all names are present in the names of the\n storage object.\")\n }\n positions <- map_dbl(names, function(name) {\n which(self$get_names() == name)\n })\n objects <- private$storage()[positions]\n names(objects) <- names\n objects\n }\n ),\n private = list(\n allowed_classes = NULL,\n length = NULL,\n storage = NULL,\n storage_names = NULL\n )\n)\n"
},
{
"alpha_fraction": 0.5663580298423767,
"alphanum_fraction": 0.5663580298423767,
"avg_line_length": 21.36206817626953,
"blob_id": "ec75fb3e03d15f23e10f447a6f9cc10a47d4abb7",
"content_id": "af08be81f52cdcc5bec32bc4e672ba615114be98",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 1301,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 58,
"path": "/University/DQE/modules/predefined/data_selector.R",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "data_selector_ui <- function(id) {\n ns <- NS(id)\n \n uiOutput(\n outputId = ns(\"module_ui\")\n )\n}\n\ndata_selector <- function(\n input, output, session, .values\n) {\n \n ns <- session$ns\n \n output$module_ui <- renderUI({\n selectInput(\n inputId = ns(\"select_data_name\"),\n label = div(\n \"Wähle Datensatz\",\n actionButtonQW(\n inputId = ns(\"open_data\"),\n label = NULL,\n icon = icon(\"table\"),\n tooltip = \"Datensatz öffnen\"\n )\n ),\n choices = .values$data_storage$get_names()\n )\n })\n \n data <- reactive({\n .values$data_storage$get_object(req(input$select_data_name))$get_value()\n })\n \n observeEvent(input$open_data, {\n .values$viewer$append_tab(\n tab = tabPanel(\n title = paste(\"Datensatz:\", input$select_data_name),\n dataTableOutput(\n outputId = ns(\"data\" %_% input$select_data_name)\n )\n )\n )\n \n output[[\"data\" %_% input$select_data_name]] <- renderDataTable({\n # Keine Abhängigkeit von data(), da sich die Tabelle ansonsten ändert, wenn\n # der Benutzer eine anderen Namen auswählt\n isolate(datatable(data()))\n })\n })\n \n return_list <- list(\n data = data,\n name = reactive(input$select_data_name)\n )\n \n return(return_list)\n}"
},
{
"alpha_fraction": 0.5333333611488342,
"alphanum_fraction": 0.5352380871772766,
"avg_line_length": 17.13793182373047,
"blob_id": "92540875ea013aa335746ed4d1677b9d4f719615",
"content_id": "34714c51526fd6ef44e1528107fbf5be5282f67a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 525,
"license_type": "no_license",
"max_line_length": 41,
"num_lines": 29,
"path": "/University/DQE/modules/predefined/DataObject.R",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "DataObject <- R6Class(\n classname = \"DataObject\",\n public = list(\n initialize = function(name, value) {\n private$name <- reactiveVal(name)\n private$value <- reactiveVal(value)\n },\n \n get_name = function() {\n private$name()\n },\n \n get_value = function() {\n private$value()\n },\n \n set_name = function(name) {\n private$name(name)\n },\n \n set_value = function(value) {\n private$value(value)\n }\n ),\n private = list(\n name = NULL,\n value = NULL\n )\n)"
},
{
"alpha_fraction": 0.6323481798171997,
"alphanum_fraction": 0.6411172747612,
"avg_line_length": 44.29411697387695,
"blob_id": "403457495e7120beb005209527383c09c8dc265f",
"content_id": "c7a5814bad77d15b9362d868f614f4f2c954baec",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3079,
"license_type": "no_license",
"max_line_length": 143,
"num_lines": 68,
"path": "/COVID19/.ipynb_checkpoints/DataProcessing-checkpoint.py",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "import pandas as pd\nimport numpy as np\nimport os\n\nts_confirmed = pd.read_csv(\"JohnHopkinsData/csse_covid_19_data/csse_covid_19_time_series/time_series_covid19_confirmed_global.csv\")\nts_deaths = pd.read_csv(\"JohnHopkinsData/csse_covid_19_data/csse_covid_19_time_series/time_series_covid19_deaths_global.csv\")\nts_recovered = pd.read_csv(\"JohnHopkinsData/csse_covid_19_data/csse_covid_19_time_series/time_series_covid19_recovered_global.csv\")\n\n#Turn the data into long format\nconfirmed = ts_confirmed.melt(id_vars = [\"Province/State\", \"Country/Region\", \"Lat\", \"Long\"], var_name = \"Date\", value_name = \"Confirmed Cases\")\ndeaths = ts_deaths.melt(id_vars = [\"Province/State\", \"Country/Region\", \"Lat\", \"Long\"], var_name = \"Date\", value_name = \"Deaths\")\nrecovered = ts_recovered.melt(id_vars = [\"Province/State\", \"Country/Region\", \"Lat\", \"Long\"], var_name = \"Date\", value_name = \"Recovered Cases\")\n\n#Replace empty values\nconfirmed[\"Confirmed Cases\"] = confirmed[\"Confirmed Cases\"].fillna(0)\ndeaths[\"Deaths\"] = deaths[\"Deaths\"].fillna(0)\nrecovered[\"Recovered Cases\"] = recovered[\"Recovered Cases\"].fillna(0)\n\nconfirmed[\"Province/State\"] = confirmed[\"Province/State\"].fillna(\"\")\ndeaths[\"Province/State\"] = deaths[\"Province/State\"].fillna(\"\")\nrecovered[\"Province/State\"] = recovered[\"Province/State\"].fillna(\"\")\n\nconfirmed[\"Date\"] = pd.to_datetime(confirmed[\"Date\"])\n#Add new cases to the data\ncountries = confirmed[\"Country/Region\"].unique()\n\nprov = []\ncoun = []\ndat = []\nnew = []\n\nfor country in countries:\n df = confirmed[confirmed[\"Country/Region\"] == country]\n df = df.sort_values(by = \"Date\", axis = 0,ignore_index = True)\n provinces = df[\"Province/State\"].unique()\n current_confirmed = 0\n new_cases = 0\n \n if len(provinces) != 0:\n for province in provinces:\n current_confirmed = 0\n df = confirmed[(confirmed[\"Country/Region\"] == country) & (confirmed[\"Province/State\"] == province)]\n df = df.sort_values(by = \"Date\", axis = 0, ignore_index = True)\n \n for index, row in df.iterrows():\n new_cases = row[\"Confirmed Cases\"] - current_confirmed\n prov.append(province)\n coun.append(country)\n dat.append(row[\"Date\"])\n new.append(new_cases)\n current_confirmed = row[\"Confirmed Cases\"]\n else:\n for index, row in df.iterrows():\n new_cases = row[\"Confirmed Cases\"] - current_confirmed\n prov.append(\"\")\n coun.append(country)\n dat.append(row[\"Date\"])\n new.append(new_cases)\n current_confirmed = row[\"Confirmed Cases\"]\n \nnew_case_df = pd.DataFrame({\"Province/State\": prov, \"Country/Region\": coun, \"Date\": dat, \"New Cases\":new})\nconfirmed = confirmed.merge(new_case_df, how = \"left\")\n\n\n#Save the data\nconfirmed.to_csv(\"TableauData/confirmed.csv\", sep = \";\", index = False)\ndeaths.to_csv(\"TableauData/deaths.csv\", sep = \";\", index = False)\nrecovered.to_csv(\"TableauData/recovered.csv\", sep = \";\", index = False)"
},
{
"alpha_fraction": 0.4516940712928772,
"alphanum_fraction": 0.45471319556236267,
"avg_line_length": 21.33333396911621,
"blob_id": "4d5a05981f525c196f0caefa957dbac0befdba36",
"content_id": "1a6398c3da59d366b40e0d1e6a80bf687d8809fb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 5966,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 267,
"path": "/University/DQE/modules/predefined/excel_csv_file_input.R",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "excel_csv_file_input_ui <- function(id) {\n ns <- NS(id)\n \n tagList(\n fileInput(\n inputId = ns(\"file\"),\n label = \"Lade eine Datei hoch:\",\n placeholder = \"Klicke hier.\"\n ),\n uiOutput(\n outputId = ns(\"names_data\")\n ),\n uiOutput(\n outputId = ns(\"add_preview\")\n ),\n uiOutput(\n outputId = ns(\"add_data\")\n )\n )\n}\n\nexcel_csv_file_input <- function(input, output, session, .values) {\n \n ns <- session$ns\n \n output$names_data <- renderUI({\n if (!file_type_not_supported()) {\n l <- list()\n for (i in 1:sheets()) {\n l[[i]] <- fluidRow(\n column(\n width = 6,\n textInput(\n inputId = ns(paste(\"name_data\", i, sep = \"_\")),\n label = paste(\"Name des \", i, \". Tabellenblattes\"),\n value = excel_sheets(file()$datapath)[[i]]\n )\n ),\n column(\n width = 6,\n textOutput(\n outputId = ns(paste(\"warning_name_data\", i, sep = \"_\"))\n )\n )\n )\n }\n ui <- map(seq_len(sheets()), function(i) {\n fluidRow(\n column(\n width = 6,\n textInput(\n inputId = ns(\"name_data\" %_% i),\n label = paste(\"Name des \", i, \". Tabellenblattes\"),\n value = excel_sheets(file()$datapath)[[i]]\n )\n ),\n column(\n width = 6,\n textOutput(\n outputId = ns(paste(\"warning_name_data\", i, sep = \"_\"))\n )\n )\n )\n }) \n } else {\n ui <- NULL\n }\n ui\n })\n \n output$add_preview <- renderUI({\n if (!file_type_not_supported()) {\n actionButtonQW(\n inputId = ns(\"add_preview\"),\n label = \"Vorschau\"\n )\n }\n })\n \n output$add_data <- renderUI({\n if (!error()) {\n ui <- actionButtonQW(\n inputId = ns(\"add_data\"),\n label = \"Füge Datensatz hinzu.\"\n )\n } else {\n ui <- uiOutput(\n outputId = ns(\"ui_error\")\n )\n }\n })\n \n file <- reactive({\n req(input$file)\n })\n \n file_type <- reactive({\n path <- file()$datapath\n split_path <- str_split(path, pattern = \"\\\\.\")\n # Extrahiere Dateiendung\n split_path[[1]][length(split_path[[1]])]\n })\n \n sheets <- reactive({\n if (file_type() == \"xlsx\" || file_type() == \"xls\") {\n len <- length(excel_sheets(file()$datapath))\n } else {\n len <- 1 \n }\n \n len\n })\n \n file_type_not_supported <- reactive({\n !(file_type() %in% c(\"xlsx\", \"xls\", \"csv\"))\n })\n \n data_names <- reactive({\n if (!file_type_not_supported()) {\n map_chr(seq_len(sheets()), function(i) {\n req(input[[\"name_data\" %_% i]])\n })\n } else {\n character()\n }\n })\n \n name_in_use <- reactive({\n any(data_names() %in% .values$data_storage$get_names())\n })\n \n which_names_in_use <- reactive({\n data_names()[which(data_names() %in% .values$data_storage$get_names())]\n })\n \n error <- reactive({\n file_type_not_supported() ||\n name_in_use()\n })\n \n data_preview <- reactive({\n if (!file_type_not_supported()) {\n data <- list()\n type <- file_type()\n \n if (type == \"xlsx\" || type == \"xls\") {\n for (i in 1:sheets()) {\n data[[i]] <- read_excel(\n path = file()$datapath,\n sheet = i,\n col_names = TRUE\n )\n }\n data <- map(seq_len(sheets()), function(sheet) {\n read_excel(\n path = file()$datapath,\n sheet = sheet,\n col_names = TRUE\n )\n })\n } else if (type == \"csv\") {\n data[[input$name_data_1]] <- read_csv2(\n file = file()$datapath\n )\n }\n \n return(data)\n }\n })\n \n data <- reactive({\n if (!error()) {\n data <- list()\n type <- file_type()\n if (type == \"xlsx\" || type == \"xls\") {\n for (i in seq_len(sheets())) {\n name <- input[[\"name_data\" %_% i]]\n if (name == \"\") {\n \n } else {\n data[[name]] <- read_excel(\n path = file()$datapath,\n sheet = i,\n col_names = TRUE\n )\n }\n }\n } else if (type == \"csv\") {\n 
 data <- reactive({\n if (!error()) {\n data <- list()\n type <- file_type()\n if (type == \"xlsx\" || type == \"xls\") {\n for (i in seq_len(sheets())) {\n name <- input[[\"name_data\" %_% i]]\n if (name != \"\") {\n data[[name]] <- read_excel(\n path = file()$datapath,\n sheet = i,\n col_names = TRUE\n )\n }\n }\n } else if (type == \"csv\") {\n data[[input$name_data_1]] <- read_csv2(\n file = file()$datapath\n )\n }\n \n return(data)\n }\n })\n\n observeEvent(input$add_preview, {\n .values$viewer$append_tab(\n tab = tabPanel(\n title = \"Vorschau\",\n value = \"preview\",\n uiOutput(\n outputId = ns(\"select_preview_sheet\")\n ),\n dataTableOutput(\n outputId = ns(\"preview_data\")\n )\n )\n )\n })\n \n output$select_preview_sheet <- renderUI({\n if (sheets() > 1) {\n selectInput(\n inputId = ns(\"select_preview_sheet\"),\n label = \"Wähle das anzuzeigende Tabellenblatt aus:\",\n choices = seq_len(sheets())\n )\n }\n })\n \n output$preview_data <- renderDataTable({\n if (sheets() > 1) {\n data_preview()[[as.numeric(req(input$select_preview_sheet))]]\n } else {\n data_preview()[[1]]\n }\n })\n \n output$ui_error <- renderUI({\n if (file_type_not_supported()) {\n not_supported <- paste0(\n \"Dateiendung .\", file_type(), \" wird nicht unterstützt. \"\n )\n } else {\n not_supported <- NULL\n }\n \n if (name_in_use()) {\n name_in_use <- paste0(\n \"Es existieren bereits Datensätze mit den Namen \", \n paste(which_names_in_use(), collapse = \", \"), \". \"\n )\n } else {\n name_in_use <- NULL\n }\n \n ui <- tagList(\n not_supported,\n name_in_use\n )\n })\n \n observeEvent(input$add_data, {\n data <- data()\n walk(seq_len(sheets()), function(i) {\n object <- DataObject$new(\n name = input[[\"name_data\" %_% i]],\n value = data[[i]]\n )\n .values$data_storage$add_object(\n object\n )\n })\n })\n}"
},
{
"alpha_fraction": 0.5819176435470581,
"alphanum_fraction": 0.5844775438308716,
"avg_line_length": 32.70588302612305,
"blob_id": "2984fa6ae84bf6c937f29f9772f27c000ffa2553",
"content_id": "b10f3b6cd8c08b574c61da59d6538069fe3e4f70",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 8597,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 255,
"path": "/University/DQE/modules/predefined/TabBox.R",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "#' Create a tabbed box\n#'\n#' Create a \\code{\\link[shinydashboard]{tabBox}} using an R6 object. The object\n#' contains methods for creating the tabBox, inserting and removing \\code{\n#' \\link[shiny]{tabPanel}} elements and the ability to expand the usual tabBox\n#' with an additional action button which closes a tab.\n#'\n#' Instantiate a new tabBox_R6 obejct with \\code{tabBox_R6$new(id,\n#' selected = NULL, title = \"Viewer\", width = 6, height = NULL,\n#' side = c(\"left\", \"right\"))}.\n#'\n#' @usage\n#' NULL\n#'\n#' @format\n#' NULL\n#'\n#' @return\n#' Public methods and fields are accesible using the '$' operator.\n#' \\item{append_tab(tab, select = FALSE, closeable = TRUE)}{Append the\n#' \\code{\\link[shiny]{tabPanel}} \\code{tab} to the tabBox. If \\code{tab} should\n#' be selected upon being inserted use \\code{select = TRUE}. If the \\code{tab}\n#' should not be closeable via an \\code{\\link[shiny]{actionButton}} use\n#' \\code{closeable = FALSE}.}\n#' \\item{get(what)}{Get the value of the private element with name \\code{what}.}\n#' \\item{insert_tab(tab, target, position = c(\"before\", \"after\"),\n#' select = FALSE, closeable = TRUE)}{Insert the \\code{\\link[shiny]{tabPanel}}\n#' \\code{tab} to the tabBox. See \\code{\\link[shiny]{insertTab}} for details\n#' regarding \\code{target} and \\code{position}. If \\code{tab} should be selected\n#' upon being inserted use \\code{select = TRUE}. If the \\code{tab}\n#' should not be closeable via an \\code{\\link[shiny]{actionButton}} use\n#' \\code{closeable = FALSE}.}\n#' \\item{prepend_tab(tab, select = FALSE, closeable = TRUE)}{Prepend the\n#' \\code{\\link[shiny]{tabPanel}} \\code{tab} to the tabBox. If \\code{tab} should\n#' be selected upon being inserted use \\code{select = TRUE}. If the \\code{tab}\n#' should not be closeable via an \\code{\\link[shiny]{actionButton}} use\n#' \\code{closeable = FALSE}.}\n#' \\item{remove_tab(target)}{Remove the tab with value \\code{target}.}\n#' \\item{set_session}{Set the session for the tabBox. This method is separated\n#' from the instantiation because you will usually want to use the tabBox\n#' already in the ui when there is no session present yet. 
Therefore the session\n#' has to be manually in the server function.}\n#' \\item{tabBox()}{Return the HTML that builds the \\code{\\link[shinydashboard]{\n#' tabBox}}.}\n#' Use \\code{get()} without any arguments to see the names of all private\n#' fields and methods.\n#'\n#' @export\nTabBox <- R6Class(\n \"tabBox_R6\",\n public = list(\n initialize = function(id, title = \"Viewer\",\n width = 6, height = NULL, side = c(\"left\", \"right\")) {\n private$id <- id\n private$title <- title\n private$width <- width\n private$height <- height\n private$side <- match.arg(side)\n },\n \n # Wird nur benötigt, wenn man mehrere Plots, die sich genau gleich verhalten\n # erzeugen möchte, ein Anwendungsfall fällt mir zurzeit nicht ein\n appendPlot = function(plot_reactive, title, select = TRUE,\n closeable = TRUE) {\n private$tabCounter <- private$tabCounter + 1\n unique_id <- shiny:::createUniqueId()\n private$session$output[[unique_id]] <- renderPlot({\n return(plot_reactive())\n })\n data_value <- title %_% private$tabCounter\n tab <- tabPanel(\n title = title,\n plotOutput(\n outputId = private$session$ns(unique_id)\n ),\n value = data_value\n )\n shiny::appendTab(\n inputId = private$id,\n tab = tab,\n select = select,\n session = private$session\n )\n if (closeable) private$createActionButton(data_value)\n invisible(self)\n },\n \n append_tab = function(tab, select = TRUE, closeable = TRUE) {\n private$tabCounter <- private$tabCounter + 1\n data_value <- tab$attribs[[\"data-value\"]]\n if (data_value %in% private$open_tab_values) {\n updateTabsetPanel(\n session = private$session,\n inputId = private$id,\n selected = data_value\n )\n } else {\n private$open_tab_values <- c(private$open_tab_values, data_value)\n private$tab_values <- c(private$tab_values, data_value)\n shiny::appendTab(\n inputId = private$id,\n tab = tab,\n select = select,\n session = private$session\n )\n if (closeable) private$createActionButton(data_value)\n }\n invisible(self)\n },\n \n get = function(what) {\n if (missing(what)) return(names(private))\n private[[what]]\n },\n \n insert_tab = function(tab, target, position = c(\"before\", \"after\"),\n select = FALSE, closeable = TRUE) {\n private$tabCounter <- private$tabCounter + 1\n data_value <- tab$attribs[[\"data-value\"]]\n if (data_value %in% private$open_tab_values) {\n updateTabsetPanel(\n session = private$session,\n inputId = private$id,\n selected = data_value\n )\n } else {\n private$open_tab_values <- c(private$open_tab_values, data_value)\n private$tab_values <- c(private$tab_values, data_value)\n shiny::insertTab(\n inputId = private$id,\n tab = tab,\n target = target,\n position = match.arg(position),\n select = select,\n session = private$session\n )\n if (closeable) private$createActionButton(tab)\n }\n invisible(self)\n },\n \n is_open = function(value) {\n value %in% private$open_tab_values\n },\n \n is_value = function(value) {\n value %in% private$tab_values\n },\n \n prepend_tab = function(tab, select = FALSE, closeable = TRUE) {\n private$tabCounter <- private$tabCounter + 1\n data_value <- tab$attribs[[\"data-value\"]]\n if (data_value %in% private$open_tab_values) {\n updateTabsetPanel(\n session = private$session,\n inputId = private$id,\n selected = data_value\n )\n } else {\n private$open_tab_values <- c(private$open_tab_values, data_value)\n private$tab_values <- c(private$tab_values, data_value)\n shiny::prependTab(\n inputId = private$id,\n tab = tab,\n target = target,\n select = select,\n session = private$session\n )\n if (closeable) 
private$createActionButton(tab)\n }\n invisible(self)\n },\n \n remove_tab = function(target) {\n index <- which(private$open_tab_values == target)\n private$open_tab_values <- private$open_tab_values[-index]\n shiny::removeTab(\n inputId = private$id,\n target = target,\n session = private$session\n )\n invisible(self)\n },\n \n set_session = function(session) {\n private$session <- session\n },\n \n tabBox = function(collapsible = FALSE) {\n if (!private$once) {\n if (!collapsible) {\n ui <- shinydashboard::tabBox(\n id = private$id,\n title = private$title,\n width = private$width,\n height = private$height,\n side = private$side\n )\n } else {\n ui <- shinydashboard::box(\n title = private$title,\n collapsible = TRUE,\n width = private$width,\n height = private$height,\n shinydashboard::tabBox(\n id = private$id,\n width = 12,\n side = private$side\n )\n )\n ui$children[[1]]$children[[2]]$children[[1]]$attribs$class <- paste(\n ui$children[[1]]$children[[2]]$children[[1]]$attribs$class,\n \"collapsible-tab-box\"\n )\n }\n private$once <- TRUE\n return(ui)\n }\n else print(\"tabBox has been already created.\")\n }\n ),\n private = list(\n id = NULL,\n title = \"Viewer\",\n width = 6,\n height = NULL,\n side = \"left\",\n once = FALSE,\n session = NULL,\n tabCounter = 0,\n open_tab_values = character(),\n tab_values = character(),\n \n createActionButton = function(data_value) {\n closeId <- private$id %_% private$tabCounter\n div_button <- div(\n class = \"div-btn-close\",\n actionButton(\n inputId = closeId,\n label = NULL,\n icon = icon(\"window-close\")\n )\n )\n selector <- paste0(\"#\", private$id, \" li a[data-value=\\\"\", data_value, \"\\\"]\")\n insertUI(\n selector = selector,\n where = \"beforeEnd\",\n ui = div_button\n )\n observeEvent(private$session$input[[closeId]], {\n self$remove_tab(target = data_value)\n }, domain = private$session)\n }\n )\n)"
},
{
"alpha_fraction": 0.5552353262901306,
"alphanum_fraction": 0.5552353262901306,
"avg_line_length": 18.296297073364258,
"blob_id": "10a684574fa3772fd1fadec3b714a423acb42d6a",
"content_id": "5bc4b41cf20c4ea7b8708e7cada1d8d129d31b84",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 1042,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 54,
"path": "/University/DQE/modules/aufgaben/ortsauswahl.R",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "ortsauswahl_ui <- function(id) {\n ns <- NS(id)\n \n tagList(\n data_selector_ui(\n id = ns(\"id_data_selector\")\n ),\n actionButtonQW(\n inputId = ns(\"add_histogram\"),\n label = NULL,\n icon = icon(\"area-chart\"),\n tooltip = \"Öffne Histogramm\"\n )\n )\n}\n\nortsauswahl <- function(input, output, session, .values) {\n \n ns <- session$ns\n \n data <- reactive({\n data_selector_return$data()\n })\n \n # Beispiel: Anfang\n histogram <- reactive({\n ggplot(data = mtcars, mapping = aes(x = mpg)) +\n geom_histogram(bins = nclass.Sturges(mpg)) +\n theme_bw()\n })\n \n observeEvent(input$add_histogram, {\n .values$viewer$append_tab(\n tab = tabPanel(\n title = \"Histogram\",\n value = \"histogram\",\n plotOutput(\n outputId = ns(\"histogram\")\n )\n )\n )\n })\n \n output$histogram <- renderPlot({\n histogram()\n })\n # Beispiel: Ende\n \n data_selector_return <- callModule(\n module = data_selector,\n id = \"id_data_selector\",\n .values = .values\n )\n}"
},
{
"alpha_fraction": 0.5109623670578003,
"alphanum_fraction": 0.5109623670578003,
"avg_line_length": 41.07767105102539,
"blob_id": "b6c07d8bdffa30ecc13387849b50d6b4c16b19ed",
"content_id": "47d939b828d65f31996c93db6ae015d9774f69f3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4333,
"license_type": "no_license",
"max_line_length": 295,
"num_lines": 103,
"path": "/Spotify/dbconnector.py",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "import mysql\nimport mysql.connector\nfrom mysql.connector import Error\nimport pandas as pd\nfrom datetime import datetime\n\nclass DatabaseHandler():\n \n \"\"\"\n Class to write the spotify data into a MySql Database. The database has two tables with the names [spotify_history, song_data]\n spotify_history has the following colums in that order: (ID (primary key, auto increment), Time, Song_Name, Spotify_ID, \n Spotify_URI, Popularity, Object_Type)\n song_data has the following colums in that order: (Spotify_ID (primary key), Spotify_URI, Artist, Album, Duration, Acousticness, \n Danceability, Energy, Instrumentalness, key_spotify, Liveness, Loudness, Mode, Speechiness, Tempo, Time_Signature, Valence)\n \"\"\"\n def __init__(self, host, database, user, password, auth_plugin = None):\n self.host = host\n self.database = database\n self.user = user\n self.password = password\n self.connection = None\n self.cursor = None\n self.auth_plugin = auth_plugin\n \n \n def connect(self):\n \"\"\"\n Function to connect to a database and the feedback, if that connection was succesful or not\n \"\"\"\n self.connection = mysql.connector.connect(host=self.host, \n database=self.database, \n user=self.user, \n password=self.password, \n auth_plugin=self.auth_plugin)\n if self.connection.is_connected():\n print(\"Succesful connection to the database {} as {}\".format(self.database, self.user))\n self.cursor = self.connection.cursor()\n else:\n print(\"The connection to the database was not successful.\")\n \n def close(self):\n \"\"\"\n Closing of the database connection\n \"\"\"\n self.connection.close()\n \n def write_to_db(self, df):\n \n \"\"\"\n Write to the MySql Database\n \"\"\"\n #query for the history data\n query = \"INSERT IGNORE INTO spotify_history (Time, Song_Name, Spotify_ID, Spotify_URI, Popularity, Object_Type) VALUES (%s, %s, %s, %s, %s, %s)\"\n \n val = []\n for index, row in df.iterrows():\n #some songs don't have milisecond, so the dateformat needs to be adapted\n try:\n timestamp = datetime.strptime(row[\"timestamp\"], '%Y-%m-%dT%H:%M:%S.%fZ')\n except:\n datetime.strptime(row[\"timestamp\"], '%Y-%m-%dT%H:%M:%SZ')\n finally:\n val.append((timestamp, \n row[\"name\"], \n row[\"id\"], \n row[\"uri\"], \n row[\"popularity\"], \n row[\"object_type\"]))\n\n self.cursor.executemany(query, val)\n print(\"New Songs in the History {}\".format(self.cursor.rowcount))\n \n #query for the song properties\n query = \"INSERT IGNORE INTO song_data (Spotify_ID, Spotify_URI, Artist, Album, Duration, Acousticness, Danceability, Energy, Instrumentalness, key_spotify, Liveness, Loudness, Mode, Speechiness, Tempo, Time_Signature, Valence) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)\"\n val = []\n for index, row in df.iterrows():\n val.append((row[\"id\"], \n row[\"uri\"], \n row[\"artist\"], \n row[\"album\"], \n row[\"duration_ms\"], \n row[\"acousticness\"],\n row[\"danceability\"],\n row[\"energy\"],\n row[\"instrumentalness\"],\n row[\"key\"],\n row[\"liveness\"],\n row[\"loudness\"],\n row[\"mode\"],\n row[\"speechiness\"],\n row[\"tempo\"],\n row[\"time_signature\"],\n row[\"valence\"])\n )\n \n print(\"New Songs in the database: {}\".format(self.cursor.rowcount))\n self.cursor.executemany(query, val)\n \n self.connection.commit()\n\n\n def query_data(self, sql_query):\n pass"
},
{
"alpha_fraction": 0.5120350122451782,
"alphanum_fraction": 0.5159737467765808,
"avg_line_length": 21.9698486328125,
"blob_id": "61e8a3db2971726c3cc8dff41c35d5b41671c735",
"content_id": "38a23cfbe7f88955e664742e76d4967c4170fdf8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 4576,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 199,
"path": "/University/DQE/App.R",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "# Falls es an dieser Stelle Fehlermeldungen gibt, müssen die Packages mit\n# install.packages installiert werden\nlibrary(shiny)\n# Dashboard\nlibrary(shinydashboard)\n# Tidyverse\nlibrary(tidyverse)\n# DT-Tabellen in shiny\nlibrary(DT)\n# plotly-Plots\nlibrary(plotly)\n# wird für source_directory benötigt\nlibrary(R.utils)\n# Import von .xls- und .xlsx-Dateien\nlibrary(readxl)\n# Import von .csv-Dateien\nlibrary(readr)\n# Bearbeiten von Strings\nlibrary(stringr)\n# Objektorientiertes System; z.B. TabBox ist ein R6-Objekt\nlibrary(R6)\n# UI\nlibrary(shinyWidgets)\n# Tooltips\nlibrary(shinyBS)\n\n# Source source_directory.R\nsource(\"./modules/predefined/source_directory.R\", encoding = \"UTF-8\")\n\n# Nutze source_directory, um gesamten Ordner zu sourcen; setze verbose = FALSE,\n# um keine Mitteilungen in der Konsole zu sehen\nsource_directory(\n \"./modules\", encoding = \"UTF-8\", modifiedOnly = FALSE, chdir = TRUE, \n verbose = TRUE, envir = globalenv()\n)\n\n# Erzeuge einen Viewer, in dem Plots und Tabellen in einzelnen Tabs dargestellt\n# werden können\nviewer <- TabBox$new(\n id = \"viewer\",\n title = \"Viewer\",\n width = 12\n)\n\n# Scrollen in zu breiten DT-Tabellen\noptions(DT.options = list(scrollX = TRUE))\n\nui <- div(\n tags$head(\n # Include custom css styles\n tags$link(\n rel = \"stylesheet\",\n type = \"text/css\",\n href = \"styles.css\"\n )\n ),\n dashboardPage(\n dashboardHeader(\n title = \"DQE-App\"\n ),\n dashboardSidebar(\n sidebarMenu(\n menuItem(\n text = \"Import\",\n tabName = \"import\"\n ),\n menuItem(\n text = \"Projekt\",\n menuSubItem(\n text = \"Ortsauswahl\",\n tabName = \"ortsauswahl\"\n ),\n menuSubItem(\n text = \"Standardisierung\",\n tabName = \"standardisierung\"\n ),\n menuSubItem(\n text = \"Versuchsplan\",\n tabName = \"versuchsplan\"\n ),\n menuSubItem(\n text = \"Steepest-Ascent\",\n tabName = \"steepest_ascent\"\n )\n )\n )\n ),\n dashboardBody(\n fluidRow(\n column(\n width = 6,\n tabItems(\n tabItem(\n tabName = \"import\",\n box(\n title = \"Import\",\n width = 12,\n excel_csv_file_input_ui(\n id = \"id_excel_csv_file_input\"\n )\n )\n ),\n tabItem(\n tabName = \"ortsauswahl\",\n box(\n title = \"Ortsauswahl\",\n width = 12,\n ortsauswahl_ui(\n id = \"id_ortsauswahl\"\n )\n )\n ),\n tabItem(\n tabName = \"standardisierung\",\n box(\n title = \"Standardisierung\",\n width = 12,\n standardisierung_ui(\n id = \"id_standardisierung\"\n )\n )\n ),\n tabItem(\n tabName = \"versuchsplan\",\n box(\n title = \"Versuchsplan\",\n width = 12,\n versuchsplan_ui(\n id = \"id_versuchsplan\"\n )\n )\n ),\n tabItem(\n tabName = \"steepest_ascent\",\n box(\n title = \"Steepest-Ascent\",\n width = 12,\n steepest_ascent_ui(\n id = \"id_steepest_ascent\"\n )\n )\n )\n )\n ),\n column(\n width = 6,\n # Container, in dem die Inhalte des Viewers dargestellt werden\n viewer$tabBox()\n )\n )\n )\n )\n)\n\nserver <- function(input, output, session) {\n \n # Verknüpfe Viewer mit der session\n viewer$set_session(session)\n \n # Erzeuge eine Liste, die allen Modulen als Argument übergeben wird\n .values <- list(\n data_storage = ObjectStorage$new(),\n viewer = viewer \n )\n \n # Rufe Module auf\n callModule(\n module = excel_csv_file_input,\n id = \"id_excel_csv_file_input\",\n .values = .values\n )\n \n callModule(\n module = ortsauswahl,\n id = \"id_ortsauswahl\",\n .values = .values\n )\n \n callModule(\n module = standardisierung,\n id = \"id_standardisierung\",\n .values = .values\n )\n \n callModule(\n module = steepest_ascent,\n id = 
\"id_steepest_ascent\",\n .values = .values\n )\n \n callModule(\n module = versuchsplan,\n id = \"id_versuchsplan\",\n .values = .values\n )\n}\n\n# Erzeuge die App\nshinyApp(ui, server)"
},
{
"alpha_fraction": 0.6788399815559387,
"alphanum_fraction": 0.6895810961723328,
"avg_line_length": 29.064516067504883,
"blob_id": "4c6e445ef6bde4cbb492967b4a8df1127a9fa85b",
"content_id": "2fd2cbbe8cf8366fcbea3207c0fcc761e46f9e12",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 931,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 31,
"path": "/Spotify/main.py",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "from authorisation import SpotifyAuthorisation\nfrom dbconnector import DatabaseHandler\nfrom extractdata import DataExtracter\n\n#Define the information needed to extract the data\nclient_id = \"\"\nscope = \"user-read-recently-played\"\nclient_secret = \"\"\nredirect_uri = \"http://localhost:8888\"\n\n#Check for Authorisation and get a token to make the requests in the spotify webapi\nsp = SpotifyAuthorisation(client_id, client_secret, scope, redirect_uri)\n\n#Load the tokeninformation save as file in the same directionary\ntoken_info = sp.get_tokeninfo()\ntoken = token_info[\"access_token\"]\n\n#Extract the data\nda = DataExtracter(token)\ndata = da.extract_data()\n\n#Write the data into a dataframe\ndb = DatabaseHandler(host = \"127.0.0.1\",\n database = \"\",\n user = \"\",\n password = \"\",\n auth_plugin = \"mysql_native_password\")\n\ndb.connect()\ndb.write_to_db(data)\ndb.close()"
},
{
"alpha_fraction": 0.4485222399234772,
"alphanum_fraction": 0.45701026916503906,
"avg_line_length": 46.187320709228516,
"blob_id": "b3d8f45c8a96fb4bbd7aab6fec166317cfa688f8",
"content_id": "116cdddd0bf0524bfaa2ecd94a1323a585c157c5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 16376,
"license_type": "no_license",
"max_line_length": 134,
"num_lines": 347,
"path": "/University/Introduction Data Engineering/Case_Study_Gruppe_25.R",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "\n#install.packages(\"leaflet\")\n#install.packages(\"leaflet.extras\")\n\nlibrary(shiny)\nlibrary(tidyverse)\nlibrary(shinyWidgets)\nlibrary(leaflet)\nlibrary(leaflet.extras)\n\n\n#loading the final dataset resulting from the \nload(\"Finaler_Datensatz_25.RData\")\n\n#uncomment the following line to load dataset faster\n#final <- head(final, 500)\n\n\n#create a data frame jsut containing the locations\nplaces <- group_by(final, Ort) %>%\n summarise(n=n())\n\n# Define UI for application that draws a histogram\nui <- fluidPage(\n \n # Application title\n titlePanel(\"Defective & Registered Cars\"),\n \n \n # Show a plot of the generated distribution\n mainPanel(width = \"100%\",\n \n #input modules based on the date, location and car manufacturer\n wellPanel(width = \"90%\",\n titlePanel(\"Filters for Displaying Registrations over Time\"),\n fluidRow(\n column(width = 6, \n \n dateRangeInput(\"registration\", \n label = \"Choose the time period.\",\n start = min(final$Zulassung),\n end = max(final$Zulassung),\n min = min(final$Zulassung),\n max = max(final$Zulassung),\n startview = \"year\",\n format = \"yyyy-mm-dd\"),\n \n radioButtons(\"manufacturer\", \n label = \"Showcase the data by\", \n choices = c(\"Car manufacturer\", \"Engine manufacturer\"),\n select = \"Car manufacturer\")\n ),\n \n column(width= 6,\n \n pickerInput(\"location\",\n label = \"Choose the location to observe\",\n choices = c(\"All\", places$Ort),\n selected = \"All\"),\n \n conditionalPanel(\"input.manufacturer == 'Car manufacturer'\",\n \n selectInput(\"car_manufacturer\", \n label = \"Choose the Car manufacturer\",\n choices = c(\"All\", 1, 2),\n selected = \"All\")\n ),\n \n conditionalPanel(\"input.manufacturer == 'Engine manufacturer'\",\n \n selectInput(\"engine_manufacturer\", \n label = \"Choose the Engine manufacturer\",\n choices = c(\"All\", 101, 102, 103, 104),\n selected = \"All\")\n )\n )\n ),\n \n \n # Output of the plot related to the registration over the time\n plotOutput(\"registration\",\n width = \"100%\" \n )\n ), \n \n wellPanel(width = \"90%\",\n titlePanel(\"Filters for Displaying Geographical distribution\"),\n fluidRow(\n #the input given by the user for the heatmap\n column(6,\n dateRangeInput(\"heatmap\", \n label = \"Choose the time period\",\n start = min(final$Zulassung),\n end = max(final$Zulassung),\n min = min(final$Zulassung),\n max = max(final$Zulassung)),\n \n radioButtons(\"manufacturer2\", \n label = \"Showcase the data by\", \n choices = c(\"Car manufacturer\", \"Engine manufacturer\"),\n select = \"Car manufacturer\")\n \n \n ),\n #the user chooses whether to display the density numerically though clusters, or graphically through a heatmap\n column(6,\n selectInput(\"mapDisplay\",\n label = \"Select display mode of map\",\n choices = c(\"Marker Clusters\", \"Heatmap\", \"Heatmap with Marker Clusters\"),\n selected = \"Heatmap with Marker Clusters\"\n ),\n conditionalPanel(\"input.mapDisplay == 'Heatmap' | input.mapDisplay == 'Heatmap with Marker Clusters'\",\n \n numericInput(\"fehleranzahl\",\n label = \"Choose mininum amount of defective cars\",\n value = 20,\n min = 1,\n max = 10000,\n step = 1)\n ),\n conditionalPanel(\"input.manufacturer2 == 'Car manufacturer'\",\n \n selectInput(\"car_manufacturer2\", \n label = \"Choose the Car manufacturer\",\n choices = c(\"All\", 1, 2),\n selected = \"All\")\n ),\n \n conditionalPanel(\"input.manufacturer2 == 'Engine manufacturer'\",\n \n selectInput(\"engine_manufacturer2\", \n label = \"Choose the Engine 
manufacturer\",\n choices = c(\"All\", 101, 102, 103, 104),\n selected = \"All\")\n )\n \n )\n ),\n \n titlePanel(\n h4(\"Geographical Distribution of Cars\")\n ),\n titlePanel(\n h6(\"Click or scroll on the clusters to zoom in. Click the markers to reveal information about the car.\")\n ),\n mainPanel(width = \"90%\",\n \n # Show the distribution of registered cars on a map\n leafletOutput(\"gerMap\", \n width = \"90%\",\n height = \"650px\")\n )\n ),\n wellPanel(width = \"90%\",\n titlePanel(\"Tabular Representation of Data\"),\n #user has the choice of using the table to further explore the map, or to search for items individually\n selectInput(\"mapFilters\", \n label = \"Use filters from Map Display?\",\n choices = c(\"Use Map Filters\", \"Display Unfiltered Dataset\"),\n selected = \"Use Map Filters\"),\n mainPanel(width = \"100%\",\n dataTableOutput(\"table\") \n )\n \n )\n \n )\n)\n\n# Define server logic \nserver <- function(input, output) {\n \n \n #filter dataset for sorting cars by car manufacturer\n registration <- reactive({\n \n count <- filter(final, Zulassung >= input$registration[1] & Zulassung <= input$registration[2])\n if (input$location != \"All\") {\n count <- filter(count, Ort == input$location)\n }\n if (input$car_manufacturer != \"All\") {\n count <- filter(count, Herstellernummer_Fahrzeug == input$car_manufacturer)\n }\n count$Zulassung <- format.Date(count$Zulassung, format = \"%Y-%m\")\n count$Zulassung <- as.character(count$Zulassung)\n count %>% group_by(Zulassung, Herstellernummer_Fahrzeug) %>%\n summarise(n = n())\n \n })\n #filter dataset for sorting cars by engine manufacturer\n registration_engine <- reactive({\n count <- filter(final, Zulassung >= input$registration[1] & Zulassung <= input$registration[2])\n if (input$location != \"All\") {\n count <- filter(count, Ort == input$location)\n }\n if (input$engine_manufacturer != \"All\") {\n count <- filter(count, Herstellernummer_Motor == input$engine_manufacturer)\n }\n count$Zulassung <- format.Date(count$Zulassung, format = \"%Y-%m\")\n count$Zulassung <- as.character(count$Zulassung)\n count %>% group_by(Zulassung, Herstellernummer_Motor) %>%\n summarise(n = n())\n })\n \n \n output$registration <- renderPlot({\n if (input$manufacturer == \"Car manufacturer\"){\n #create the plot based on the reactive registration() as bar plot\n p <- ggplot(data = registration(), aes(x = Zulassung, y = n, fill=factor(Herstellernummer_Fahrzeug))) +\n geom_bar(stat = \"identity\")\n #change the colours to shwocase the differen car manufacturer\n p <- p +scale_fill_manual(breaks = c(\"1\", \"2\"),\n values=c(\"#0B3B17\", \"#ff7f24\"))\n #name axis, plot and legend\n p <- p + guides(fill = guide_legend(title=\"Car manufacturer\"))\n p <- p + labs(x = \"Date Range\",\n y = \"Amount of Defective Cars registered\",\n title = \"Registration of defective cars\")\n } else {\n \n # create the plot based on the reactive registration_engine() as bar plot\n p <- ggplot(data = registration_engine(), aes(x = Zulassung, y = n, fill=factor(Herstellernummer_Motor))) +\n geom_bar(stat = \"identity\")\n # change the colours to shwocase the differen car manufacturer\n p <- p +scale_fill_manual(breaks = c(\"101\", \"102\", \"103\", \"104\"),\n values=c(\"#0B3B17\", \"#ff7f24\", \"#BEBEBE\", \"#a52a2a\"))\n #name axis, plot and legend\n p <- p + guides(fill = guide_legend(title=\"Engine manufacturer\"))\n p <- p + labs(x = \"Date Range\",\n y = \"Amount of Defective Cars registered\",\n title = \"Registration of defective cars by Engine 
manufacturers\")\n p\n }\n \n #format the plot in a nice format\n p <- p + theme(legend.position =\"bottom\", \n plot.title=element_text(size = 18, face=\"bold\"), \n axis.title = element_text(size = 12),\n legend.title.align = 0,\n legend.direction = \"horizontal\",\n legend.text = element_text(size = 12),\n legend.title = element_text(size = 12, face = \"bold\"),\n panel.background = element_rect(fill = \"white\", colour = \"black\"),\n panel.grid.major.y = element_line(colour = \"black\", linetype = \"solid\"),\n panel.grid.minor.y = element_line(colour = \"black\", linetype = \"dotted\"))\n p\n \n })\n \n filteredCars <- reactive ({\n #apply the filters as chosen by the user\n cars <- filter(final, Zulassung >= input$heatmap[1] & Zulassung <= input$heatmap[2])\n if (input$car_manufacturer2 != \"All\" & input$manufacturer2 == \"Car manufacturer\") {\n cars <- filter(cars, Herstellernummer_Fahrzeug == input$car_manufacturer2)\n }\n if (input$engine_manufacturer2 != \"All\" & input$manufacturer2 == \"Engine manufacturer\") {\n cars <- filter(cars, Herstellernummer_Motor == input$engine_manufacturer2)\n }\n cars\n }\n )\n \n \n \n #summarise locations to reflect number of defective cars per location\n heatmap <- reactive ({\n if (input$mapDisplay == 'Marker Clusters' ) {\n #ensure that the column names stay the same by taking the first row of the final dataset...\n heat <- head(final,1)\n #adding an empty \"fehleranzahl\" column...\n heat$fehleranzahl <- NA\n #setting output to an empty df, so that nothing can be displayed on the heatmap\n heat <- filter(heat, FALSE)\n } else {\n heat <- filteredCars() %>%\n group_by(Ort, Laengengrad, Breitengrad) %>%\n summarise(fehleranzahl = n()) %>%\n #filter by number of registered cars\n filter(fehleranzahl >= input$fehleranzahl)\n }\n heat\n })\n \n markers <- reactive ({\n if(input$mapDisplay == 'Heatmap') {\n #if the markers are not supposed to be displayed, this returns an empty data frame that still has the same column names\n markers <- filter(final, FALSE) \n } else {\n markers <- filteredCars()\n }\n markers\n })\n #prepare a dataset to be displayed as an overview in the table\n browseTable <- reactive({\n if (input$mapFilters == \"Use Map Filters\"){\n table <- filteredCars()\n } else {\n table <- final\n }\n #removing unneeded information:\n #the gps coordinates are not really relevant to humans\n #the car and motor manufacturer numbers are already represented in in ID_Fahrzeug and ID_Motor respectively\n table <- subset(table, select = -c(Laengengrad, Breitengrad, Herstellernummer_Fahrzeug, Herstellernummer_Motor))\n \n colnames(table) <- c(\"ID T1\", \"ID Engine\", \"ID Car\", \"Location of Registration\", \"Date of Registration\", \"Postcode\")\n table\n })\n \n \n #using the leaflet package, we can visualise the data on a map\n output$gerMap <- renderLeaflet(\n #take the dataset of cars that results from the filters that the user chose.\n leaflet(data = filteredCars()) %>% \n #adds a rendered map, by default openstreetmap\n addTiles() %>%\n #set the bounds of the map according to the coordinates of the locations\n fitBounds(min(final$Laengengrad),min(final$Breitengrad),max(final$Laengengrad),max(final$Breitengrad)) %>%\n #adds a heatmap representation of the density of registraiton of faulty cars\n addHeatmap(data = heatmap(), lng = ~Laengengrad, lat = ~Breitengrad, \n intensity = ~fehleranzahl, blur = 12, max = 10.0, radius = 8) %>%\n #postions one marker for each entry in the table\n addMarkers(data = markers(), ~Laengengrad, 
~Breitengrad, \n #display large amounts of markers as clusters\n clusterOptions = markerClusterOptions(), \n #display information about the car corresponding to the marker being clicked\n popup = ~paste(\"Zugelassen in: <b> \" ,Ort,\"</b> <br/>\",\n \"PLZ: \", Postleitzahl, \"<br/>\",\n \"Zulassung: \", Zulassung, \"<br/>\",\n \"ID_Fahrzeug: \", ID_Fahrzeug, \"<br/>\",\n \"ID_Motor: \", ID_Motor, \"<br/>\",\n \"ID_T1: \", ID_T1, \"<br/>\"\n )\n \n )\n )\n #display the dataset in a Data Table that the user can search, filter, and sort\n output$table <- renderDataTable(\n browseTable(),\n options = list(\n pageLength = 10\n )\n )\n \n \n}\n\n# Run the application \nshinyApp(ui = ui, server = server)\n\n"
},
{
"alpha_fraction": 0.5683653354644775,
"alphanum_fraction": 0.572681725025177,
"avg_line_length": 43.06748580932617,
"blob_id": "dd8b3c8043a64553f700c75c66a29418aff944a2",
"content_id": "27170c27b3a6db0277a276d1a46a9e2754e50874",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7182,
"license_type": "no_license",
"max_line_length": 132,
"num_lines": 163,
"path": "/Spotify/authorisation.py",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "import webbrowser\nimport requests\nimport json\nimport base64\nimport six\n\n#To Do\n# Save token in document in json format\n# get the token out of the document, if token is expired access get new token and overwrite it\n\nclass SpotifyAuthorisation():\n \n \"\"\"\n Class to handle the Authorization to the Spotify Web API. The class is initiated with a client_id, client_secret, scope and \n redirect_uri. \n \n Spotify Dashboard: https://developer.spotify.com/dashboard/login \n Documentaton Spotify Web API: https://developer.spotify.com/documentation/web-api/quick-start/\n Authorization Guide: https://developer.spotify.com/documentation/general/guides/authorization-guide/\n \n client_id and client secret: Will be provided when an App is created on the Spotify Developer Pages\n Scope: Determines the authorization the user needs to provide to access specific data in the Spotify Web API. More information \n can be found in the documentation for the spotify web api\n redirect_uri: Needs to be added to the white list of the App created on the Spotify Developer Pages\n \"\"\"\n \n def __init__(self, client_id:str, client_secret:str, scope:str, redirect_uri:str):\n self.client_id = client_id\n self.client_secret = client_secret\n self.scope = scope\n self.redirect_uri = redirect_uri\n \n def read_token(self, document):\n \"\"\"\n Reads the file with the information about the token.\n \"\"\"\n with open(r\"C:\\Users\\KhacM\\GitHub\\MinhLe2\\Spotify\\Authorisation\\{}\".format(document), \"r\") as file:\n token_info = file.read()\n file.close()\n\n token_info = json.loads(token_info)\n \n return token_info\n \n def get_tokeninfo(self): \n \"\"\"\n Get the current Token information saved in the document Token_info.txt. If the Document does not exist a new token will be \n created. If the token is expired a new token will be created with the refresh token in the document. If there is no refresh \n token a complete new token will be created and saved in .../Authorisation/Token_info.txt. \n \"\"\"\n try:\n token_info = self.read_token(\"Token_info.txt\")\n except:\n print(\"It seems that there is no token available. A new token will be created.\")\n self.get_token()\n \n token_info = self.read_token(\"Token_info.txt\")\n \n #Test if the token is expired or not\n expired = self.test_token(token_info[\"access_token\"])\n \n #If the token is expired a new one will be created with the refresh_token. If the refresh_token does not exist a complete \n #new one will be created\n if expired:\n try:\n token_info = self.read_token(\"Refresh.txt\")\n self.refresh_token(token_info[\"refresh_token\"])\n token_info = self.read_token(\"Token_info.txt\")\n except:\n print(\"No Refresh Token found...\")\n self.get_token()\n token_info = self.read_token(\"Token_info.txt\")\n return token_info\n\n \n def get_authorization_code(self):\n \n \"\"\"\n Redirect the user to the page, where the Authorization Code in the url can be copied to get the access token\n \"\"\"\n #Request to the spotify web api endpoint to access the authorization code\n query = \"https://accounts.spotify.com/authorize\"\n response = requests.get(query, \n params = {\"client_id\": self.client_id,\n \"response_type\": \"code\",\n \"redirect_uri\": self.redirect_uri,\n \"scope\": self.scope,\n \"show_dialog\": \"false\"\n })\n \n #Open the url, so that the url of the redirection page can be copied\n webbrowser.open(response.url)\n \n def get_token(self):\n \"\"\"\n Function to get the Token with the Authorization Code. 
The URL of the page the user is redirect to needs to be copied into \n the terminal. The response of the request to get the token will be saved in ./Authorisation/Token_info.txt to make it \n accessable via the App\n \"\"\"\n print(\"A new token will be created...\")\n self.get_authorization_code()\n print(\"Please paste the url of the redirected into the console:\")\n redirect_url = input()\n code_index = redirect_url.find(\"code\")\n \n query = \"https://accounts.spotify.com/api/token\"\n code = redirect_url[code_index+5:]\n auth_header = base64.b64encode(six.text_type(self.client_id + \":\" + self.client_secret).encode(\"ascii\"))\n headers = {\"Authorization\": \"Basic %s\" % auth_header.decode(\"ascii\"), \"Content-Type\": \"application/x-www-form-urlencoded\"}\n\n response = requests.post(query, \n data = {\"grant_type\": \"authorization_code\",\n \"code\": code,\n \"redirect_uri\": \"http://localhost:8888\"},\n headers = headers)\n \n if response.status_code == 200:\n with open(\"Authorisation/Token_info.txt\", \"w+\") as file:\n file.write(response.text)\n file.close\n else:\n print(response.content)\n \n def refresh_token(self, refresh_token):\n \"\"\"\n Method to refresh the token with the help of a refresh token\n \"\"\"\n \n print(\"The token will be refreshed...\")\n query = \"https://accounts.spotify.com/api/token\"\n auth_header = base64.b64encode(six.text_type(self.client_id + \":\" + self.client_secret).encode(\"ascii\"))\n headers = {\"Authorization\": \"Basic %s\" % auth_header.decode(\"ascii\"), \"Content-Type\": \"application/x-www-form-urlencoded\"}\n\n response = requests.post(query, \n data = {\"grant_type\": \"refresh_token\",\n \"refresh_token\": refresh_token},\n headers = headers)\n \n if response.status_code == 200:\n token = response.json()[\"access_token\"]\n with open(\"Authorisation/Token_info.txt\", \"w+\") as file:\n file.write(response.text)\n file.close\n print(\"The token is refreshed.\")\n else:\n print(response.content)\n \n def test_token(self, token): \n \"\"\"\n Test if the token is expired or not. Returns True if the token is expired.\n \"\"\"\n spot_id = \"4evmHXcjt3bTUHD1cvny97\"\n endpoint = \"https://api.spotify.com/v1/audio-features\"\n header = {\"Authorization\": \"Bearer {}\".format(token)}\n \n response = requests.get(\"{}/{}\".format(endpoint, spot_id),\n headers = header)\n \n if response.status_code == 401:\n print(\"The token is expired\")\n return True\n else:\n return False"
},
{
"alpha_fraction": 0.7811785936355591,
"alphanum_fraction": 0.7834627628326416,
"avg_line_length": 86.55999755859375,
"blob_id": "a566d4e74ca88ea9e6635c60e12790fe41b7809a",
"content_id": "575748ae1fc4b226d6cd174bfc59c83a3f4fb5ca",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 4378,
"license_type": "no_license",
"max_line_length": 558,
"num_lines": 50,
"path": "/Spotify/README.md",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "# Spotify Data Collector\n\n## To Do\n- Create Tableau Dashboard providing interesting insights into listening habits \n- edit the path where the token is saved to a flexible path\n- automate the renaming of the token information with the refresh token, when there is no document with the refresh token\n\n## Purpose of the Project\n\nThe purpose of this project is to collect the most recent heard songs on spotify and collect and store them into a MySql Database. The data should be used to visualize interesting insights about the personal music habits into a Tableau Dashboard. If the main.py is executed the a MySQL Database will be written with the most recently listened songs. The app access thereby the Spotify Web Api to extract these songs. For more information about the Spotify Web Api, please check their pages (https://developer.spotify.com/documentation/web-api/quick-start/). \n\n## First Steps\n\n1. Create an App on the Spotify for Developer Page (https://developer.spotify.com/dashboard/).\n2. Add a redirect URL to the white list of the created Application.\n3. Set up a MySQL Database. More information can be found on the MySQL Pages (https://dev.mysql.com/doc/mysql-getting-started/en/). The database with the neccessary tables and columns can be created by running the SQL Code in the folder \"SQL\".\n4. Edit the main.py file with your personal information. Add your client_id and secret which can be found in the view of your application on Spotify for Developer. Edit also the redirect_url to one specified in the white list of your app. Add your MySQL Database information, so that the client can connect to your database and write the information in. \n5. Specify the path in the authorization.py file in the read_token method to the file which leads to the Authorization folder in your local client.\n6. Run the main.py file for the first time to get your Authorization Information. After running main.py you will be redirected to your redirect_url. Copy the url of in paste it into the terminal. The code will extract the neccessary information on its own. \n7. The document will be saved in the folder Authorization. If you run the code the first time, rename your file to \"Refresh.txt\". This file will include your refresh token, which the app will access to get access in the future. \n8. Automate the execution of the main.py file. If you have windows you can do that easily with the Windows Task Scheduler.\n\n## Content of the Modules\n- **main.py**: The main Python File to execute the data collection.\n- **authorization.py**: Handles the Authorization of to access the Spotify Web Api after the Authorization Code Flow. More information about the Authorization and the Authoriztion Code Flow can be found here: https://developer.spotify.com/documentation/general/guides/authorization-guide/\n- **dbconnector.py**: Handles the connection to the MySQL database and writes the information provided by the Spotify Web Api to it.\n- **extractdata.py**: Handles the extraction of the recently listened songs and some audio features of this song. The extracted data will be returned as pandas Dataframe\n\n## Data Collected\n\nThe following data will be collected and written to the database:\n \n **Song History**\n - ID (primary key, auto increment): This is the unique ID in your MySQL Database, which will update automatically\n - Time: The time you listened to the song\n - Song_Name: The Name of the Song\n - Spotify_ID: The unique Spotify ID for that song. 
This is needed to join the tables with the second table \"song_data\"\n - Spotify_URI: A unique identifier for each song specified by Spotify\n - Popularity: The popularity of the song \n - Object_Type: The object type from where the song was played from\n \n **Song Data**\n- Spotify_ID (primary key), Spotify_URI: The same like in Song History\n- Artist: The artist of the song:\n- Album: The album the song belongs to\n- Duration: The duration of the song\n- The following Audio Features: Acousticness, Danceability, Energy, Instrumentalness, key_spotify, Liveness, Loudness, Mode, Speechiness, Tempo, Time_Signature, Valence (More information about each can be found here: https://developer.spotify.com/documentation/web-api/reference/tracks/get-audio-features/)\n\n## Restrictions\n- The Spotify Web Api can return a maximum of the last 50 listened songs. Keep that in mind if you want to collect the data to avoid gaps in it.\n"
},
{
"alpha_fraction": 0.5799999833106995,
"alphanum_fraction": 0.5799999833106995,
"avg_line_length": 15.708333015441895,
"blob_id": "2f5d17d349646bebd9c028b805a3794aba7bee2e",
"content_id": "ec346cc6610e04bd950e0e2821dc49788d3d5187",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 400,
"license_type": "no_license",
"max_line_length": 59,
"num_lines": 24,
"path": "/University/DQE/modules/aufgaben/versuchsplan.R",
"repo_name": "minh-le1994/MinhLe",
"src_encoding": "UTF-8",
"text": "versuchsplan_ui <- function(id) {\n ns <- NS(id)\n \n tagList(\n data_selector_ui(\n id = ns(\"id_data_selector\")\n )\n )\n}\n\nversuchsplan <- function(input, output, session, .values) {\n \n ns <- session$ns\n \n data <- reactive({\n data_selector_return$data()\n })\n \n data_selector_return <- callModule(\n module = data_selector,\n id = \"id_data_selector\",\n .values = .values\n )\n}"
}
] | 21 |
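The authorisation module above rotates tokens by hand; for readers following along, here is a minimal sketch of the refresh-token exchange it performs, reduced to a single function. The endpoint and the Basic-auth header mirror the `refresh_token` method, while `CLIENT_ID`, `CLIENT_SECRET` and `REFRESH_TOKEN` are placeholder values you would substitute yourself.

```python
# Minimal sketch of Spotify's refresh-token exchange (placeholder credentials).
import base64
import requests

CLIENT_ID = "your-client-id"          # placeholder: from the Spotify dashboard
CLIENT_SECRET = "your-client-secret"  # placeholder: from the Spotify dashboard
REFRESH_TOKEN = "your-refresh-token"  # placeholder: obtained once via the code flow

def refresh_access_token():
    # Spotify expects "Basic base64(client_id:client_secret)" on the token endpoint
    auth = base64.b64encode(
        "{}:{}".format(CLIENT_ID, CLIENT_SECRET).encode("ascii")).decode("ascii")
    response = requests.post(
        "https://accounts.spotify.com/api/token",
        data={"grant_type": "refresh_token", "refresh_token": REFRESH_TOKEN},
        headers={"Authorization": "Basic " + auth,
                 "Content-Type": "application/x-www-form-urlencoded"},
    )
    response.raise_for_status()
    return response.json()["access_token"]
```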
raulchacon/top-repos
|
https://github.com/raulchacon/top-repos
|
3a9b490322ff098c01f333b026e618e70316a91c
|
5f7b4a32df21f6a6e3e8fddcc7aa78fe083d8f4e
|
636284d15fdbc34643dccbbf22bb4bf3420eed8f
|
refs/heads/master
| 2015-09-26T05:07:56.877029 | 2015-09-13T16:19:33 | 2015-09-13T16:19:33 | 42,217,230 | 1 | 0 | null | 2015-09-10T02:33:24 | 2015-09-11T02:08:59 | 2015-09-13T16:19:33 |
Python
|
[
{
"alpha_fraction": 0.7208510637283325,
"alphanum_fraction": 0.7242553234100342,
"avg_line_length": 24,
"blob_id": "675468653885acd9370d890e68cd4326f2373363",
"content_id": "4031908f06d7a47791a43de41555d7bdcd8f92f1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1175,
"license_type": "no_license",
"max_line_length": 181,
"num_lines": 47,
"path": "/README.md",
"repo_name": "raulchacon/top-repos",
"src_encoding": "UTF-8",
"text": "## Top Repos\n\n[](https://travis-ci.org/raulchacon/top-repos)\n[](https://coveralls.io/github/raulchacon/top-repos?branch=master)\n\nA single python module, `toprepos`, that prints a JSON dictionary containing the top number of repos that contain a given keyword, sorted by the number of forks (highest to lowest).\n\n## Installation\n\n```\n$ pip install -r requirements.txt\n```\n\n## Usage\n\n```\n$ python toprepos.py -h\n```\n\n```\nusage: toprepos.py [-h] [--token TOKEN] keyword count\n\nGet top github repos by keyword\n\npositional arguments:\n keyword keyword to search\n count Number of repositories to return\n\noptional arguments:\n -h, --help show this help message and exit\n --token TOKEN GitHub API token\n```\n\nExample:\n\nPrints JSON dictionary of top 30 repositories sorted by number of forks in descending order that include the keyword `python`\n\n```\n$ python toprepos.py python 30\n```\n\n## Running Tests\n\n```\n$ pip install -r requirements_test.txt\n$ python -m unittest tests\n```\n"
},
{
"alpha_fraction": 0.4421052634716034,
"alphanum_fraction": 0.6526315808296204,
"avg_line_length": 12.714285850524902,
"blob_id": "ef30d5c06cffb5c26e8e96bc3c748c69aa7ece4f",
"content_id": "291fc9dbd2747fe4caf9f0c3a22d94de830138e2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 95,
"license_type": "no_license",
"max_line_length": 16,
"num_lines": 7,
"path": "/requirements_test.txt",
"repo_name": "raulchacon/top-repos",
"src_encoding": "UTF-8",
"text": "cookies==2.2.1\nfuncsigs==0.4\nmock==1.3.0\npbr==1.7.0\nrequests==2.7.0\nresponses==0.4.0\nsix==1.9.0"
},
{
"alpha_fraction": 0.575208306312561,
"alphanum_fraction": 0.5980635285377502,
"avg_line_length": 33.56031036376953,
"blob_id": "b6ec0f5350c5429e19f208b640c1e84358390e3d",
"content_id": "992ebd5a521c22e835c43e421fe7c45deb4fee6f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8882,
"license_type": "no_license",
"max_line_length": 153,
"num_lines": 257,
"path": "/tests.py",
"repo_name": "raulchacon/top-repos",
"src_encoding": "UTF-8",
"text": "import sys\nimport json\nimport unittest\n\nimport responses\n\nfrom toprepos import trim_repo, get_top_repos, get_per_page, _main\n\n\nclass PseudoArgs(object):\n def __init__(self, keyword, count, token):\n self.keyword = keyword\n self.count = count\n self.token = token\n\n\nclass PseudoFile(list):\n \"\"\"Simplified file interface\n \"\"\"\n write = list.append\n\n def getvalue(self):\n return ''.join(self)\n\n\nclass TopReposShellTests(unittest.TestCase):\n \"\"\"Test the CLI options\n \"\"\"\n def setUp(self):\n self._saved_argv = sys.argv\n self._saved_stdout = sys.stdout\n self._saved_stderr = sys.stderr\n self._config_filenames = []\n self.stdin = ''\n sys.argv = ['toprepos']\n sys.stdout = PseudoFile()\n sys.stderr = PseudoFile()\n\n def toprepos(self, *args):\n del sys.stdout[:], sys.stderr[:]\n sys.argv[1:] = args\n try:\n _main()\n errorcode = None\n except SystemExit:\n errorcode = sys.exc_info()[1].code\n return sys.stdout.getvalue(), sys.stderr.getvalue(), errorcode\n\n def test_print_usage(self):\n stdout, stderr, errcode = self.toprepos('--help')\n self.assertFalse(errcode)\n self.assertFalse(stderr)\n self.assertTrue(stdout.startswith('usage: toprepos [-h] [--token TOKEN] keyword count'))\n\n def test_with_no_arguments(self):\n stdout, stderr, errcode = self.toprepos()\n self.assertEqual(2, errcode)\n self.assertTrue(\"too few arguments\" in stderr)\n\n def test_with_1_of_2_positional_args(self):\n stdout, stderr, errcode = self.toprepos('keyword')\n self.assertEqual(2, errcode)\n self.assertTrue(\"too few arguments\" in stderr)\n\n @responses.activate\n def test_passing_arguments_to_get_top_repos_with_positional_args(self):\n with open('fixtures.json') as f:\n data = json.load(f)\n\n body = json.dumps(data)\n\n headers = {\n 'content_type': 'json',\n 'link': '<https://api.github.com/search/repositories?q=python&sort=forks&order=desc&per_page=100&page=2>; rel=\"next\",'\n }\n\n responses.add(\n responses.GET,\n url='https://api.github.com/search/repositories?q=python&sort=forks&order=desc&per_page=30',\n body=body,\n status=200,\n adding_headers=headers,\n match_querystring=True\n )\n\n self.toprepos('python', '22')\n\n self.assertEqual(len(responses.calls), 1)\n self.assertEqual(responses.calls[0].request.url, 'https://api.github.com/search/repositories?q=python&sort=forks&order=desc&per_page=30')\n self.assertEqual(responses.calls[0].response.text, body)\n\n @responses.activate\n def test_passing_arguments_to_get_top_repos_with_all_args(self):\n with open('fixtures.json') as f:\n data = json.load(f)\n\n body = json.dumps(data)\n\n headers = {\n 'content_type': 'json',\n 'link': '<https://api.github.com/search/repositories?q=python&sort=forks&order=desc&per_page=100&page=2>; rel=\"next\",'\n }\n\n responses.add(\n responses.GET,\n url='https://api.github.com/search/repositories?q=python&sort=forks&order=desc&per_page=30',\n body=body,\n status=200,\n adding_headers=headers,\n match_querystring=True\n )\n\n self.toprepos('python', '22', '--token', '12345')\n\n self.assertEqual(len(responses.calls), 1)\n self.assertEqual(responses.calls[0].request.url, 'https://api.github.com/search/repositories?q=python&sort=forks&order=desc&per_page=30')\n self.assertEqual(responses.calls[0].request.headers['Authorization'], 'token 12345')\n self.assertEqual(responses.calls[0].response.text, body)\n\n\nclass TestTopRepos(unittest.TestCase):\n def setUp(self):\n self.maxDiff = None\n with open('fixtures.json') as f:\n self.data = json.load(f)\n\n def test_trim_repo(self):\n item 
= self.data['items'][0]\n\n trim_item = trim_repo(item)\n\n self.assertEqual(trim_item, {\n 'id': 11086078,\n 'name': u'MFRC522-python',\n 'description': u'A small class to interface with the NFC reader Module MFRC522',\n 'language': u'Python',\n 'created_at': u'2013-07-01T06:14:21Z',\n 'html_url': u'https://github.com/mxgxw/MFRC522-python',\n 'watchers_count': 70,\n 'forks_count': 52,\n 'owner': {\n 'username': u'mxgxw',\n 'id': 1070304,\n 'html_url': u'https://github.com/mxgxw'\n }\n })\n\n def test_per_page_when_lte_30(self):\n per_page = get_per_page(30)\n\n self.assertEqual(per_page, 30)\n\n def test_per_page_when_lte_50(self):\n per_page = get_per_page(50)\n\n self.assertEqual(per_page, 50)\n\n def test_per_page_when_gt_50(self):\n per_page = get_per_page(51)\n\n self.assertEqual(per_page, 100)\n\n @responses.activate\n def test_get_top_repos_with_only_one_api_call(self):\n body = json.dumps(self.data)\n\n headers = {\n 'content_type': 'json',\n 'link': '<https://api.github.com/search/repositories?q=python&sort=forks&order=desc&per_page=100&page=2>; rel=\"next\",'\n }\n\n responses.add(\n responses.GET,\n url='https://api.github.com/search/repositories?q=python&sort=forks&order=desc&per_page=30',\n body=body,\n status=200,\n adding_headers=headers,\n match_querystring=True\n )\n\n res = get_top_repos('python', 22, '12345')\n\n repos = json.loads(res)\n\n self.assertEqual(len(responses.calls), 1)\n self.assertEqual(responses.calls[0].request.url, 'https://api.github.com/search/repositories?q=python&sort=forks&order=desc&per_page=30')\n self.assertEqual(responses.calls[0].request.headers['Authorization'], 'token 12345')\n self.assertEqual(responses.calls[0].response.text, body)\n\n self.assertEqual(repos['keyword'], 'python')\n self.assertEqual(repos['requested_count'], 22)\n self.assertEqual(repos['returned_count'], 22)\n self.assertEqual(len(repos['repos']), 22)\n self.assertEqual(repos['repos'][0]['id'], 11086078)\n\n @responses.activate\n def test_get_top_repos_with_more_than_one_api_call(self):\n body = json.dumps(self.data)\n\n headers = {\n 'content_type': 'json',\n 'link': '<https://api.github.com/search/repositories?q=python&sort=forks&order=desc&per_page=100&page=2>; rel=\"next\",'\n }\n\n responses.add(\n responses.GET,\n url='https://api.github.com/search/repositories?q=python&sort=forks&order=desc&per_page=100',\n body=body,\n status=200,\n adding_headers=headers,\n match_querystring=True\n )\n responses.add(\n responses.GET,\n url='https://api.github.com/search/repositories?q=python&sort=forks&order=desc&per_page=100&page=2',\n body=body,\n status=200,\n content_type='json',\n adding_headers={'Link': 'whatevs'},\n match_querystring=True\n )\n\n res = get_top_repos('python', 200)\n\n repos = json.loads(res)\n\n self.assertEqual(len(responses.calls), 2)\n self.assertEqual(responses.calls[0].request.url, 'https://api.github.com/search/repositories?q=python&sort=forks&order=desc&per_page=100')\n self.assertEqual(responses.calls[1].request.url, 'https://api.github.com/search/repositories?q=python&sort=forks&order=desc&per_page=100&page=2')\n self.assertEqual(responses.calls[0].response.text, body)\n self.assertEqual(responses.calls[1].response.text, body)\n\n self.assertEqual(repos['keyword'], 'python')\n self.assertEqual(repos['requested_count'], 200)\n self.assertEqual(repos['returned_count'], 200)\n self.assertEqual(len(repos['repos']), 200)\n self.assertEqual(repos['repos'][0]['id'], 11086078)\n\n @responses.activate\n def test_get_top_repos_when_not_200_OK(self):\n 
responses.add(\n responses.GET,\n url='https://api.github.com/search/repositories?q=python&sort=forks&order=desc&per_page=30',\n body='',\n status=404,\n content_type='json',\n adding_headers={'Link': 'whatevs'},\n match_querystring=True\n )\n\n self.assertRaises(RuntimeError, get_top_repos, 'python', 22)\n self.assertEqual(len(responses.calls), 1)\n self.assertEqual(responses.calls[0].request.url, 'https://api.github.com/search/repositories?q=python&sort=forks&order=desc&per_page=30')\n\n\nif __name__ == \"__main__\":\n unittest.main()\n"
},
{
"alpha_fraction": 0.5446373224258423,
"alphanum_fraction": 0.551146924495697,
"avg_line_length": 26.3389835357666,
"blob_id": "6470738599091ea6affd1b2ae78b32de88b62f98",
"content_id": "c23b615c136d4b0722492467b6997d9128473a94",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3226,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 118,
"path": "/toprepos.py",
"repo_name": "raulchacon/top-repos",
"src_encoding": "UTF-8",
"text": "import json\nimport argparse\n\nimport requests\n\n\ndef trim_repo(repo):\n \"\"\"Return new dictionary with only desired keys, values\n \"\"\"\n return {\n 'id': repo['id'],\n 'name': repo['name'],\n 'description': repo['description'],\n 'language': repo['language'],\n 'created_at': repo['created_at'],\n 'html_url': repo['html_url'],\n 'watchers_count': repo['watchers_count'],\n 'forks_count': repo['forks_count'],\n 'owner': {\n 'username': repo['owner']['login'],\n 'id': repo['owner']['id'],\n 'html_url': repo['owner']['html_url']\n }\n }\n\n\ndef get_per_page(count):\n \"\"\"Determin per_page sized based on count and\n GitHub's allowed per_page sizes\n \"\"\"\n if count <= 30:\n return 30\n elif count <= 50:\n return 50\n\n return 100\n\n\ndef get_top_repos(keyword, count, token=''):\n \"\"\"Return JSON dictionary containing top `count` github repos\n containing supplied `keyword`. Optionally use GitHub token if\n supplied\n \"\"\"\n per_page = get_per_page(count)\n\n params = {\n 'q': keyword,\n 'sort': 'forks',\n 'order': 'desc',\n 'per_page': per_page\n }\n url = 'https://api.github.com/search/repositories'\n headers = {'Accept': 'application/vnd.github.v3+json'}\n\n if token != '':\n headers['Authorization'] = 'token %s' % token\n\n response = requests.get(url, params=params, headers=headers)\n\n if response.status_code != 200:\n raise RuntimeError(\"Status Code recieved: %s\" % response.status_code)\n\n data = response.json()\n\n repos = []\n for item in data['items']:\n repos.append(trim_repo(item))\n if len(repos) == count:\n break\n\n result = {\n 'keyword': keyword,\n 'requested_count': count,\n 'repos': repos\n }\n\n # Check if there is another page of results\n # based on `Link` in response header\n while ('rel=\"next\"' in response.headers['Link'] and\n len(result['repos']) < count):\n links = response.headers['Link'].split(',')\n\n # Send a GET request to next link\n for link in links:\n if 'rel=\"next\"' in link:\n link_parts = link.split(';')\n next_link = link_parts[0][1:-1]\n response = requests.get(next_link, headers=headers)\n\n if response.status_code == 200:\n data = response.json()\n\n for item in data['items']:\n result['repos'].append(trim_repo(item))\n if len(result['repos']) == count:\n break\n break\n\n result['returned_count'] = len(result['repos'])\n\n return json.dumps(result)\n\n\ndef _main():\n parser = argparse.ArgumentParser(\n description='Get top github repos by keyword'\n )\n\n parser.add_argument('keyword', help='keyword to search')\n parser.add_argument('count', help='Number of repositories to return', type=int)\n parser.add_argument('--token', help='GitHub API token', default='')\n args = parser.parse_args()\n\n print get_top_repos(args.keyword, args.count, args.token)\n\n\nif __name__ == '__main__':\n _main()\n"
},
{
"alpha_fraction": 0.5959596037864685,
"alphanum_fraction": 0.5959596037864685,
"avg_line_length": 11.375,
"blob_id": "7c4d45aae32c6f361e37822d278e0f7a6f2faa2d",
"content_id": "df15c191308acc089dd0791e5bfcf096c335698c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "INI",
"length_bytes": 99,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 8,
"path": "/.coveragerc",
"repo_name": "raulchacon/top-repos",
"src_encoding": "UTF-8",
"text": "[report]\n\nexclude_lines =\n if __name__ == .__main__.:\n\n[html]\n\ndirectory = coverage_html_report\n"
}
] | 5 |
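toprepos.py above walks GitHub's `Link` response header by splitting strings manually; the `requests` library also exposes the parsed header as `response.links`, which shortens the pagination loop considerably. A sketch of that alternative (not the module's own code) against the same search endpoint:

```python
# Sketch: Link-header pagination using requests' built-in parser (response.links).
import requests

def iter_search_pages(keyword, max_pages=5):
    url = "https://api.github.com/search/repositories"
    params = {"q": keyword, "sort": "forks", "order": "desc", "per_page": 100}
    for _ in range(max_pages):
        response = requests.get(url, params=params)
        response.raise_for_status()
        yield response.json()["items"]
        # requests parses the Link header into a dict keyed by rel ("next", "last", ...)
        next_link = response.links.get("next")
        if next_link is None:
            break
        url, params = next_link["url"], None  # the next URL already carries the query string
```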
intellogercorp/frappe
|
https://github.com/intellogercorp/frappe
|
538f9d5dacbcbef20cc5327b35593fcf2eca1bee
|
943e37adf38eb19839fb8e0fafd8c1a315e0d797
|
a53046808eacb0d274ebbeba85c69ba89ef2d798
|
refs/heads/develop
| 2023-06-24T20:03:51.127690 | 2021-07-28T06:48:16 | 2021-07-28T06:48:16 | 345,658,707 | 0 | 0 |
MIT
| 2021-03-08T13:05:19 | 2021-03-08T13:05:21 | 2021-04-01T13:24:49 | null |
[
{
"alpha_fraction": 0.7458158731460571,
"alphanum_fraction": 0.7771966457366943,
"avg_line_length": 52.11111068725586,
"blob_id": "31d407a26078227a40d5aa05e18b66300e68a286",
"content_id": "30b5d43905b7a2bcb7ab8c0f95e53534f5290102",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 956,
"license_type": "permissive",
"max_line_length": 57,
"num_lines": 18,
"path": "/frappe/public/js/controls.bundle.js",
"repo_name": "intellogercorp/frappe",
"src_encoding": "UTF-8",
"text": "import \"air-datepicker/dist/js/datepicker.min.js\";\nimport \"air-datepicker/dist/js/i18n/datepicker.cs.js\";\nimport \"air-datepicker/dist/js/i18n/datepicker.da.js\";\nimport \"air-datepicker/dist/js/i18n/datepicker.de.js\";\nimport \"air-datepicker/dist/js/i18n/datepicker.en.js\";\nimport \"air-datepicker/dist/js/i18n/datepicker.es.js\";\nimport \"air-datepicker/dist/js/i18n/datepicker.fi.js\";\nimport \"air-datepicker/dist/js/i18n/datepicker.fr.js\";\nimport \"air-datepicker/dist/js/i18n/datepicker.hu.js\";\nimport \"air-datepicker/dist/js/i18n/datepicker.nl.js\";\nimport \"air-datepicker/dist/js/i18n/datepicker.pl.js\";\nimport \"air-datepicker/dist/js/i18n/datepicker.pt-BR.js\";\nimport \"air-datepicker/dist/js/i18n/datepicker.pt.js\";\nimport \"air-datepicker/dist/js/i18n/datepicker.ro.js\";\nimport \"air-datepicker/dist/js/i18n/datepicker.sk.js\";\nimport \"air-datepicker/dist/js/i18n/datepicker.zh.js\";\nimport \"./frappe/ui/capture.js\";\nimport \"./frappe/form/controls/control.js\";\n"
},
{
"alpha_fraction": 0.6938144564628601,
"alphanum_fraction": 0.6969072222709656,
"avg_line_length": 34.925926208496094,
"blob_id": "8e59085d428e68025d9e2d388a5f3c7acec8fc9a",
"content_id": "2e99a6f3cd6852be2e50526f3338051816fe1a03",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 970,
"license_type": "permissive",
"max_line_length": 95,
"num_lines": 27,
"path": "/frappe/patches/v13_0/rename_cancelled_docs.py",
"repo_name": "intellogercorp/frappe",
"src_encoding": "UTF-8",
"text": "import frappe\nfrom frappe.model.naming import NameParser\nfrom frappe.model.rename_doc import rename_doc\n\ndef execute():\n\t\"\"\"Rename already cancelled documents by adding `CAN-X` postfix instead of `-X`.\n\t\"\"\"\n\tfor doctype in frappe.db.get_all('DocType'):\n\t\tdoctype = frappe.get_doc('DocType', doctype.name)\n\t\tif doctype.is_submittable and frappe.db.table_exists(doctype.name):\n\t\t\tcancelled_docs = frappe.db.get_all(doctype.name, ['amended_from', 'name'], {'docstatus':2})\n\n\t\t\tfor doc in cancelled_docs:\n\t\t\t\tif '-CAN-' in doc.name:\n\t\t\t\t\tcontinue\n\n\t\t\t\tcurrent_name = doc.name\n\n\t\t\t\tif getattr(doc, \"amended_from\", None):\n\t\t\t\t\torig_name, counter = NameParser.parse_docname(doc.name)\n\t\t\t\telse:\n\t\t\t\t\torig_name, counter = doc.name, 0\n\t\t\t\tnew_name = f'{orig_name}-CAN-{counter or 0}'\n\n\t\t\t\tprint(f\"Renaming {doctype.name} record from {current_name} to {new_name}\")\n\t\t\t\trename_doc(doctype.name, current_name, new_name, ignore_permissions=True, show_alert=False)\n\tfrappe.db.commit()\n"
}
] | 2 |
mukvrm/OSCP-Prep
|
https://github.com/mukvrm/OSCP-Prep
|
9746285c4de96f1ba4d7427c9d64d8bab3bbe3e6
|
6e08a2450343b3f6b36e40a4b43e927754002f6f
|
27046e1eb392af7e7c96802c9d925780eef24a0d
|
refs/heads/master
| 2022-04-23T02:20:49.217323 | 2020-04-26T23:13:48 | 2020-04-26T23:13:48 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6916376352310181,
"alphanum_fraction": 0.6986062526702881,
"avg_line_length": 29.263158798217773,
"blob_id": "0a1d3612ce7a2c587f6362e75a043fc70ffd3672",
"content_id": "392d2f8dc0a063bb9fbfcacd74f0dbea76265a7c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 574,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 19,
"path": "/Python 3/httpClient-CheckResourceviaGET.py",
"repo_name": "mukvrm/OSCP-Prep",
"src_encoding": "UTF-8",
"text": "import http.client\n\nprint('** Check resources in WWW server and output HTTP status code via GET/HTTP **\\n')\n\nhost = input('Insert the host/DST IP: ')\nport = input('Insert the DST PORT:(default:80) ')\nurl = input('Insert the URL of the resource you want to check for ')\n\nif(port == ''):\n port = 80\n\ntry:\n connection = http.client.HTTPConnection(host, port)\n connection.request('GET', url)\n response = connection.getresponse()\n print('Status Code Returned: ', response.status)\n connection.close()\nexcept ConnectionRefusedError:\n print('Connection Failed')"
},
{
"alpha_fraction": 0.6819085478782654,
"alphanum_fraction": 0.6898608207702637,
"avg_line_length": 27,
"blob_id": "75d05d1c98643083159ee702e878fc98fc2d29e4",
"content_id": "478b47d036d8a4b8565cdea51e5f47432e63d1ee",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 503,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 18,
"path": "/Python 3/httpClient.py",
"repo_name": "mukvrm/OSCP-Prep",
"src_encoding": "UTF-8",
"text": "import http.client\n\nprint('** Lists methods available via OPTIONS/HTTP **\\n')\n\nhost = input('Insert the host/DST IP: ')\nport = input('Insert the DST PORT:(default:80) ')\n\nif(port == ''):\n port = 80\n\ntry:\n connection = http.client.HTTPConnection(host, port)\n connection.request('OPTIONS', '/')\n response = connection.getresponse()\n print('Enabled methods on webserver are: ', response.getheader('allow'))\n connection.close()\nexcept ConnectionRefusedError:\n print('Connection Failed')"
},
{
"alpha_fraction": 0.6194690465927124,
"alphanum_fraction": 0.6312684416770935,
"avg_line_length": 29.81818199157715,
"blob_id": "6759d8eddd9b0296733461d94a01e9d3be2e19ce",
"content_id": "8cfc817aa0be37c7b95b6e4ba1df8450a1c7d4f9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 678,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 22,
"path": "/Python 3/PortScanner.py",
"repo_name": "mukvrm/OSCP-Prep",
"src_encoding": "UTF-8",
"text": "# Goal - Port scanner\nimport socket\n\nTARGET = input(\"Server's IP: \")\nPRANGE = input(\"Enter port range i.e 1-100: \")\n\nlowport = int(PRANGE.split('-')[0])\nhighport = int(PRANGE.split('-')[1])\n\n# Output given range and target\nprint('Scanning host ', TARGET, 'from port ', lowport, 'to port ', highport)\n\n# For loop iterates attempting connections on all ports in range\n# 0 = port open; otherwise closed.\nfor port in range(lowport, highport):\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n status = s.connect_ex((TARGET, port))\n if(status == 0):\n print('*** Port', port, '- OPEN ***')\n else:\n print('*** Port', port, '- CLOSED ***')\n s.close()\n"
},
{
"alpha_fraction": 0.737609326839447,
"alphanum_fraction": 0.7441691160202026,
"avg_line_length": 28.191490173339844,
"blob_id": "57ca6ae05e8319482476fa3d7bce449cfb4685b9",
"content_id": "ecd4fe2835afbbfa2cdd93fe6753b14b488875b5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1372,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 47,
"path": "/Python 3/TCP Server.py",
"repo_name": "mukvrm/OSCP-Prep",
"src_encoding": "UTF-8",
"text": "# Goal \n# Bind to specific IPv4 address and TCP port (client) and \n# listen for incoming TCP communications (server)\n\n# import socket module (low level network interface)\n\nimport socket\n\n# Takes input from the user and saves it in variables SRV_ADDR and SRV_PORT\n\nSRV_ADDR = input(\"Type the server IP address: \")\nSRV_PORT = int(input(\"Type the server port: \"))\n\n# Create a new socket using the default family socket (AF_INET) that uses TCP and \n# the default socket type connection-oriented (SOCK_STREAM)\n\ns = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n\n# bind function binds the socket to the provided address and port, while\n# the listen function instructs the socket to listen for an incoming connection. \n\ns.bind((SRV_ADDR, SRV_PORT))\n\n# the argument 1 specifies the maximum number of queued connections.\n\ns.listen(1)\nprint(\"Server started! Waiting for connections...\")\n\n# connection is the socket object we will use to send and receive data\n# address contains the client address bound to the socket\nconnection, address = s.accept()\n\n# Print address of connected client and \n\nprint('Client connected with address:', address)\n\n# start an infinite loop \n\nwhile 1:\n data = connection.recv(1024)\n if not data: break\n connection.sendall(b'-- Message Received --\\n')\n\n# print all messages received from it\n\n print(data.decode('utf-8'))\nconnection.close()\n"
},
{
"alpha_fraction": 0.5033368468284607,
"alphanum_fraction": 0.5567263960838318,
"avg_line_length": 33.719512939453125,
"blob_id": "f4d61588156afbdea37cb89ac9804895e86824a0",
"content_id": "b30707e2593add4ec1060ae19725ee413be98d73",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2847,
"license_type": "no_license",
"max_line_length": 132,
"num_lines": 82,
"path": "/readme.md",
"repo_name": "mukvrm/OSCP-Prep",
"src_encoding": "UTF-8",
"text": "# Portfolio\n\nProjects\n \n Home Lab \n \n Hypervisor Type 1 - ESXi 6.7 | (32GB memory 2.5Tb 3 datastores and 3.9 GHz i7-4770 8mb Cache)\n \n Kali - Pentesting VM | (ESXi)\n \n Windows 10 - Overflow research on ASLR, DEP environments | (ESXi)\n\n Windows 8 - Overflow research on ASLR, DEP environments | (ESXi)\n\n Windows XP Professional VM - Overflow research | (ESXi) \n\n Network (TBD Diagram - maybe Draw.io)\n \n Layer 3 Protocols - OSPF, EIGRP \n Layer 2 Protocols - STP, PVST, MST, DTP, VTP, VLAN 802.1Q, Ethernet Port-Channel (PAgP, LACP)\n Other protocols - DHCP, NAT, HSRP\n Network security - ACL\n Remote Management - FTP, SSH, VNC, SNMP\n Security - Policy Based IPSec VPN, Route Based IPSec VPN\n\n\n Virtualized Environment - EVEng | (ESXi)\n \n Physical devices\n \n Router(s)\n \n 2821 IOS 15.x\n 2821 IOS 15.x\n 2821 IOS 15.x\n \n Switch(es)\n \n 3750 IOS 15.x\n 3750 IOS 15.x\n 3560 PoE 48 12.x\n 2950 12.x\n\n Web - Pugge.ninja\n\n Debian | (ESXi)\n \n Wordpress\n\nProgramming\n\n Python 3\n Bash\n C++\n\nAcademics\n\n eLearnsecurity:\n Penetration Testing Student v4\n Penetration Testing Professional v5\n \n MS Cybersecurity and Information Assurance - 2020\n\n CCNP - 2020\n \n 1) ENCOR - https://www.cisco.com/c/dam/en_us/training-events/le31/le46/cln/marketing/exam-topics/350-401-ENCOR.pdf\n \n a) ENARSI - https://www.cisco.com/c/dam/en_us/training-events/le31/le46/cln/marketing/exam-topics/300-410-ENARSI.pdf\n \n b) ENSDWI - https://www.cisco.com/c/dam/en_us/training-events/le31/le46/cln/marketing/exam-topics/300-415-ENSDWI.pdf\n \n 2) SCOR - https://www.cisco.com/c/dam/en_us/training-events/le31/le46/cln/marketing/exam-topics/350-701-SCOR.pdf\n\n a) SVPN - https://www.cisco.com/c/dam/en_us/training-events/le31/le46/cln/marketing/exam-topics/300-730-SVPN.pdf\n \n b) SNCF - https://www.cisco.com/c/dam/en_us/training-events/le31/le46/cln/marketing/exam-topics/300-710-SNCF.pdf\n \n c) SISE - https://www.cisco.com/c/dam/en_us/training-events/le31/le46/cln/marketing/exam-topics/300-715-SISE.pdf\n \n d) SESA - https://learningnetwork.cisco.com/community/certifications/ccnp-security/sesa/exam-topics\n \n e) SAUTO - https://learningnetwork.cisco.com/community/certifications/ccnp-security/sauto/exam-topics\n"
},
{
"alpha_fraction": 0.703045666217804,
"alphanum_fraction": 0.7144669890403748,
"avg_line_length": 26.13793182373047,
"blob_id": "a86c68e7202f1929b7eab5a56d1c01fe6125b323",
"content_id": "adc9f3958d2b1962d9debc62eebf565e7baaf87c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 788,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 29,
"path": "/Python 3/TCP Client.py",
"repo_name": "mukvrm/OSCP-Prep",
"src_encoding": "UTF-8",
"text": "# Goal \n# Client that starts a connection to the Python server\n# and sends a message. Use function 'connect'\n\n# Notes, to start listening on port 1234 in Linux,\n# use \"socat -v tcp-l:1234,fork exec:'/bin/cat'\"\n\nimport socket\n\nCLT_ADDR = input(\"Server's IP: \")\nCLT_PORT = int(input(\"Type the TCP port you would like to connect to: \"))\n\n# Create new socket using the default family socket (AF_INET) that uses TCP\n# the default socket type connection-oriented (SOCK_STREAM)\n\ns = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n\n# Connect to a remote socket at address\n\ns.connect((CLT_ADDR, CLT_PORT))\n\n# print Target IPv4 address and TCP port\n\nprint(\"Connected to:\", CLT_ADDR, \"on port:\", CLT_PORT, \"\\n\")\n\nmessage = input(\"Enter message to send: \")\n\ns.sendall(message.encode())\ns.close()\n\n"
},
{
"alpha_fraction": 0.6751928329467773,
"alphanum_fraction": 0.6894031763076782,
"avg_line_length": 36.33333206176758,
"blob_id": "9370be1a27dac41ba920392758c674c50c01b9da",
"content_id": "1284b8838ca09fd4fdf4dc19372dfc18e9bec704",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2463,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 66,
"path": "/Python 3/Backdoor server.py",
"repo_name": "mukvrm/OSCP-Prep",
"src_encoding": "UTF-8",
"text": "import socket, platform, os\n\n# Listen on user provided IPv4 address\n# SRV_ADDR = '' for it to use all available interfaces\n# // Alternatively,\n# SRV_ADDR = input('Type IP to run server on: ')\n# but it would require user input\n\nSRV_ADDR = \"\"\n# Listen on statically defined port:\n# SRV_PORT = 7777\n# // Alternatively,\n# SRV_PORT = int(input('Type port/TCP to listen on: '))\n# but it would require user input\n\nSRV_PORT = 7777\n\ns = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\ns.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n# bind function assigns user provided IP address and port to the socket\ns.bind((SRV_ADDR, SRV_PORT))\n# listen for incoming connections\ns.listen(1)\n# To accept a connection the socket must be bound to an address and\n# listening for connections.\n# The return value is a pair (conn, address) where conn is\n# a new socket object usable to send and receive data\n# on the connection, and address is the address bound to the socket\n# on the other end of the connection.\nconnection, address = s.accept()\nprint('Client connected with address:', address)\n# While loop occurs as long as there is 1 connection made\nwhile 1:\n try:\n data = connection.recv(1024)\n except:continue\n# If data received from client decodes to \"1\" in utf-8\n# make a variable with the output of the\n# platform and machine functions of the platform module\n if(data.decode('utf-8') == '1'):\n tosend = platform.platform() + \"\" + platform.machine()\n connection.sendall(tosend.encode())\n# If data received from client decodes to \"2\" in utf-8\n# make a variable with the output of the\n# listdir function of the os module and encode it in utf-8\n elif(data.decode('utf-8') == '2'):\n data = connection.recv(1024)\n try:\n filelist = os.listdir(data.decode('utf-8'))\n tosend = \"\"\n for x in filelist:\n tosend += \",\" + x\n# If the path doesn't exist assign the string \"Wrong Path\"\n# to the tosend variable and encode\n except:\n tosend = \"Wrong path\"\n connection.sendall(tosend.encode())\n# If data received from client decodes to \"0\" in utf-8\n# close the connection\n elif(data.decode('utf-8') == '0'):\n connection.close()\n# Connection socket object and the address of the\n# client establishing the connection are passed on\n# to the accept function on that connection/address tuple\n# so that connection is accepted?\n connection, address = s.accept()"
}
] | 7 |
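PortScanner.py above probes each port sequentially, so a single filtered port can stall the scan for the full TCP connect timeout. Below is a short sketch of the same `connect_ex` check with an explicit timeout and a thread pool; the 0.5 s timeout and 50 workers are arbitrary choices, not values from the original script.

```python
# Sketch: the same connect_ex() check with a timeout and a thread pool.
import socket
from concurrent.futures import ThreadPoolExecutor

def probe(target, port, timeout=0.5):
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.settimeout(timeout)  # bound the wait on filtered/unresponsive ports
        return port, s.connect_ex((target, port)) == 0  # 0 means the port is open

def scan(target, low, high):
    with ThreadPoolExecutor(max_workers=50) as pool:
        for port, is_open in pool.map(lambda p: probe(target, p), range(low, high + 1)):
            print(port, 'OPEN' if is_open else 'closed')

scan('127.0.0.1', 1, 100)
```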
memleak13/dscript
|
https://github.com/memleak13/dscript
|
e5f87374678c6f7747c2837d51884d21ab0b80d6
|
accb534294250f5a7f929177f122bca8d73d308a
|
3e619d132c9e737897580c28e09a2b193c624033
|
refs/heads/master
| 2020-05-30T14:36:58.754067 | 2013-05-02T23:07:07 | 2013-05-02T23:07:07 | 9,824,466 | 0 | 1 | null | null | null | null | null |
[
{
"alpha_fraction": 0.572842538356781,
"alphanum_fraction": 0.5795707106590271,
"avg_line_length": 35.54878234863281,
"blob_id": "e9cbe66be9be07422519ed7737ceaad77eb697b7",
"content_id": "d2c3ffb8c3fda62b84f3ec8789a538029dec9745",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 17984,
"license_type": "no_license",
"max_line_length": 146,
"num_lines": 492,
"path": "/dscript.py",
"repo_name": "memleak13/dscript",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python\n\"\"\"\n Version:\tAlpha 0.3 - Modem Script - CGI Functionality\n Author: \tMemleak13\n Date: \t27.03.13\n\n BUG: rxpwr = psid -> needs to be corrected, wrong field is extracted!\n\t\n\"\"\"\n\nimport cgi,cgitb\nimport re\nimport sys\nimport time\nimport telnetlib\nfrom pysnmp.entity.rfc3413.oneliner import cmdgen\n\ncgitb.enable()\n\n#Class Definitions\nclass Cmts(object):\n def __init__(self, ip, name):\n self.ip = ip\n\tself.name = name\n self.macdomains = '' #a list of all macdomain objects, as there is only one right now, it is a string\n\tself.tn = TelnetAccess(self.ip, IOS_UID, IOS_PW) # creates a telnet obj and logs into cmts stays logged in\n \n def createMacDomain(self, iface):\n\tself.macdomains = MacDomain(iface)\n\n def getCMs(self):\n\tself.tn.runCommand('show cable modem cable ' + ubr01shr.macdomains.name)\n\n def getCMverbose(self, cmmac):\n\tself.tn.runCommand('show cable modem ' + cmmac + ' verbose')\n\t#print ('show cable modem ' + cmmac + ' verbose')\n\n def __del__(self):\n\tself.tn.closeTN() \t# close telnet connection\n\tdel self.tn \t\t# delete object\n\t\n\nclass MacDomain(object):\n def __init__(self, name):\n\tself.name = name\n\tself.cmtotal = '' # total cm in macdomain\n self.cmlist = [] # a list of all cm objects in this mac domain\n \n\tdef extractData(self):\n\t\n\t#Step 2.1: #Reading and filtering the cmts output to include only modems\n\t\t #by deleteing first 4 and last 2 lines, file is stored in cleanedlist\n\t\n\tfin = open ('./telnetoutput', 'r') #('./test','r') #in production change to telnetoutput\n\tcleanedlist = []\n\tfor line in fin: \n \tcleanedlist.append(line)\n\tdel cleanedlist[0:4]\n\tdel cleanedlist[len(cleanedlist)-1]\n\tdel cleanedlist[len(cleanedlist)-1]\n\tself.cmtotal = len(cleanedlist)\n\tfin.close()\n\tprint ('Total modems on card: %d' % self.cmtotal)\n\t\n\t#Step 2.2 : - Line by line the cleanedlist is splitted into its values\n\t# - Modem is then created with these values\t\n\t\n\tcmdatafromcmts = []\n\tfor line in cleanedlist:\n\t\tdel cmdatafromcmts[:]\n\t\tcmdatafromcmts = line.split()\n\t\tmodem = Modem(cmdatafromcmts[0].strip(),cmdatafromcmts[1].strip(),cmdatafromcmts[2].strip(),cmdatafromcmts[3].strip(),cmdatafromcmts[5].strip())\n\t\tprint \"Modem Mac: \" + cmdatafromcmts[0]\n\n\t\t#Step 2.3 : - Telneting to cmts, running verbose command, storing output in telnetoutput\n\t\t# - Filtering verbose values and adding them to created modem object \n\t\tubr01shr.getCMverbose(cmdatafromcmts[0])\n\t\tmodem.setUSData()\n\t\t\n\t\t#Step 2.4 : - Gathering CM DS Data by SNMP and storing them in created modem object\n modem.setDSData()\n\n\t\t#Step 2.5: adding the CM to the modemlist\n\t\tself.cmlist.append(modem)\n\n\nclass Modem(object):\n snmpcommunity = 'web4suhr'\n def __init__(self, mac, ip, iface, state, rxpwr):\n\t#To keep things simple, I created list for all attributes\n\t#but the initial ones. 
Even if they only take one attribute\n\n        self.mac = mac\n        self.ip = ip\n        self.iface = iface\n        self.state = state\n        self.rxpwr = rxpwr\n        self.macversion = []\n        self.upsnr = []\n        self.receivedpwr = []\n        self.reportedtransmitpwr = []\n        self.dspwr = []\n        self.toff = []\n        self.uncorrectables = []\n        self.flaps = []\n        self.errors = []\n        self.reason = []\n        self.padj = []\n        self.docsIfDownChannelPower = []\n        self.docsIfCmStatusTxPower = []\n        self.docsIfSigQSignalNoise = []\n        self.docsIfSigQUncorrectables = []\n        self.docsIfSigQMicroreflections = []\n        self.docsIfCmStatusInvalidUcds = []\n        self.docsIfCmStatusT3Timeouts = []\n        self.docsIfCmStatusT4Timeouts = []\n\n    # Setting data gathered from the CMTS verbose output; flaps, errors and dspwr are strings\n    def setUSData(self):\n        fin = open('./telnetoutput', 'r')\n        for line in fin:\n            # Each line is checked for the expression, then split on the first ':' as delimiter;\n            # to separate multiple (bonded) values it is split again on whitespace.\n            if 'MAC Version' in line:\n                value = line.split(':')\n                value = value[1].split()\n                for index in value:\n                    self.macversion.append(index.strip())\n\n            elif 'Upstream SNR' in line:\n                value = line.split(':')\n                value = value[1].split()\n                for index in value:\n                    self.upsnr.append(index.strip())\n\n            elif 'Received Power' in line:\n                value = line.split(':')\n                value = value[1].split()\n                for index in value:\n                    self.receivedpwr.append(index.strip())\n\n            elif 'Reported Transmit Power' in line:\n                value = line.split(':')\n                value = value[1].split()\n                for index in value:\n                    self.reportedtransmitpwr.append(index.strip())\n\n            elif 'Downstream Power' in line:\n                value = line.split(':')\n                self.dspwr.append(value[1].strip())\n\n            elif 'Timing Offset' in line:\n                value = line.split(':')\n                value = value[1].split()\n                for index in value:\n                    self.toff.append(index.strip())\n\n            elif 'Uncorrectable Codewords' in line:\n                value = line.split(':')\n                value = value[1].split()\n                for index in value:\n                    self.uncorrectables.append(index.strip())\n\n            elif 'Flaps' in line:\n                value = line.split(':')\n                self.flaps.append(value[1].strip())\n\n            elif 'Errors' in line:\n                value = line.split(':')\n                self.errors.append(value[1].strip())\n\n            elif 'CM Initialization Reason' in line:\n                value = line.split(':')\n                value = value[1].split()\n                for index in value:\n                    self.reason.append(index.strip())\n        fin.close()\n\n    # Setting data gathered from the CM by SNMP\n    # TEST: The values need to be checked as dictionaries are generally unsorted!\n    def setDSData(self):\n        if 'online' in self.state:\n            # ONLINE counts the number of online modems SNMP requests are sent to;\n            # it is displayed as soon as the script finishes\n            global ONLINE\n            ONLINE += 1\n\n            receivedsnmpvalues = self.getsnmp()\n            for mib, snmpvalue in sorted(receivedsnmpvalues.iteritems()):\n                if 'docsIfDownChannelPower' in mib:\n                    self.docsIfDownChannelPower.append(snmpvalue)\n                if 'docsIfSigQSignalNoise' in mib:\n                    self.docsIfSigQSignalNoise.append(snmpvalue)\n                if 'docsIfSigQUncorrectables' in mib:\n                    self.docsIfSigQUncorrectables.append(snmpvalue)\n                if 'docsIfSigQMicroreflections' in mib:\n                    self.docsIfSigQMicroreflections.append(snmpvalue)\n                if 'docsIfCmStatusTxPower' in mib:\n                    self.docsIfCmStatusTxPower.append(snmpvalue)\n                if 'docsIfCmStatusInvalidUcds' in mib:\n                    self.docsIfCmStatusInvalidUcds.append(snmpvalue)\n                if 'docsIfCmStatusT3Timeouts' in mib:\n                    self.docsIfCmStatusT3Timeouts.append(snmpvalue)\n                if 'docsIfCmStatusT4Timeouts' in mib:\n                    self.docsIfCmStatusT4Timeouts.append(snmpvalue)\n
    def getsnmp(self):\n        snmpvalue = {}  # dictionary which will be returned\n        cmdGen = cmdgen.CommandGenerator()\n        errorIndication, errorStatus, errorIndex, varBindTable = cmdGen.nextCmd(\n            cmdgen.CommunityData(Modem.snmpcommunity, mpModel=0),\n            cmdgen.UdpTransportTarget((self.ip, 161)),\n            cmdgen.MibVariable('DOCS-IF-MIB', 'docsIfDownChannelPower'),\n            cmdgen.MibVariable('DOCS-IF-MIB', 'docsIfSigQSignalNoise'),\n            cmdgen.MibVariable('DOCS-IF-MIB', 'docsIfCmStatusTxPower'),\n            cmdgen.MibVariable('DOCS-IF-MIB', 'docsIfSigQUncorrectables'),\n            cmdgen.MibVariable('DOCS-IF-MIB', 'docsIfSigQMicroreflections'),\n            cmdgen.MibVariable('DOCS-IF-MIB', 'docsIfCmStatusInvalidUcds'),\n            cmdgen.MibVariable('DOCS-IF-MIB', 'docsIfCmStatusT3Timeouts'),\n            cmdgen.MibVariable('DOCS-IF-MIB', 'docsIfCmStatusT4Timeouts'),\n            #cmdgen.MibVariable('DOCS-IF-MIB', 'docsIfCmStatusT1Timeouts'),\n            #cmdgen.MibVariable('DOCS-IF-MIB', 'docsIfCmStatusT2Timeouts'),\n            #cmdgen.MibVariable('DOCS-IF-MIB', 'docsIfUpChannelTxTimingOffset'),\n            #cmdgen.MibVariable('DOCS-IF-MIB', 'docsIfCmStatusInvalidMaps'),\n            lookupNames=True, lookupValues=True\n        )\n\n        if errorIndication:\n            print(errorIndication)\n        else:\n            if errorStatus:\n                print('%s at %s' % (\n                    errorStatus.prettyPrint(),\n                    errorIndex and varBindTable[-1][int(errorIndex)-1] or '?'\n                ))\n            else:\n                for varBindTableRow in varBindTable:\n                    for name, val in varBindTableRow:\n                        snmpvalue[name.prettyPrint()] = val.prettyPrint()\n                        #print('%s = %s' % (name.prettyPrint(), val.prettyPrint()))\n                        #snmpvalue.append(val.prettyPrint())\n                        #print snmpvalue\n        return snmpvalue\n\n\nclass TelnetAccess(object):\n\n    # Defining regular expressions for the different prompts here\n    ios_unprivPrompt = re.compile('.*>')\n    ios_privPrompt = re.compile('.*#')\n    regexlist = [ios_unprivPrompt, ios_privPrompt, 'Username:', 'Password:', 'username:', 'password:']\n\n    def __init__(self, ip, uid, password):\n\n        self.ip = ip\n        self.uid = uid\n        self.password = password\n        self.telnetoutput = ''\n\n        # Connecting to host\n        self.tn = telnetlib.Telnet(self.ip)\n\n        # IOS login procedure (unpriv -> enable -> priv)\n        self.tn.expect(TelnetAccess.regexlist)  # regexlist is a class attribute\n        self.tn.write(self.uid + \"\\n\")\n        self.tn.expect(TelnetAccess.regexlist)\n        self.tn.write(self.password + \"\\n\")\n        self.tn.expect(TelnetAccess.regexlist)\n        #time.sleep(1)  # Optional delay; otherwise execution is too fast and the command runs before the telnet object is initialized\n\n    def runCommand(self, command):\n\n        # Opening filehandle\n        self.telnetoutput = open('telnetoutput', 'w')\n\n        # Executing command and capturing the output\n        #self.tn.expect(TelnetAccess.regexlist)\n        self.tn.write(command + \"\\n\")\n        time.sleep(0.3)\n        output = self.tn.read_very_eager()\n        self.telnetoutput.write(output)\n\n        # Close filehandle\n        self.telnetoutput.close()\n\n    def closeTN(self):\n        self.tn.close()\n\n\n# Main\n\n# Global Parameters\nIOS_UID = 'dscript'\nIOS_PW = 'hf4ev671'\n
ONLINE = 0  # DEBUG, for comparing performance. This counter counts every online modem while running through the script\n\n# Step 1: Receive US value from the drop-down menu using CGI, create CMTS object, telnet to the CMTS and log in,\n#         issue command, receive CM list\n\n# 1.1 - CGI Processing\nform = cgi.FieldStorage()  # instantiate only once!\nselected = form.getfirst('us', 'empty')\n# Avoid script injection by escaping the user input\nselected = cgi.escape(selected)\n\nprint \"Content-type:text/html\\r\\n\\r\\n\"\nprint \"<!DOCTYPE HTML>\"\nprint \"<html>\"\nprint \"<head>\"\nprint selected\nprint \"</head>\"\nprint \"<body>\"\n\n\n# 1.2 - Script Processing\nubr01shr = Cmts('10.10.10.50', 'ubr01shr')\nubr01shr.createMacDomain(selected)\nubr01shr.getCMs()\n\n# Step 2 (2.1 - 2.5): - Extract data from the CM list (telnet output from the CMTS)\n#                     - Create Modem objects\n#                     - Populate CM values from the CMTS and the CM (SNMP)\n#                     - Add all CMs to macdomain.cmlist\n\nubr01shr.macdomains.extractData()\nprint ('Total modems on card: %d' % ubr01shr.macdomains.cmtotal)\n\n# Step 3 - Creating Output\n#print \"Content-type:text/html\\r\\n\\r\\n\"\n#print \"<!DOCTYPE HTML>\"\n#print \"<html>\"\n#print \"<head>\"\n#print selected\n#print \"</head>\"\n#print \"<body>\"\n\nprint \"<table border=1>\"\nprint \"<tr>\"\nprint \"<th>mac</th>\"\nprint \"<th>ip</th>\"\nprint \"<th>iface</th>\"\nprint \"<th>state</th>\"\nprint \"<th>rxpwr</th>\"\nprint \"<th>Docsis</th>\"\nprint \"<th>upsnr</th>\"\nprint \"<th>upsnr</th>\"\nprint \"<th>receivedpwr</th>\"\nprint \"<th>receivedpwr</th>\"\nprint \"<th>reportedtransmitpwr</th>\"\nprint \"<th>reportedtransmitpwr</th>\"\nprint \"<th>dspwr</th>\"\nprint \"<th>toff</th>\"\nprint \"<th>toff</th>\"\nprint \"<th>toff</th>\"\nprint \"<th>toff</th>\"\nprint \"<th>uncorrectables</th>\"\nprint \"<th>uncorrectables</th>\"\nprint \"<th>flaps</th>\"\nprint \"<th>errors</th>\"\nprint \"<th>reason</th>\"\nprint \"<th>docsIfDownChannelPower</th>\"\nprint \"<th>docsIfDownChannelPower</th>\"\nprint \"<th>docsIfDownChannelPower</th>\"\nprint \"<th>docsIfDownChannelPower</th>\"\nprint \"<th>docsIfSigQSignalNoise</th>\"\nprint \"<th>docsIfSigQSignalNoise</th>\"\nprint \"<th>docsIfSigQSignalNoise</th>\"\nprint \"<th>docsIfSigQSignalNoise</th>\"\nprint \"<th>docsIfSigQUncorrectables</th>\"\nprint \"<th>docsIfSigQUncorrectables</th>\"\nprint \"<th>docsIfSigQUncorrectables</th>\"\nprint \"<th>docsIfSigQUncorrectables</th>\"\nprint \"<th>docsIfSigQMicroreflections</th>\"\nprint \"<th>docsIfSigQMicroreflections</th>\"\nprint \"<th>docsIfSigQMicroreflections</th>\"\nprint \"<th>docsIfSigQMicroreflections</th>\"\nprint \"<th>docsIfCmStatusTxPower</th>\"\nprint \"<th>docsIfCmStatusInvalidUcds</th>\"\nprint \"<th>docsIfCmStatusT3Timeouts</th>\"\nprint \"<th>docsIfCmStatusT4Timeouts</th>\"\nprint \"</tr>\"\n\n
for cm in ubr01shr.macdomains.cmlist:\n    if \"DOC3.0\" in cm.macversion:\n        print \"<tr>\"\n        print \"<td>\" + cm.mac + \"</td>\"\n        print \"<td>\" + cm.ip + \"</td>\"\n        print \"<td>\" + cm.iface + \"</td>\"\n        print \"<td>\" + cm.state + \"</td>\"\n        print \"<td>\" + cm.rxpwr + \"</td>\"\n\n        for value in cm.macversion:\n            print \"<td>\" + value + \"</td>\"\n        for value in cm.upsnr:\n            print \"<td>\" + value + \"</td>\"\n        for value in cm.receivedpwr:\n            print \"<td>\" + value + \"</td>\"\n        for value in cm.reportedtransmitpwr:\n            print \"<td>\" + value + \"</td>\"\n        for value in cm.dspwr:\n            print \"<td>\" + value + \"</td>\"\n        for value in cm.toff:\n            print \"<td>\" + value + \"</td>\"\n        for value in cm.uncorrectables:\n            print \"<td>\" + value + \"</td>\"\n        for value in cm.flaps:\n            print \"<td>\" + value + \"</td>\"\n        for value in cm.errors:\n            print \"<td>\" + value + \"</td>\"\n        for value in cm.reason:\n            print \"<td>\" + value + \"</td>\"\n\n        for value in cm.docsIfDownChannelPower:\n            print \"<td>\" + value + \"</td>\"\n        for value in cm.docsIfSigQSignalNoise:\n            print \"<td>\" + value + \"</td>\"\n        for value in cm.docsIfSigQUncorrectables:\n            print \"<td>\" + value + \"</td>\"\n        for value in cm.docsIfSigQMicroreflections:\n            print \"<td>\" + value + \"</td>\"\n        for value in cm.docsIfCmStatusTxPower:\n            print \"<td>\" + value + \"</td>\"\n        for value in cm.docsIfCmStatusInvalidUcds:\n            print \"<td>\" + value + \"</td>\"\n        for value in cm.docsIfCmStatusT3Timeouts:\n            print \"<td>\" + value + \"</td>\"\n        for value in cm.docsIfCmStatusT4Timeouts:\n            print \"<td>\" + value + \"</td>\"\n        print \"</tr>\"\n\n\nprint \"</table>\"\nprint \"</body>\"\nprint \"</html>\"\n\n\n\"\"\"\n#Debug\nfor cm in ubr01shr.macdomains.cmlist:\n    print 'mac: ' + cm.mac\n    print 'ip: ' + cm.ip\n    print 'iface: ' + cm.iface\n    print 'state: ' + cm.state\n    print 'rxpwr: ' + cm.rxpwr\n\n    for value in cm.macversion:\n        print 'macversion: ' + value\n    for value in cm.upsnr:\n        print 'upsnr: ' + value\n    for value in cm.receivedpwr:\n        print 'receivedpwr: ' + value\n    for value in cm.reportedtransmitpwr:\n        print 'reportedtransmitpwr: ' + value\n    for value in cm.dspwr:\n        print 'dspwr: ' + value\n    for value in cm.toff:\n        print 'toff: ' + value\n    for value in cm.uncorrectables:\n        print 'uncorrectables: ' + value\n    for value in cm.flaps:\n        print 'flaps: ' + value\n    for value in cm.errors:\n        print 'errors: ' + value\n    for value in cm.reason:\n        print 'reason: ' + value\n\n    for value in cm.docsIfDownChannelPower:\n        print 'docsIfDownChannelPower: ' + value\n    for value in cm.docsIfSigQSignalNoise:\n        print 'docsIfSigQSignalNoise: ' + value\n    for value in cm.docsIfSigQUncorrectables:\n        print 'docsIfSigQUncorrectables: ' + value\n    for value in cm.docsIfSigQMicroreflections:\n        print 'docsIfSigQMicroreflections: ' + value\n    for value in cm.docsIfCmStatusTxPower:\n        print 'docsIfCmStatusTxPower: ' + value\n    for value in cm.docsIfCmStatusInvalidUcds:\n        print 'docsIfCmStatusInvalidUcds: ' + value\n    for value in cm.docsIfCmStatusT3Timeouts:\n        print 'docsIfCmStatusT3Timeouts: ' + value\n    for value in cm.docsIfCmStatusT4Timeouts:\n        print 'docsIfCmStatusT4Timeouts: ' + value\n    print \"**********************************\\n\\n\"\n\n\"\"\"\nprint 'ONLINE %d' % ONLINE\n\n# Closing up\ndel ubr01shr\n\n\n
}
] | 1 |
sarkhub/sand
|
https://github.com/sarkhub/sand
|
f530130cc366efbad476ed5f5785c386bd0e3d10
|
6d893b7be88dc83a462fd7794d5e2e2e0465379b
|
50a56cf1562225ec353a0c538d1ea6136b1259b9
|
refs/heads/master
| 2020-09-01T17:24:31.267651 | 2020-02-21T22:08:55 | 2020-02-21T22:08:55 | 219,015,266 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.4489571750164032,
"alphanum_fraction": 0.4500548839569092,
"avg_line_length": 16.97916603088379,
"blob_id": "d84b7b5610ea1a45001093cee491677a2384556d",
"content_id": "9083236713f48059c9fa8cd14ba136d8a5ca4955",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 911,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 48,
"path": "/pythoncompletedlessons/dictionaryexamples2del.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "###########################################\r\n#\r\n# dictionaryexamples2del\r\n# deleting\r\n#\r\n#\r\n###########################################\r\n\r\n\r\n\r\ndef main():\r\n\r\n countries = {\"CA\": \"Canada\",\r\n \"US\": \"United States\",\r\n \"GB\": \"Great Britain\",\r\n \"MX\": \"Mexico\"}\r\n\r\n print()\r\n print(countries)\r\n print()\r\n\r\n # code that checks if a key is in a dictionary before getting its value\r\n code = \"US\"\r\n if code in countries:\r\n country = countries[code]\r\n del countries[code]\r\n print(country + \" was deleted.\")\r\n else:\r\n print(\"There is no country for this code: \" + code)\r\n\r\n print()\r\n print(countries)\r\n print()\r\n\r\n\r\n # using pop to delete\r\n print(\"pop example\")\r\n print()\r\n country = countries.pop(\"MX\")\r\n\r\n print()\r\n print(countries)\r\n print()\r\n\r\n\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n"
},
{
"alpha_fraction": 0.5290423631668091,
"alphanum_fraction": 0.5886970162391663,
"avg_line_length": 26.266666412353516,
"blob_id": "6e411765e510989a1f0036e8a1fa9e9abf39219f",
"content_id": "35431781e50763a20f77367645d7df9e842e78f7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1274,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 45,
"path": "/pythoncompletedlessons/randomtest.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "###########################################\r\n#\r\n# randomtest\r\n#\r\n# testing modules for random\r\n# number generation\r\n#\r\n###########################################\r\n\r\nimport random\r\n\r\n\r\n# the use of random method\r\nprint(\"Using random float\")\r\nnumber = random.random() # a float value >= 0.0 and < 1.0\r\nprint(number)\r\n\r\nnumber = random.random() * 100 # a float value >= 0.0 and < 100\r\nprint(number)\r\nnumber = round(number, 2) # round number to two decimal places\r\nprint(number)\r\n\r\n# the use of randint method\r\nprint(\"Using randint\")\r\nnumber = random.randint(1, 100) # an int from 1 to 100\r\nprint(number)\r\nnumber = random.randint(101, 200) # an int from 101 to 200\r\nprint(number)\r\nnumber = random.randint(0, 7) # an int from 0 to 7\r\nprint(number)\r\n\r\n# the use of randrange method\r\nprint(\"Using randrange\")\r\nnumber = random.randrange(1, 100) # an int from 1 to 99\r\nprint(number)\r\nnumber = random.randrange(100, 200, 2) # an even int from 100 to 198\r\nprint(number)\r\nnumber = random.randrange(11, 250, 2) # an odd int from 11 to 249\r\nprint(number)\r\n\r\n# simulate rolling a pair of dice\r\nprint(\"Roll dice\")\r\ndie1 = random.randint(1, 6)\r\ndie2 = random.randint(1, 6)\r\nprint(\"Your roll: \", die1, die2)\r\n\r\n"
},
{
"alpha_fraction": 0.574636697769165,
"alphanum_fraction": 0.616908848285675,
"avg_line_length": 26.452829360961914,
"blob_id": "c242b4c335a71285fa86c586f144fc7a1d054b12",
"content_id": "6cd5af9cfdee0474197dc7ede1f5ccafc3905355",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1514,
"license_type": "no_license",
"max_line_length": 109,
"num_lines": 53,
"path": "/trading_system_MA_50_100.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Fri Jan 10 14:30:07 2020\r\n\r\n@author: -\r\n\"\"\"\r\n###############################################################\r\n## Trading System simple\r\n\r\nimport numpy as np\r\nimport pandas as pd\r\nimport matplotlib.pyplot as plt\r\nimport pandas_datareader as pdr\r\n\r\n\r\nstock_hist = pdr.get_data_yahoo('ORCL', start = '2019-06-01')\r\n# get rid of Adj Close \r\nstock_hist = stock_hist.drop('Adj Close', axis=1)\r\n# round to 2 decimal places\r\nstock_hist = round(stock_hist,2)\r\n\r\n\r\nstock_hist['50-day'] = stock_hist['Close'].rolling(50).mean()\r\nstock_hist['100-day'] = stock_hist['Close'].rolling(100).mean()\r\nstock_hist['Change'] = np.log(stock_hist.Close / stock_hist.Close.shift())\r\n\r\n\r\n# print(stock_hist)\r\n\r\n\r\n# when faster signal moves above slower = signal to buy\r\n\r\n# when slower signal moves above faster = signal to sell short\r\n\r\nwith plt.style.context('ggplot'):\r\n plt.figure(figsize=(8,6))\r\n plt.plot(stock_hist.Close[-120:])\r\n plt.plot(stock_hist['50-day'][-120:])\r\n plt.plot(stock_hist['100-day'][-120:])\r\n plt.legend(loc=2)\r\n \r\n # this does not take in account equality\r\nstock_hist['position'] = np.where(stock_hist['50-day'] > stock_hist['100-day'], 1, 0)\r\nstock_hist['position'] = np.where(stock_hist['50-day'] <= stock_hist['100-day'] , -1, stock_hist['position'])\r\n\r\nprint(stock_hist['position'])\r\n\r\n# capture the return\r\nstock_hist['system'] = stock_hist['position'] * stock_hist['Change']\r\n\r\n\r\n# plot\r\nstock_hist[['Change', 'system']].cumsum().plot()\r\n\r\n\r\n\r\n"
},
{
"alpha_fraction": 0.5386835932731628,
"alphanum_fraction": 0.5635104179382324,
"avg_line_length": 22.742856979370117,
"blob_id": "0aa83dc0c282d7e6ae702f96ba41ec3ce427419c",
"content_id": "ba4096a4593813910de4411c9e8cc83756e210be",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1732,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 70,
"path": "/pythoncompletedlessons/listexamples.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "###########################################\r\n#\r\n# listexamples\r\n#\r\n# working with list\r\n#\r\n# random is used for shuffle() and choice()\r\n###########################################\r\n\r\nimport random\r\n\r\ndef main():\r\n\r\n\r\n\r\n\r\n # how to use the count(), reverse() and sort() methods\r\n numlist = [5, 15, 84, 3, 14, 2, 8, 10, 14, 25]\r\n count = numlist.count(14)\r\n print(\"Count is: \", count)\r\n\r\n numlist.reverse()\r\n print(\"Reversal of list\", numlist)\r\n\r\n numlist.sort()\r\n\r\n print(\"Sorted list\" , numlist)\r\n\r\n print()\r\n\r\n # how to use the sort() function with mixed-case lists\r\n foodlist = [\"orange\", \"apple\", \"Pear\", \"banana\"]\r\n foodlist.sort()\r\n print(\"Sorted foodlist\", foodlist)\r\n\r\n # how to use the key argument to fix the sort order\r\n foodlist.sort(key=str.lower)\r\n print(\"Sorted by key foodlist\", foodlist)\r\n\r\n print()\r\n\r\n # how to use the sorted() function with mixed-case lists\r\n # results of simple sort\r\n foodlist = [\"orange\", \"apple\", \"Pear\", \"banana\"]\r\n sorted_foodlist = sorted(foodlist)\r\n print(sorted_foodlist)\r\n\r\n # how to use the key argument to fix the sort order\r\n sorted_foodlist = sorted(foodlist, key=str.lower)\r\n print(sorted_foodlist)\r\n\r\n # Note that the sorted() function creates a new list, but the sort() does not\r\n\r\n # min() and max()\r\n numlist = [5, 6, 7, 8, 50, 90, 4, 10, 25]\r\n minimum = min(numlist)\r\n print(\"Min number\", minimum)\r\n\r\n maximum = max(numlist)\r\n print(\"Max number\", maximum)\r\n\r\n numlist = [70, 71, 72, 80, 65, 20]\r\n choice = random.choice(numlist)\r\n print(\"Choice: \", choice)\r\n\r\n random.shuffle(numlist)\r\n print(\"Random shuffle: \", numlist)\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n"
},
{
"alpha_fraction": 0.6107142567634583,
"alphanum_fraction": 0.6178571581840515,
"avg_line_length": 21.66666603088379,
"blob_id": "3db58e0174b60b18617cb788de0c63bd3939a420",
"content_id": "6b2ad480fdaf2ab7c7d1a459bd8e1e82014ba3c7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 280,
"license_type": "no_license",
"max_line_length": 49,
"num_lines": 12,
"path": "/csv_reader.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "import os\nimport csv\nimport time\n\nos.chdir(r\"/home/dbsand/sand/dat/\")\n\nwith open('listings.csv', newline='') as csvfile:\n reader = csv.DictReader(csvfile)\n for row in reader:\n print(row['TICKER'])\n print('wait event')\n time.sleep(10) # seconds\n "
},
{
"alpha_fraction": 0.44609665870666504,
"alphanum_fraction": 0.4788103997707367,
"avg_line_length": 15.826666831970215,
"blob_id": "73ec079c926f4d640117b2b61e4c5673ae35b186",
"content_id": "614403bb421b5c3fa4cdf0e8f9914b34c0b3e51c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1345,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 75,
"path": "/plottester.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "\r\n\r\n###########################################\r\n#\r\n# plottester\r\n#\r\n# procedural way to load multiple files\r\n#\r\n###########################################\r\n\r\nimport os\r\nimport pandas as pd\r\n\r\n\r\n\r\n \r\ndef generate_plot(x, y, partner_name):\r\n import matplotlib.pyplot as plt \r\n \r\n \r\n \r\n # plotting the points \r\n plt.plot(x, y, color='green', linestyle='dashed', linewidth = 3, \r\n marker='o', markerfacecolor='blue', markersize=12) \r\n \r\n # setting x and y axis range \r\n plt.ylim(1,20) \r\n plt.xlim(1,20) \r\n \r\n # naming the x axis \r\n plt.xlabel('x - axis') \r\n # naming the y axis \r\n plt.ylabel('y - axis') \r\n \r\n # giving a title to my graph \r\n plt.title(partner_name) \r\n \r\n # function to show the plot \r\n plt.show() \r\n\r\n\r\n\r\n\r\ndef main():\r\n\r\n print(\"Starting plottester routine\")\r\n \r\n # x axis values \r\n x = [1,2,3,4,5,6] \r\n # corresponding y axis values \r\n y = [2,4,1,5,2,6] \r\n \r\n partner_name = 'Infocon'\r\n\r\n generate_plot(x, y, partner_name)\r\n \r\n \r\n # x axis values \r\n x = [12,10,10,14,18,7] \r\n # corresponding y axis values \r\n y = [12,14,11,15,20,16] \r\n \r\n partner_name = 'Indiana'\r\n\r\n generate_plot(x, y, partner_name)\r\n\r\n \r\n \r\n \r\n \r\n\r\n print(\"Complete!\")\r\n\r\n\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n "
},
{
"alpha_fraction": 0.3773006200790405,
"alphanum_fraction": 0.39263802766799927,
"avg_line_length": 14.300000190734863,
"blob_id": "eb41ae542a20b8bff4e005a9d7c568a1144f454a",
"content_id": "41b4a06992a37e053b8f402f83a6fe106ccdd8c9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 326,
"license_type": "no_license",
"max_line_length": 38,
"num_lines": 20,
"path": "/pythoncompletedlessons/localvar.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "######################################\r\n#\r\n# localval\r\n#\r\n# used to demonstrate variable\r\n#\r\n######################################\r\n\r\ndef calc_tax(amount, tax_rate):\r\n tax = amount * tax_rate\r\n return tax\r\n\r\n\r\ndef main():\r\n tax = calc_tax(80.0, .05)\r\n print(\"Tax: \", tax)\r\n\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n"
},
{
"alpha_fraction": 0.5426356792449951,
"alphanum_fraction": 0.5484496355056763,
"avg_line_length": 26.77777862548828,
"blob_id": "6bdc303ddfee1010b3a2206282c5e3d0f9284a01",
"content_id": "12bdf77720db4ed56d3ad9ad9753939d6f56a636",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 516,
"license_type": "no_license",
"max_line_length": 60,
"num_lines": 18,
"path": "/pythoncompletedlessons/milespergallonconditionals.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "##########################################\r\n# milespergallonconditionals.py\r\n#\r\n##########################################\r\n\r\n# display welcome message\r\nprint(\"Miles per gallon\")\r\nprint()\r\n\r\n# get input\r\nmiles_driven = float(input(\"Enter miles driven: \"))\r\ngallons_used = float(input(\"Enter gallons of gas used: \"))\r\n\r\nif miles_driven > 0 and gallons_used > 0:\r\n mpg = round((miles_driven / gallons_used), 2)\r\n print(\"Miles Per Gallon: \", mpg)\r\nelse:\r\n print(\"Both entries must be greater than zero\")"
},
{
"alpha_fraction": 0.49436089396476746,
"alphanum_fraction": 0.5093985199928284,
"avg_line_length": 18.384614944458008,
"blob_id": "bf9ab59ad80d45da6a479c8f8b12f7c9b44820c9",
"content_id": "78ee1513df24bdd5c13dc56106e149fefd32b963",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 532,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 26,
"path": "/pythoncompletedlessons/temperature.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "######################################\r\n#\r\n# temperature\r\n#\r\n# testing modules\r\n#\r\n######################################\r\n\"\"\"\r\nThis module contains functions for converting\r\ntemperatures between degrees F and degrees C\r\n\"\"\"\r\ndef to_celsius(fahrenheit):\r\n \"\"\"\r\n :param fahrenheit:\r\n :return: celsius\r\n \"\"\"\r\n celsius = (fahrenheit - 32) * 5/9\r\n return celsius\r\n\r\ndef to_farenheit(celcius):\r\n \"\"\"\r\n :param celcius:\r\n :return: fahrenheit\r\n \"\"\"\r\n farenheit = celcius * 9/5 + 32\r\n return farenheit\r\n\r\n"
},
{
"alpha_fraction": 0.48327869176864624,
"alphanum_fraction": 0.48655736446380615,
"avg_line_length": 18.527027130126953,
"blob_id": "4592a70fed2da3ef8d94a1525f39883e02b62dbd",
"content_id": "089e0bb9238605031c33a636abc2a279ae1bfd5e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1525,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 74,
"path": "/stockfileloader.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "\r\n###########################################\r\n#\r\n# stockfileloader\r\n#\r\n# procedural way to load multiple files\r\n#\r\n###########################################\r\n\r\nimport os\r\nimport pandas as pd\r\n\r\n\r\n\r\n \r\n\r\n\r\nFILEDIR = 'C:\\PANDAS2019\\pandas_tutorial_jupyter_output\\data'\r\n\r\n\r\ndef check_files():\r\n print(\"Checking for files\")\r\n for file in os.listdir(FILEDIR):\r\n if file.endswith(\".csv\"):\r\n # print(file)\r\n # stock_name=file.replace(\".csv\",\"\")\r\n # print(stock_name)\r\n # stock_name = pd.read_csv(file, index_col=0, parse_dates=True)\r\n # stock_name\r\n load_files(file)\r\n \r\n\r\ndef load_files(file):\r\n print(\"file -> \", file)\r\n \r\n os.chdir(FILEDIR)\r\n currStock = pd.read_csv(file)\r\n \r\n stockname=file.replace(\".csv\",\"\")\r\n print(\"Loaded -> \", stockname)\r\n \r\n \r\n # print(currStock) # DEBUG show contents\r\n plot_files(currStock, stockname)\r\n\r\n \r\n\r\n\r\ndef plot_files(currStock, stockname):\r\n print(\"plotting -> \", stockname)\r\n print()\r\n print(currStock.shape)\r\n #currStock.legend(label=stockname)\r\n currStock[\"Close\"].plot(grid = True, legend=True) # Plot the adjusted closing price of current stock\r\n\r\n return\r\n \r\n\r\ndef main():\r\n\r\n print(\"Starting stockfileloader routine\")\r\n\r\n check_files()\r\n\r\n \r\n \r\n \r\n \r\n\r\n print(\"Complete!\")\r\n\r\n\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n "
},
{
"alpha_fraction": 0.5342350006103516,
"alphanum_fraction": 0.5612848401069641,
"avg_line_length": 19.472726821899414,
"blob_id": "3a09bdc96640958fc9bd7ddfb9919f5c2bc81a62",
"content_id": "1f1e1ba06325111999ebbbb1f006c01f8c2f89cb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1183,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 55,
"path": "/logging_test/data_reader_test.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "import os\r\nimport datetime\r\nimport pandas as pd\r\n\r\n\r\nx = datetime.datetime.now()\r\ndt_now = x.strftime(\"%y%m%d%H%M%S%f\")\r\next = '.txt'\r\nfile_nm = 'data_reader_test'\r\n\r\n\r\n\r\ndef init_log():\r\n\r\n os.chdir('C:\\PANDAS2019\\logs')\r\n log_msg = 'Application '+file_nm+' run at '+dt_now\r\n file_object = open(dt_now+file_nm+ext, 'w')\r\n file_object.write(log_msg)\r\n file_object.close()\r\n\r\ndef log_append(log_msg_append):\r\n\r\n os.chdir('C:\\PANDAS2019\\logs')\r\n file_object = open(dt_now + file_nm + ext, 'a')\r\n file_object.write('\\n')\r\n file_object.write(log_msg_append)\r\n file_object.close()\r\n\r\n\r\n# --------------------------------------------------------\r\n# -- main\r\ninit_log()\r\n\r\n\r\n# --------------------------------------------------------\r\n# -- read data and display results\r\nos.chdir('C:\\PANDAS\\practicaltimeseries\\data')\r\nprint('X')\r\nsp500_df = pd.read_csv('GSPC.csv')\r\n\r\n# change the row indices of the dataframe using the Date column\r\nsp500_df.index = sp500_df['Date']\r\n\r\nprint(sp500_df.head(10))\r\n\r\n\r\n\r\n# -- log results\r\nlog_append('sp500 top 10 Close:')\r\nlog_append(pd.Series.to_string(sp500_df['Close'].head(10)))\r\n\r\n\r\n\r\n\r\nlog_append('completed process')\r\n\r\n"
},
{
"alpha_fraction": 0.4858208894729614,
"alphanum_fraction": 0.4873134195804596,
"avg_line_length": 21.508771896362305,
"blob_id": "b716f6bacaa7965e87952e16482dcf6e32225216",
"content_id": "e774ef24a7951bb0ee051dc1d4753b2bd220b235",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1340,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 57,
"path": "/pythoncompletedlessons/wordcounter.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "###########################################\r\n#\r\n# wordcounter\r\n#\r\n#\r\n#\r\n###########################################\r\n\r\ndef get_words_from_file(filename):\r\n with open(filename) as file:\r\n text = file.read() # read str from file\r\n\r\n text = text.replace(\"\\n\", \"\")\r\n text = text.replace(\",\", \"\")\r\n text = text.replace(\".\", \"\")\r\n text = text.lower()\r\n\r\n words = text.split(\" \") # convert str to list\r\n print(words)\r\n return words\r\n\r\n\r\ndef count_words(words):\r\n # define a dict to store the word count\r\n word_count = {}\r\n for word in words:\r\n if word in word_count:\r\n word_count[word] += 1\r\n else:\r\n word_count[word] = 1\r\n return word_count\r\n\r\n\r\ndef display_word_count(word_count):\r\n words = list(word_count.keys())\r\n words.sort(key=str.lower)\r\n for word in words:\r\n count = word_count[word]\r\n print(word, \"=\", count)\r\n\r\n\r\ndef main():\r\n print(\"The word counter program.\")\r\n print()\r\n\r\n filename = \"wordsfile.txt\"\r\n\r\n # get words, count and display\r\n words = get_words_from_file(filename) # get list of words\r\n word_count = count_words(words) # create dict from list\r\n display_word_count(word_count)\r\n\r\n\r\n\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n"
},
{
"alpha_fraction": 0.528205156326294,
"alphanum_fraction": 0.5897436141967773,
"avg_line_length": 15.181818008422852,
"blob_id": "211491892bbd4a8a2a525b1e06a94a6d4bbedce4",
"content_id": "3d76cc6886c4564eee003126cdcbce4b7c613003",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 195,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 11,
"path": "/pdr_get_quote.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Fri Jan 3 13:51:10 2020\r\n\r\n@author: \r\n\"\"\"\r\n\r\nimport pandas as pd\r\nimport pandas_datareader as pdr\r\n\r\nprint(pdr.get_quote_yahoo(\"AAPL\")['price'])\r\n\r\n\r\n\r\n"
},
{
"alpha_fraction": 0.6598639488220215,
"alphanum_fraction": 0.6734693646430969,
"avg_line_length": 22.66666603088379,
"blob_id": "b4db412e551bacf895de4d22e9e9a89f2661efd0",
"content_id": "5691bee9dbe631e01ce8aaca9232a42ee38f1270",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 294,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 12,
"path": "/logging_test/logger.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "import os\r\nimport datetime\r\n\r\nx = datetime.datetime.now()\r\ndt_now = x.strftime(\"%y%m%d%H%M%S%f\")\r\nfile_nm = 'logger.txt'\r\n\r\nos.chdir('C:\\PANDAS2019\\logs')\r\nlog_msg = 'this is the log from the application'\r\nfile_object = open(dt_now+file_nm, 'w')\r\nfile_object.write(log_msg)\r\nfile_object.close()"
},
{
"alpha_fraction": 0.8181818127632141,
"alphanum_fraction": 0.8181818127632141,
"avg_line_length": 15.5,
"blob_id": "35bf3b88b94414488850f5c9535921694f17b8da",
"content_id": "49b659aeb43214a3870e169aff08b3aaeff33c37",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 66,
"license_type": "no_license",
"max_line_length": 40,
"num_lines": 4,
"path": "/README.md",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "# pysand\npython sandbox\n\ncontains python and pandas library files\n"
},
{
"alpha_fraction": 0.5348615050315857,
"alphanum_fraction": 0.5434575080871582,
"avg_line_length": 18.096153259277344,
"blob_id": "c6c8d4b43178ad937ce43eb7269286d19fd0f3a4",
"content_id": "4707d11613b0c7638a2ac5b21d13b5daeccb9d81",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1047,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 52,
"path": "/pythoncompletedlessons/datetimeexamples.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "\r\n###########################################\r\n#\r\n# datetimeexamples\r\n#\r\n# date time functions\r\n#\r\n###########################################\r\n\r\nfrom datetime import date, time, datetime, timedelta\r\n\r\n\r\n\r\ndef main():\r\n\r\n invoice_date = date.today()\r\n\r\n print(\"invoice date: \", invoice_date)\r\n\r\n invoice_datetime = datetime.now()\r\n\r\n print()\r\n\r\n print(\"invoice date time \", invoice_datetime)\r\n\r\n print()\r\n\r\n\r\n # timespans (timedelta)\r\n three_weeks = timedelta(weeks=3)\r\n three_weeks_from_today = date.today() + three_weeks\r\n print(\"three weeks from now \", three_weeks_from_today)\r\n\r\n print()\r\n three_weeks_ago = date.today() - three_weeks\r\n print(\"three weeks ago \", three_weeks_ago)\r\n print()\r\n\r\n # how long until halloween\r\n halloween = datetime(2019, 10, 31)\r\n time_span = halloween - datetime.now()\r\n\r\n days = time_span.days\r\n seconds = time_span.seconds\r\n\r\n print(\"Halloween is in\", days, \"days or\", seconds, \"seconds\")\r\n print()\r\n\r\n\r\n\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n"
},
{
"alpha_fraction": 0.3499999940395355,
"alphanum_fraction": 0.35499998927116394,
"avg_line_length": 13.760000228881836,
"blob_id": "f7000c454ced1bc3e5f4453c6fc4de9b4cf89170",
"content_id": "aa4c3b7bdf97facdade3beae7bc67714cee355a2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 400,
"license_type": "no_license",
"max_line_length": 52,
"num_lines": 25,
"path": "/pythoncompletedlessons/csvreadtest.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "\r\n\r\n###########################################\r\n#\r\n# csvreadstest\r\n#\r\n#\r\n#\r\n###########################################\r\n\r\nimport csv\r\n\r\nFILENAME = \"testthiscsv.txt\"\r\n\r\ndef main():\r\n\r\n\r\n\r\n with open(FILENAME, newline=\"\") as file:\r\n reader = csv.reader(file)\r\n for row in reader:\r\n print(row[0] + \" (\" + str(row[1]) + \")\")\r\n\r\n\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n\r\n"
},
{
"alpha_fraction": 0.49666109681129456,
"alphanum_fraction": 0.5008347034454346,
"avg_line_length": 19.428571701049805,
"blob_id": "6d2ab0da3020bb728a1ea071393e139e590d6e03",
"content_id": "8705526043bf3f259c199f54b96fb0750c32dfab",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1198,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 56,
"path": "/logging_test/logger_module2.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "import os\r\nimport datetime\r\nimport pandas as pd\r\n\r\nx = datetime.datetime.now()\r\ndt_now = x.strftime(\"%y%m%d%H%M%S%f\")\r\next = '.txt'\r\nfile_nm = 'logger_module2'\r\n\r\nos.chdir('C:\\PANDAS2019\\logs')\r\n\r\ndef init_log():\r\n\r\n log_msg = 'Application '+file_nm+' run at '+dt_now\r\n file_object = open(dt_now+file_nm+ext, 'w')\r\n file_object.write(log_msg)\r\n file_object.close()\r\n\r\ndef log_append(log_msg_append):\r\n\r\n file_object = open(dt_now + file_nm + ext, 'a')\r\n file_object.write('\\n')\r\n file_object.write(log_msg_append)\r\n file_object.close()\r\n\r\n# ------------------------------------------------------------\r\n# -- Check for files\r\n# -- checks downloads dir for new downloaded file(s)\r\ndef check_for_files():\r\n\r\n for file in os.listdir('C:\\PANDAS\\practicaltimeseries\\data'):\r\n if file.endswith(\".csv\"):\r\n print(file)\r\n\r\n\r\n\r\n\r\n\r\n# ------------------------------------------------------------\r\n# -- main\r\ninit_log()\r\nlog_append('main')\r\nlog_append('Checking for file(s)')\r\ncheck_for_files()\r\n\r\n\r\n\r\n\r\n# ------------------------------------------------------------\r\n# -- Load file(s) for analysis\r\n\r\n\r\n\r\nlog_append('success!')\r\n\r\nprint('Exit '+file_nm)"
},
{
"alpha_fraction": 0.6930618286132812,
"alphanum_fraction": 0.697586715221405,
"avg_line_length": 23.5,
"blob_id": "cb4e66fed81d556d9a50cab5bd7f5fdd4f3923be",
"content_id": "b199a6b749e26db8ee5d9a262b9c3cb6dd2bbdc3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1326,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 52,
"path": "/alpha_vantage_ticker_pull_export.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "from alpha_vantage.timeseries import TimeSeries\r\nfrom alpha_vantage.techindicators import TechIndicators\r\n#from matplotlib.pyplot import figure\r\n#import matplotlib.pyplot as plt\r\nimport pandas as pd\r\nimport os \r\n\r\nos.chdir(r\"C:\\tmp\")\r\n\r\n# Your key here\r\nkey = 'key'\r\n\r\n\r\n# ticker we are wanting to capture data on\r\nticker_symbol = 'BRK.B'\r\n\r\n\r\n# Chose your output format, or default to JSON (python dict)\r\nts = TimeSeries(key, output_format='pandas')\r\nti = TechIndicators(key)\r\n\r\n# Get the data, returns a tuple\r\n# aapl_data is a pandas dataframe, aapl_meta_data is a dict\r\ntick_data, tick_meta_data = ts.get_daily(symbol=ticker_symbol)\r\n\r\n\r\n\r\n\r\n# Visualization\r\n#figure(num=None, figsize=(15, 6), dpi=80, facecolor='w', edgecolor='k')\r\n#tick_data['4. close'].plot()\r\n#plt.tight_layout()\r\n#plt.grid()\r\n#plt.show()\r\n\r\n\r\n# convert dict to dataframe\r\nstock_df = pd.DataFrame.from_dict(tick_data)\r\n\r\n# add symbol column and poplulate\r\nstock_df['symbol'] = ticker_symbol\r\n\r\n# see contents with new column and ticker symbol\r\n#stock_df\r\n\r\n# export to csv\r\nexport_csv = stock_df.to_csv (r'file_out.csv', index = True, header=True) \r\n\r\n# replace '.' in name so file does not have two '.'\r\nticker_symbol = ticker_symbol.replace(\".\",\"\")\r\n# change generic file name to that of the ticker\r\nos.rename('file_out.csv', ticker_symbol+'.csv')\r\n"
},
{
"alpha_fraction": 0.6134072542190552,
"alphanum_fraction": 0.6355846524238586,
"avg_line_length": 23.350648880004883,
"blob_id": "6b20ff6735cced33f750d7363c494f5e9509d1a8",
"content_id": "45a256164076f09b632c7bfc472aac2348c3e52b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1984,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 77,
"path": "/moving_average_forecasting.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Fri Jan 10 12:56:54 2020\r\n\r\n@author: -\r\n\"\"\"\r\n\r\n#######################################################################\r\n\r\n# Moving Average Forecasting\r\n\r\n\r\n\r\nimport numpy as np\r\nimport pandas as pd\r\nimport matplotlib.pyplot as plt\r\nimport pandas_datareader as pdr\r\n\r\nstock_hist = pdr.get_data_yahoo('ORCL', start = '2015-01-01')\r\n# get rid of Adj Close \r\nstock_hist = stock_hist.drop('Adj Close', axis=1)\r\n# round to 2 decimal places\r\nstock_hist = round(stock_hist,2)\r\n\r\n# create 5 day average\r\n# stock_hist['5-day'] = stock_hist['Close'].rolling(5).mean()\r\n\r\n# print(stock_hist)\r\n\r\n# this assumes you aready know the closing price which we do not\r\n# so we have to use shift()\r\n# roll forecast foward one period.\r\nstock_hist['5-day'] = stock_hist['Close'].rolling(5).mean().shift()\r\n\r\n# print(stock_hist)\r\n\r\n\r\n# plot 5-day alongside close.\r\nplt.plot(stock_hist['Close'][-120:])\r\nplt.plot(stock_hist['5-day'][-120:])\r\n\r\n# so we are off a little since the end of the graph does not line up\r\n\r\n# put in error measure\r\n# Mean Absolute Deviation\r\nstock_hist['MAD'] = np.abs(stock_hist['Close'] - stock_hist['5-day'])\r\n\r\nprint('Mean Absolute Deviation')\r\nprint(stock_hist['MAD'].mean())\r\n# this shows how much you are off in dollars positive or negative\r\n\r\n\r\n# account for the scaling of the data we are using\r\n# Mean Absolute Percent Error\r\nstock_hist['MAPE'] = stock_hist['MAD'] / stock_hist['Close']\r\nprint('Mean Absolute Percent Error')\r\nprint(stock_hist['MAPE'].mean())\r\n\r\n\r\n\r\n# Mean Square Error\r\n# this resembles the variance and therefore the std dev\r\nstock_hist['MSE'] = stock_hist['MAD'] ** 2\r\nprint('Mean Square Error')\r\nprint(stock_hist['MSE'].mean())\r\n# This is the square as the name implies\r\n\r\nMSE = stock_hist['MSE'].mean()\r\n\r\n\r\nRMSE = np.sqrt(MSE)\r\nprint('Relative Mean Square Error')\r\nprint(RMSE)\r\n\r\n\r\n# go through with 10 day and see if you get better error measures.\r\n# rolling(5) -> rolling(10)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n"
},
{
"alpha_fraction": 0.3258427083492279,
"alphanum_fraction": 0.38327091932296753,
"avg_line_length": 17.0238094329834,
"blob_id": "ed328204aace39ee2e923f9b6c9c27419ea82c9e",
"content_id": "ea054e76a9fb86e200822a824d2c3ee6ed28635f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 801,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 42,
"path": "/pythoncompletedlessons/tupleexamples.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "###########################################\r\n#\r\n# tupleexamples\r\n#\r\n# working with tuples\r\n#\r\n###########################################\r\n\r\n\r\n\r\ndef get_location():\r\n # compute values of x, y, z\r\n x = 10\r\n y = 20\r\n z = x + y\r\n return x, y, z\r\n\r\n\r\ndef main():\r\n\r\n stats = (48.0, 30.5, 20.2, 100.0, 48.0)\r\n\r\n print(stats)\r\n\r\n # access items in the tuple\r\n print(stats[-1]) # last item\r\n print(stats[1:2]) # (30.5,)\r\n print(stats[1:3]) # (30.5, 20.2)\r\n print(stats[1:4]) # (30.5, 20.2, 100.0)\r\n\r\n\r\n print()\r\n\r\n # call get_location and unpack the returned tuple\r\n x, y, z = get_location()\r\n print(\"x: \", x)\r\n print(\"y: \", y)\r\n print(\"z: \", z)\r\n\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n\r\n"
},
{
"alpha_fraction": 0.6198723912239075,
"alphanum_fraction": 0.6253418326377869,
"avg_line_length": 24.16666603088379,
"blob_id": "ac211ba0c29130de2915f9c4aca1eed97ce3dd58",
"content_id": "37644e5e2e613347d2f64824e63fdd8cac4217e1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1097,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 42,
"path": "/logging_test/pandas_life_expectancy.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "import os\r\nimport datetime\r\nimport pandas as pd\r\n\r\nx = datetime.datetime.now()\r\ndt_now = x.strftime(\"%y%m%d%H%M%S%f\")\r\next = '.txt'\r\nfile_nm = 'pandas_life_expectancy'\r\n\r\n\r\n\r\nos.chdir('C:\\PANDAS2019\\logs')\r\nlog_msg = 'Application '+file_nm+' run at '+dt_now\r\nfile_object = open(dt_now+file_nm+ext, 'w')\r\nfile_object.write(log_msg)\r\nfile_object.close()\r\n\r\ndef log_append(log_msg_append):\r\n file_object = open(dt_now + file_nm + ext, 'a')\r\n file_object.write('\\n')\r\n file_object.write(log_msg_append)\r\n file_object.close()\r\n\r\n# ------------------------------------------------------------\r\n# -- Load file for analysis\r\nlog_append('Load file for analysis')\r\n\r\ndf = pd.read_csv('C:\\PANDAS\\CH10\\data\\gapminder.tsv', sep='\\t')\r\n\r\nprint(df.head())\r\n\r\n# calculate the average life expectancy for each year\r\navg_life_exp_by_year = df.groupby('year').lifeExp.mean()\r\nprint(avg_life_exp_by_year)\r\nlog_append('Results of average life expectancy by year:')\r\nlog_append(pd.Series.to_string(avg_life_exp_by_year))\r\n# -- END Load file for analysis\r\n\r\n\r\nlog_append('success!')\r\n\r\nprint('Exit '+file_nm)"
},
{
"alpha_fraction": 0.6473379135131836,
"alphanum_fraction": 0.667369544506073,
"avg_line_length": 25.691177368164062,
"blob_id": "a502579f7d5fa94005daf44c4e52d5855afbc7e9",
"content_id": "7fff279f218dab0cf0d053a6ab9291cb800807ab",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1897,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 68,
"path": "/stock_puller.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Fri Jan 10 11:06:12 2020\r\n\r\n@author: -\r\n\"\"\"\r\n\r\nimport numpy as np\r\nimport pandas as pd\r\nimport matplotlib.pyplot as plt\r\nimport pandas_datareader as pdr\r\n\r\nstock_hist = pdr.get_data_yahoo('ORCL', start = '2015-01-01')\r\n# get rid of Adj Close \r\nstock_hist = stock_hist.drop('Adj Close', axis=1)\r\n# round to 2 decimal places\r\nstock_hist = round(stock_hist,2)\r\n\r\n# print(stock_hist)\r\n\r\n# with plt.style.context('ggplot'):\r\n# plt.plot(stock_hist.Close)\r\n \r\n \r\n# resample or downsample because data is too noisy\r\nstock_hist.index = pd.DatetimeIndex(stock_hist.index)\r\nmonthly = stock_hist.resample('BM').last() \r\n# last business day of month also .first() BM business\r\n# M is just month sat or sun included.\r\n\r\nprint(monthly)\r\n\r\n\r\nplt.plot(monthly['Close'])\r\n\r\n# calculate the change from day to day.\r\n# shift(5) = shift from last 5 days.\r\nstock_hist['Change'] = stock_hist['Close'] - stock_hist['Close'].shift()\r\n\r\n# print(stock_hist)\r\n \r\n# not really useful because it is not scaled to the price.\r\n\r\n# need natural log of the change\r\nstock_hist['LN_Change'] = np.log(stock_hist['Close'] / stock_hist['Close'].shift())\r\n\r\n# print(stock_hist)\r\n\r\n# this is now scaled to the price and we get the instantaneous rate of return\r\n\r\n# see how it behaves\r\nwith plt.style.context('ggplot'):\r\n plt.figure(figsize=(10,8))\r\n plt.hist(stock_hist.LN_Change[1:],bins=50, edgecolor='black', density='True')\r\n \r\n \r\n# create volatility measure by std dev as compared to last month\r\n# Daily Vol volatility\r\nstock_hist['Daily Vol'] = stock_hist['LN_Change'].rolling(21).std().shift()\r\n# calculate expected change\r\nstock_hist['Exp Change'] = stock_hist['Close'] * stock_hist['Daily Vol']\r\n\r\n# print(stock_hist)\r\n\r\n# start at 22nd row since NaN is present and not able to be calculated\r\nstock_hist = stock_hist[22:]\r\n\r\nprint(stock_hist)\r\n\r\n\r\n\r\n\r\n\r\n "
},
{
"alpha_fraction": 0.583106279373169,
"alphanum_fraction": 0.6130790114402771,
"avg_line_length": 24.35714340209961,
"blob_id": "2ca006952e45e313db61cd2c409f3c9743029850",
"content_id": "8cb1a35fe4fd0d54147d6d26dcc1cd8eee80f117",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 367,
"license_type": "no_license",
"max_line_length": 49,
"num_lines": 14,
"path": "/pythoncompletedlessons/testscores.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "counter = 0\r\nscore_total = 0\r\ntest_score = 0\r\n\r\nwhile test_score != 999:\r\n test_score = int(input(\"Enter test score: \"))\r\n if test_score >= 0 and test_score <= 100:\r\n score_total += test_score\r\n counter += 1\r\n\r\naverage_score = round(score_total / counter)\r\n\r\nprint(\"Total Score: \" + str(score_total))\r\nprint(\"Average Score: \" + str(average_score))"
},
{
"alpha_fraction": 0.48275861144065857,
"alphanum_fraction": 0.5082101821899414,
"avg_line_length": 16.707693099975586,
"blob_id": "a2d743c3be41111c99a2176b7a16526714655e0b",
"content_id": "e7710d4cbcc14846d87d2e847a9f5f2394779ab5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1218,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 65,
"path": "/pythoncompletedlessons/listexamples2.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "###########################################\r\n#\r\n# listexamples2\r\n#\r\n# working with list\r\n#\r\n# copy, slice and concatenate lists\r\n#\r\n# copy module imported for deepcopy()\r\n###########################################\r\n\r\nimport copy\r\n\r\n\r\ndef main():\r\n\r\n # make a shallow copy of a list\r\n # anything you do to one is reflected in both\r\n list_one = [1, 2, 3, 4, 5]\r\n list_two = list_one\r\n list_two[4] = 7\r\n print(list_one)\r\n print(list_two)\r\n\r\n print()\r\n\r\n # make a deep copy of a list\r\n # anything you do to one does NOT effect the other\r\n list_one = [1, 2, 3, 4, 5]\r\n list_two = copy.deepcopy(list_one)\r\n list_two[1] = 4\r\n print(list_one)\r\n print(list_two)\r\n\r\n print()\r\n\r\n # slicing lists\r\n # slice start and end arguments\r\n numbers = [52, 54, 56, 58, 60, 62]\r\n print(numbers)\r\n print(numbers[0:2])\r\n print(numbers[:2])\r\n print(numbers[4:])\r\n\r\n print()\r\n\r\n\r\n # concatenate\r\n inventory = [\"staff\", \"robe\"]\r\n chest = [\"scroll\", \"pestle\"]\r\n combined = inventory + chest\r\n print(inventory)\r\n inventory += chest\r\n print(inventory)\r\n print()\r\n print(combined)\r\n print()\r\n\r\n\r\n\r\n\r\n\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n\r\n"
},
{
"alpha_fraction": 0.500275194644928,
"alphanum_fraction": 0.5101816058158875,
"avg_line_length": 19.77108383178711,
"blob_id": "9fd152ac7e0c0c1953dd73d90701d748d89d0d08",
"content_id": "fb23ef0f3f2b2dfd8df5862dd45733f08ac4a17b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1817,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 83,
"path": "/file_read_plot_out.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "\r\n\r\n\r\nimport os\r\nimport pandas as pd\r\nimport matplotlib.pyplot as plt\r\n\r\n\r\n\r\n \r\n\r\n\r\nFILEDIR = 'C:\\PANDAS2019\\pandas_tutorial_jupyter_output\\data'\r\n\r\n\r\ndef check_files():\r\n print(\"Checking for files\")\r\n for file in os.listdir(FILEDIR):\r\n if file.endswith(\".csv\"):\r\n # print(file)\r\n # stock_name=file.replace(\".csv\",\"\")\r\n # print(stock_name)\r\n # stock_name = pd.read_csv(file, index_col=0, parse_dates=True)\r\n # stock_name\r\n load_files(file)\r\n \r\n\r\ndef load_files(file):\r\n print(\"file -> \", file)\r\n \r\n os.chdir(FILEDIR)\r\n currStock = pd.read_csv(file)\r\n \r\n stockname=file.replace(\".csv\",\"\")\r\n print(\"Loaded -> \", stockname)\r\n \r\n \r\n # print(currStock) # DEBUG show contents\r\n plot_files(currStock, stockname)\r\n\r\n \r\n\r\n\r\ndef plot_files(currStock, stockname):\r\n print(\"plotting -> \", stockname)\r\n print()\r\n print(currStock.shape)\r\n #currStock.legend(label=stockname)\r\n currStock[\"Close\"].plot(grid = True, legend=True) # Plot the adjusted closing price of current stock\r\n \r\n ######\r\n currStock.index = currStock['Date']\r\n\r\n print(currStock.head(10))\r\n\r\n\r\n os.chdir('C:\\PANDAS2019\\plots_out')\r\n plt.figure(figsize=(5.5, 5.5))\r\n currStock['Close'].plot(color='b')\r\n plt.title(stockname)\r\n plt.xlabel('Time')\r\n plt.ylabel('Closing Value')\r\n plt.savefig(stockname+'.png', format='png', dpi=300)\r\n\r\n ######\r\n\r\n return\r\n \r\n\r\ndef main():\r\n\r\n print(\"Starting stockfileloader routine\")\r\n\r\n check_files()\r\n\r\n \r\n \r\n \r\n \r\n\r\n print(\"Complete!\")\r\n\r\n\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n "
},
{
"alpha_fraction": 0.5197505354881287,
"alphanum_fraction": 0.5467775464057922,
"avg_line_length": 15.592592239379883,
"blob_id": "f5145e8ab8368514251b6a86806e34d6679f9cb8",
"content_id": "7a5ee8915f75fa922d860e54f1780b90683ad662",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 481,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 27,
"path": "/pdr_get_quote_every_x_seconds.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Fri Jan 3 13:51:10 2020\r\n\r\n@author: \r\n\"\"\"\r\n\r\nimport pandas as pd\r\nimport pandas_datareader as pdr\r\nfrom time import sleep\r\n\r\nsymbols = \"AAPL ORCL MJ\".split()\r\n\r\ndef get_prices(symbols):\r\n symbols.sort()\r\n return pdr.get_quote_yahoo(symbols)['price']\r\n\r\n\r\ndef main():\r\n while True:\r\n print(get_prices(symbols))\r\n print(\"CTRL + C to quit\")\r\n sleep(5)\r\n \r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n \r\n"
},
{
"alpha_fraction": 0.3920595645904541,
"alphanum_fraction": 0.3920595645904541,
"avg_line_length": 14.791666984558105,
"blob_id": "54744cc25de636cf251a177fb982ee684d57483c",
"content_id": "755769a0a51fa8b6049acff27aee6d279ff333a7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 403,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 24,
"path": "/pythoncompletedlessons/fileIO.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "###########################################\r\n#\r\n# fileIO\r\n#\r\n# use files\r\n#\r\n###########################################\r\n\r\n\r\ndef main():\r\n outfile = open(\"test.txt\", \"w\")\r\n outfile.write(\"NEW X Test\")\r\n outfile.close()\r\n\r\n print(\"write complete\")\r\n\r\n infile = open(\"test.txt\", \"r\")\r\n print(infile.readline())\r\n\r\n print(\"read complete\")\r\n\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n"
},
{
"alpha_fraction": 0.5198019742965698,
"alphanum_fraction": 0.5198019742965698,
"avg_line_length": 16.545454025268555,
"blob_id": "f362793900828549ac3c367d7f4cb436ce185991",
"content_id": "3096d257fa215f182b2d41bbd8235c06a75f1439",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 202,
"license_type": "no_license",
"max_line_length": 37,
"num_lines": 11,
"path": "/logging_test/scratchpad.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "import datetime\r\n\r\nx = datetime.datetime.now()\r\nprint(x.strftime(\"%y%m%d%H%M%S%f\"))\r\n# %f is microsecond\r\n\r\ndt_now = x.strftime(\"%y%m%d%H%M%S%f\")\r\n\r\nprint(\"---------------------------\")\r\n\r\nprint(dt_now)"
},
{
"alpha_fraction": 0.6326530575752258,
"alphanum_fraction": 0.6394557952880859,
"avg_line_length": 21.600000381469727,
"blob_id": "37756ad7f57457a3e545a515820bb4b358ad2d25",
"content_id": "a3e741abec4e10aad2c870d1e21748102d26fc86",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 588,
"license_type": "no_license",
"max_line_length": 51,
"num_lines": 25,
"path": "/logging_test/logger_module.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "import os\r\nimport datetime\r\n\r\nx = datetime.datetime.now()\r\ndt_now = x.strftime(\"%y%m%d%H%M%S%f\")\r\next = '.txt'\r\nfile_nm = 'logger_module'\r\n\r\nos.chdir('C:\\PANDAS2019\\logs')\r\nlog_msg = 'Application '+file_nm+' run at '+dt_now\r\nfile_object = open(dt_now+file_nm+ext, 'w')\r\nfile_object.write(log_msg)\r\nfile_object.close()\r\n\r\ndef log_append(log_msg_append):\r\n file_object = open(dt_now + file_nm + ext, 'a')\r\n file_object.write('\\n')\r\n file_object.write(log_msg_append)\r\n file_object.close()\r\n\r\nlog_append('new appended message')\r\n\r\nlog_append('success!')\r\n\r\nprint('Exit '+file_nm)"
},
{
"alpha_fraction": 0.4886164665222168,
"alphanum_fraction": 0.4956217110157013,
"avg_line_length": 12.692307472229004,
"blob_id": "16324c87a2b99943a72245d42fbafb9ac8729a72",
"content_id": "6dc1cd23882e616f6bfcffb2554478cb393b1fde",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 571,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 39,
"path": "/pythoncompletedlessons/stockfileloader.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "###########################################\r\n#\r\n# stockfileloader\r\n#\r\n# procedural way to load multiple files\r\n#\r\n###########################################\r\n\r\nimport os\r\nimport pandas as pd\r\nos.chdir('C:\\PANDAS2019\\pandas_tutorial_jupyter_output\\data')\r\nos.getcwd()\r\n\r\ndef check_files():\r\n print(\"Checking for files\")\r\n\r\ndef load_files(name):\r\n print(\"name -> \", name)\r\n\r\ndef main():\r\n\r\n print(\"Starting stockfileloader routine\")\r\n\r\n check_files()\r\n\r\n load_files('AAPL')\r\n\r\n print(\"Complete!\")\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\nif __name__ == \"__main__\":\r\n main()"
},
{
"alpha_fraction": 0.5845959782600403,
"alphanum_fraction": 0.6174242496490479,
"avg_line_length": 20.600000381469727,
"blob_id": "eaf34b6250d4674d568501e7bd29dc999de887f3",
"content_id": "4e05f9b241167b2c1df6a0a508ec8be2eb28fa1a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 792,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 35,
"path": "/df_to_list.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "\r\nimport matplotlib.pyplot as plt \r\nfrom pandas import DataFrame\r\n\r\nProducts = {'Product': ['Tablet','iPhone','Laptop','Monitor'],\r\n 'Price': [250,800,1200,300]\r\n }\r\n\r\ndf = DataFrame(Products, columns= ['Product', 'Price'])\r\n\r\nProducts_list = df.values.tolist()\r\nprint (Products_list)\r\n\r\nprint(df.Price)\r\n\r\nx = df.Product\r\ny = df.Price\r\n\r\n# plotting the points \r\nplt.plot(x, y, color='green', linestyle='dashed', linewidth = 3, \r\n marker='o', markerfacecolor='blue', markersize=12) \r\n \r\n# setting x and y axis range \r\nplt.ylim(1,2000) \r\nplt.xlim(1,2000) \r\n \r\n# naming the x axis \r\nplt.xlabel('x - axis') \r\n# naming the y axis \r\nplt.ylabel('y - axis') \r\n \r\n# giving a title to my graph \r\nplt.title(partner_name) \r\n \r\n# function to show the plot \r\nplt.show() "
},
{
"alpha_fraction": 0.33856502175331116,
"alphanum_fraction": 0.36547085642814636,
"avg_line_length": 15.600000381469727,
"blob_id": "79d9f0e2a171ece6f3c5ed9fda0c64f335534161",
"content_id": "555191690a9dd4112f8ced61bcc3f1aad72d16da",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 446,
"license_type": "no_license",
"max_line_length": 49,
"num_lines": 25,
"path": "/pythoncompletedlessons/csvwritetest.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "\r\n\r\n###########################################\r\n#\r\n# csvwritestest\r\n#\r\n#\r\n#\r\n###########################################\r\n\r\nimport csv\r\n\r\nFILENAME = \"testthiscsv.txt\"\r\n\r\ndef main():\r\n\r\n movies = [[\"New Movie\", 1990],\r\n [\"X\", 2001],\r\n [\"Y\", 2019]]\r\n\r\n with open(FILENAME, \"w\", newline=\"\") as file:\r\n writer = csv.writer(file)\r\n writer.writerows(movies)\r\n\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n\r\n"
},
{
"alpha_fraction": 0.47625765204429626,
"alphanum_fraction": 0.4837799668312073,
"avg_line_length": 20.542552947998047,
"blob_id": "dbc06c1a18ea7e224517458db1ff56a6ba4e90d7",
"content_id": "33942a87841dbccfe438c0db27881ab7628700f4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2127,
"license_type": "no_license",
"max_line_length": 108,
"num_lines": 94,
"path": "/stockfileloaderV2.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "\r\n\r\n###########################################\r\n#\r\n# stockfileloader\r\n#\r\n# procedural way to load multiple files\r\n#\r\n###########################################\r\n\r\nimport os\r\nimport pandas as pd\r\n\r\n\r\n\r\n \r\n\r\n\r\nFILEDIR = 'C:\\PANDAS2019\\pandas_tutorial_jupyter_output\\data'\r\n\r\n\r\ndef check_files():\r\n print(\"Checking for files\")\r\n for file in os.listdir(FILEDIR):\r\n if file.endswith(\".csv\"):\r\n # print(file)\r\n # stock_name=file.replace(\".csv\",\"\")\r\n # print(stock_name)\r\n # stock_name = pd.read_csv(file, index_col=0, parse_dates=True)\r\n # stock_name\r\n load_files(file)\r\n \r\n\r\ndef load_files(file):\r\n print(\"file -> \", file)\r\n \r\n os.chdir(FILEDIR)\r\n currStock = pd.read_csv(file)\r\n \r\n stockname=file.replace(\".csv\",\"\")\r\n print(\"Loaded -> \", stockname)\r\n \r\n \r\n # print(currStock) # DEBUG show contents\r\n plot_files(currStock, stockname)\r\n\r\n \r\n\r\n\r\ndef plot_files(currStock, stockname):\r\n print(\"plotting -> \", stockname)\r\n print()\r\n print(currStock.shape)\r\n #currStock.legend(label=stockname)\r\n currStock[\"Close\"].plot(grid = True, legend=True) # Plot the adjusted closing price of current stock\r\n\r\n x = currStock[\"Date\"]\r\n y = currStock[\"Close\"]\r\n # plotting the points \r\n plt.plot(x, y, color='green', linestyle='dashed', linewidth = 3 \r\n #, marker='o'\r\n , markerfacecolor='blue', markersize=12) \r\n \r\n # setting x and y axis range \r\n plt.ylim(1,200) \r\n plt.xlim(1,200) \r\n \r\n # naming the x axis \r\n plt.xlabel('x - axis') \r\n # naming the y axis \r\n plt.ylabel('y - axis') \r\n \r\n # giving a title to my graph \r\n plt.title(stockname) \r\n \r\n # function to show the plot \r\n plt.show() \r\n \r\n\r\ndef main():\r\n\r\n print(\"Starting stockfileloader routine\")\r\n\r\n check_files()\r\n\r\n \r\n \r\n \r\n \r\n\r\n print(\"Complete!\")\r\n\r\n\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n "
},
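The loader above reads each CSV with a default integer index; a sketch (under the same Date/Close column assumption) that parses dates up front, which also makes the time axis of the later plot meaningful:

```python
# Sketch: load each stock CSV with a real datetime index.
import pathlib
import pandas as pd

FILEDIR = pathlib.Path(r'C:\PANDAS2019\pandas_tutorial_jupyter_output\data')

for csv_path in FILEDIR.glob('*.csv'):
    stock = pd.read_csv(csv_path, index_col='Date', parse_dates=True)
    print(csv_path.stem, '-> last close:', stock['Close'].iloc[-1])
```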
{
"alpha_fraction": 0.5960502624511719,
"alphanum_fraction": 0.6858168840408325,
"avg_line_length": 23.136363983154297,
"blob_id": "49ce81e883a1211ef0820d42e383a101cbf57f0a",
"content_id": "d86c0032dc1aa97593bae5a10c2eb5b086d48c6c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 557,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 22,
"path": "/stock_data_read_and_plot_out.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "\r\n\r\nimport os\r\nimport pandas as pd\r\nimport matplotlib.pyplot as plt\r\n\r\nos.chdir('C:\\PANDAS\\practicaltimeseries\\data')\r\nos.getcwd()\r\n\r\nsp500_df = pd.read_csv('GSPC.csv')\r\n\r\n# Change the row indices of the dataframe using the Date column\r\nsp500_df.index = sp500_df['Date']\r\n\r\nprint(sp500_df.head(10))\r\n\r\n\r\nos.chdir('C:\\PANDAS2019\\plots_out')\r\nplt.figure(figsize=(5.5, 5.5))\r\nsp500_df['Close'].plot(color='b')\r\nplt.title('S&P 500 between 2001 - 2018')\r\nplt.xlabel('Time')\r\nplt.ylabel('Closing Value')\r\nplt.savefig('sp500_20190918.png', format='png', dpi=300)\r\n"
},
{
"alpha_fraction": 0.5334281921386719,
"alphanum_fraction": 0.5362731218338013,
"avg_line_length": 25.115385055541992,
"blob_id": "792e95c55435954b2f387980a50104d7dd642d72",
"content_id": "eebcbf3bf00cbccf8cdff2f471dd730aac554589",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 703,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 26,
"path": "/pythoncompletedlessons/accumulatescores.py",
"repo_name": "sarkhub/sand",
"src_encoding": "UTF-8",
"text": "##########################################\r\n# accumulatescores.py\r\n#\r\n##########################################\r\n\r\n# display title\r\nprint(\"The test score program\")\r\nprint()\r\nprint(\"Enter three test scores\")\r\nprint(\"==========================\")\r\n\r\n# get scores from user and accumulate the total\r\ntotal_score = 0\r\ntotal_score += int(input(\"Enter test score: \"))\r\ntotal_score += int(input(\"Enter test score: \"))\r\ntotal_score += int(input(\"Enter test score: \"))\r\n\r\n# calculate the average score\r\naverage_score = round(total_score / 3)\r\n\r\n# format and display results\r\nprint(\"==========================\")\r\nprint(\"Total Score: \", total_score,\r\n \"\\nAverage Score:\", average_score)\r\nprint()\r\nprint(\"Bye\")"
}
] | 36 |
sabmeua/gg_gst
|
https://github.com/sabmeua/gg_gst
|
561b0d7e3ab095d970e57b5ab2c48ca14606822a
|
5d59218578cfe9f00f8944446446a5ec5bc99a0a
|
59bd892fb88210562988e1b22a3f76d38a6d5b56
|
refs/heads/master
| 2022-11-18T04:02:04.696759 | 2020-07-21T07:35:25 | 2020-07-21T07:35:25 | 269,573,203 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6614824533462524,
"alphanum_fraction": 0.7223693132400513,
"avg_line_length": 39.2933349609375,
"blob_id": "f6c9839963430a09b4f836b7dd24ff744be167d8",
"content_id": "7533ed876d11fd8444665e95f3cb20945c3abbe3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 6044,
"license_type": "no_license",
"max_line_length": 145,
"num_lines": 150,
"path": "/Dockerfile",
"repo_name": "sabmeua/gg_gst",
"src_encoding": "UTF-8",
"text": "FROM amazonlinux:2\n\n# Set ENV_VAR for Greengrass RC to be untarred inside Docker Image\nARG GREENGRASS_RELEASE_URL=https://d1onfpft10uf5o.cloudfront.net/greengrass-core/downloads/1.10.2/greengrass-linux-x86-64-1.10.2.tar.gz\n\n# Install Greengrass Core Dependencies\nRUN yum update -y && \\\n yum install -y shadow-utils tar.x86_64 gzip xz wget iproute java-1.8.0 make && \\\n yum install -y openssl-devel python27 python37 && \\\n ln -s /usr/bin/java /usr/local/bin/java8 && \\\n wget $GREENGRASS_RELEASE_URL && \\\n wget https://nodejs.org/dist/v6.10.2/node-v6.10.2-linux-x64.tar.xz && \\\n tar xf node-v6.10.2-linux-x64.tar.xz && \\\n cp node-v6.10.2-linux-x64/bin/node /usr/bin/nodejs6.10 && \\\n wget https://nodejs.org/dist/v8.10.0/node-v8.10.0-linux-x64.tar.xz && \\\n tar xf node-v8.10.0-linux-x64.tar.xz && \\\n cp node-v8.10.0-linux-x64/bin/node /usr/bin/nodejs8.10 && \\\n wget https://nodejs.org/dist/v12.13.0/node-v12.13.0-linux-x64.tar.xz && \\\n tar xf node-v12.13.0-linux-x64.tar.xz && \\\n cp node-v12.13.0-linux-x64/bin/node /usr/bin/nodejs12.x && \\\n ln -s /usr/bin/nodejs12.x /usr/bin/node && \\\n rm -rf node-v6.10.2-linux-x64.tar.xz node-v6.10.2-linux-x64 && \\\n rm -rf node-v8.10.0-linux-x64.tar.xz node-v8.10.0-linux-x64 && \\\n rm -rf node-v12.13.0-linux-x64 node-v12.13.0-linux-x64.tar.xz && \\\n yum remove -y wget && \\\n rm -rf /var/cache/yum\n\n# Copy Greengrass Licenses AWS IoT Greengrass Docker Image\nCOPY greengrass-license-v1.pdf /\n\n# Copy start-up script\nCOPY \"greengrass-entrypoint.sh\" /\n\n# Setup Greengrass inside Docker Image\nRUN export GREENGRASS_RELEASE=$(basename $GREENGRASS_RELEASE_URL) && \\\n tar xzf $GREENGRASS_RELEASE -C / && \\\n rm $GREENGRASS_RELEASE && \\\n useradd -r ggc_user && \\\n groupadd -r ggc_group\n\n# Expose 8883 to pub/sub MQTT messages\nEXPOSE 8883\n\nRUN ln -s /lib/python2.7/site-packages/amazon_linux_extras /lib/python3.7/site-packages/\n\nRUN yum update -y && \\\n yum install -y gstreamer1 gstreamer1-devel gstreamer1-plugins-base \\\n gstreamer1-plugins-base-devel gstreamer1-plugins-bad-free \\\n gstreamer1-plugins-bad-free-devel gstreamer1-plugins-good \\\n gstreamer1-plugins-base-tools gstreamer1-plugins-bad-free-gtk \\\n gstreamer1-plugins-ugly-free gstreamer1-plugins-ugly-free-devel \\\n python3-devel pycairo pycairo-devel pygobject3-devel cairo-gobject-devel \\\n wget bzip2-devel orc-devel && \\\n yum group install -y Development tools && \\\n rm -rf /var/cache/yum\n\nRUN git clone https://github.com/GStreamer/gst-python.git\nWORKDIR gst-python\nRUN git fetch --tag\nRUN git checkout `gst-launch-1.0 --version | head -n1 | awk '{print $NF}'`\nENV PYTHON=/usr/bin/python3\nRUN ./autogen.sh --disable-gtk-doc\nRUN make -j8 && make install\n\nWORKDIR /\n\nRUN curl -O -L https://www.nasm.us/pub/nasm/releasebuilds/2.14.02/nasm-2.14.02.tar.bz2\nRUN tar xf nasm-2.14.02.tar.bz2\nWORKDIR nasm-2.14.02\nRUN ./autogen.sh\nRUN ./configure\nRUN make -j8 && make install\n\nWORKDIR /\n\nRUN curl -O -L https://www.tortall.net/projects/yasm/releases/yasm-1.3.0.tar.gz\nRUN tar xf yasm-1.3.0.tar.gz\nWORKDIR yasm-1.3.0\nRUN ./configure\nRUN make -j8 && make install\n\nWORKDIR /\n\n# libx264 must be older than ver.0.152\n#RUN git clone https://github.com/mirror/x264.git\n#WORKDIR x264\n#RUN git fetch origin stable\n#RUN git checkout stable\nRUN wget ftp://ftp.videolan.org/pub/x264/snapshots/x264-snapshot-20180801-2245-stable.tar.bz2\nRUN tar xf x264-snapshot-20180801-2245-stable.tar.bz2\nWORKDIR x264-snapshot-20180801-2245-stable\nRUN ./configure 
--enable-static --enable-shared\nRUN make -j8 && make install\nENV PKG_CONFIG_PATH=/usr/local/lib/pkgconfig:$PKG_CONFIG_PATH\nRUN ldconfig /usr/local/lib\n\nWORKDIR /\n\nRUN wget https://gstreamer.freedesktop.org/src/gst-plugins-ugly/gst-plugins-ugly-`gst-launch-1.0 --version | head -n1 | awk '{print $NF}'`.tar.xz\nRUN tar xf ./gst-plugins-ugly-`gst-launch-1.0 --version | head -n1 | awk '{print $NF}'`.tar.xz\nRUN ln -s ./gst-plugins-ugly-`gst-launch-1.0 --version | head -n1 | awk '{print $NF}'` ./gst-plugins-ugly\nWORKDIR gst-plugins-ugly\nRUN ./configure --enable-x264 --enable-orc\nRUN make -j8 && make install\n\nWORKDIR /\n\nRUN wget https://gstreamer.freedesktop.org/src/gst-libav/gst-libav-`gst-launch-1.0 --version | head -n1 | awk '{print $NF}'`.tar.xz\nRUN tar xf ./gst-libav-`gst-launch-1.0 --version | head -n1 | awk '{print $NF}'`.tar.xz\nRUN ln -s ./gst-libav-`gst-launch-1.0 --version | head -n1 | awk '{print $NF}'` ./gst-libav\nWORKDIR gst-libav\n#RUN ./configure --with-libav-extra-configure=\"--enable-libx264 --enable-gpl\" --enable-gpl --enable-orc\nRUN ./configure --enable-gpl --enable-orc\nRUN make -j8 && make install\n\nWORKDIR /\n\nRUN wget https://github.com/Kitware/CMake/releases/download/v3.17.3/cmake-3.17.3-Linux-x86_64.sh\nRUN chmod +x cmake-3.17.3-Linux-x86_64.sh\nRUN ./cmake-3.17.3-Linux-x86_64.sh --skip-license --prefix=/usr\n\nRUN git clone --recursive https://github.com/awslabs/amazon-kinesis-video-streams-producer-sdk-cpp.git\nWORKDIR amazon-kinesis-video-streams-producer-sdk-cpp\nRUN mkdir -p build\nWORKDIR build\nRUN cmake -DBUILD_GSTREAMER_PLUGIN=ON ..\nRUN make -j8\nRUN cp ./*.so /usr/local/lib/gstreamer-1.0\n\nWORKDIR /\n\nRUN pip3 install PyGObject numpy==1.16.4 opencv-python\nRUN pip3 install tensorflow==1.14 trafaret greengrasssdk\nRUN pip3 install git+https://github.com/jackersson/gstreamer-python.git#egg=gstreamer-python\n\nENV GST_PLUGIN_PATH=/usr/local/lib/gstreamer-1.0:/myplugins\nENV GST_DEBUG=python:4\n\nCOPY kvs_log_configuration .\n\n#RUN pip3 install Cython contextlib2 pillow lxml matplotlib tf_slim\n#RUN git clone https://github.com/tensorflow/models.git\n#RUN pip3 install pycocotools scipy\n#RUN wget https://github.com/google/protobuf/releases/download/v3.3.0/protoc-3.3.0-linux-x86_64.zip\n#RUN unzip -d /usr protoc-3.3.0-linux-x86_64.zip\n#WORKDIR models/research\n#RUN protoc object_detection/protos/*.proto --python_out=.\n#ENV PYTHONPATH=$PYTHONPATH:/models/research/object_detection:/models/research/slim\n## test object detection api\n#RUN python3 object_detection/builders/model_builder_tf1_test.py\n"
},
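The Dockerfile builds kvssink, x264enc and the Python plugin search path; a quick sketch to sanity-check inside the built image that the elements were actually registered (element names taken from elsewhere in this repo):

```python
# Sketch: verify inside the container that the plugins built by the
# Dockerfile are visible to GStreamer.
import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst

Gst.init(None)
for name in ('kvssink', 'x264enc', 'object_detection', 'awsiot_notify'):
    found = Gst.ElementFactory.find(name) is not None
    print(f'{name}: {"ok" if found else "MISSING"}')
```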
{
"alpha_fraction": 0.729411780834198,
"alphanum_fraction": 0.729411780834198,
"avg_line_length": 26.66666603088379,
"blob_id": "690fec79d77d9571c5e55dfdbc2a98d11efd0a11",
"content_id": "ae2781df561f415aa20d34cf2df194a96d82396e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Makefile",
"length_bytes": 85,
"license_type": "no_license",
"max_line_length": 56,
"num_lines": 3,
"path": "/Makefile",
"repo_name": "sabmeua/gg_gst",
"src_encoding": "UTF-8",
"text": "\nlambda-archive:\n\tcd src;\\\n\tzip -r ../function.zip greengrasssdk lambda_function.py\n\n"
},
{
"alpha_fraction": 0.5755395889282227,
"alphanum_fraction": 0.6043165326118469,
"avg_line_length": 18.85714340209961,
"blob_id": "d7a9d8a5acf853b8b27ab4f7342045d43901a833",
"content_id": "bca905e60bf9a0b69f4c20185af3e6c564455afb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 139,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 7,
"path": "/README.md",
"repo_name": "sabmeua/gg_gst",
"src_encoding": "UTF-8",
"text": "# gg_gst\n\n```\n$ docker build -t gg_gst .\n$ xhost +local:\n$ docker run -it -e DISPLAY=$DISPLAY -v /tmp/.X11-unix/:/tmp/.X11-unix gg_gst\n```\n"
},
{
"alpha_fraction": 0.5021644830703735,
"alphanum_fraction": 0.5069745182991028,
"avg_line_length": 34.844825744628906,
"blob_id": "062e83932bf9df2e1c0845a10511e9cfca1b7a86",
"content_id": "7edf3002d29340f8d40b69135d64fa6ca54a5f16",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2079,
"license_type": "no_license",
"max_line_length": 92,
"num_lines": 58,
"path": "/myplugins/python/awsiot_notify.py",
"repo_name": "sabmeua/gg_gst",
"src_encoding": "UTF-8",
"text": "import os\nimport logging\nimport greengrasssdk\nimport json\nimport traceback\n\nimport gi\ngi.require_version('GstBase', '1.0')\ngi.require_version('Gst', '1.0')\ngi.require_version('GLib', '2.0')\ngi.require_version('GObject', '2.0')\n\nfrom gi.repository import GstBase, Gst, GLib, GObject\nfrom gstreamer.gst_objects_info_meta import gst_meta_get\n\nclass AwsIotNotify(GstBase.BaseTransform):\n __gstmetadata__ = ('AWS IoT Notify',\n 'Transform',\n 'AWS IoT Notify plugin',\n 'sabmeua<[email protected]>')\n\n __gsttemplates__ = (Gst.PadTemplate.new('src',\n Gst.PadDirection.SRC,\n Gst.PadPresence.ALWAYS,\n Gst.Caps.from_string('video/x-raw,format=RGB')),\n Gst.PadTemplate.new('sink',\n Gst.PadDirection.SINK,\n Gst.PadPresence.ALWAYS,\n Gst.Caps.from_string('video/x-raw,format=RGB')))\n\n def __init__(self):\n super().__init__()\n self.detection = False\n self.client = greengrasssdk.client('iot-data')\n\n def do_transform_ip(self, buffer: Gst.Buffer) -> Gst.FlowReturn:\n try:\n objects = gst_meta_get(buffer)\n pts = Gst.TIME_ARGS(buffer.pts)\n\n prev, self.detection = self.detection, False\n if objects:\n self.detection = True\n if not prev:\n detection = objects[0]\n detection['pts'] = pts\n self.client.publish(topic='object/detection',\n qos=0,\n payload=json.dumps(detection))\n\n except Exception as err:\n Gst.error(f\"Error {self}: {traceback.format_exc()}\")\n\n return Gst.FlowReturn.OK\n\n\nGObject.type_register(AwsIotNotify)\n__gstelementfactory__ = ('awsiot_notify', Gst.Rank.NONE, AwsIotNotify)\n"
},
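awsiot_notify above publishes the first detection of each burst to the object/detection topic; a sketch of how the element could be wired into a pipeline (the upstream elements and the model path are illustrative):

```python
# Sketch: chaining the custom elements in a pipeline string. Requires
# GST_PLUGIN_PATH to include /myplugins, as set in the Dockerfile.
import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst

Gst.init(None)
pipeline = Gst.parse_launch(
    'videotestsrc ! videoconvert ! video/x-raw,format=RGB'
    ' ! object_detection model=frozen_inference_graph.pb'  # hypothetical model file
    ' ! awsiot_notify ! fakesink'
)
pipeline.set_state(Gst.State.PLAYING)
```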
{
"alpha_fraction": 0.6392092108726501,
"alphanum_fraction": 0.6482701897621155,
"avg_line_length": 28.609756469726562,
"blob_id": "34ff077e597ee9646835741911b433b197bcf12e",
"content_id": "914c26d4994bb6476a9958953dcb1c2bf5c5c972",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2428,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 82,
"path": "/src/lambda_function.py",
"repo_name": "sabmeua/gg_gst",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n\nimport os\nimport sys\nimport traceback\n\nimport gi\ngi.require_version('Gst', '1.0')\ngi.require_version('GLib', '2.0')\ngi.require_version('GObject', '2.0')\nfrom gi.repository import GLib, GObject, Gst\n\nimport logging\nlogger = logging.getLogger(__name__)\nlogging.basicConfig(stream=sys.stdout, level=logging.DEBUG)\n\nSTREAM_NAME = os.environ.get('STREAM_NAME')\nIOT_CREDENTIAL_ENDPOINT = os.environ.get('IOT_CREDENTIAL_ENDPOINT')\nCERT_ID = os.environ.get('CERT_ID')\n\nKVS_CMD = f'kvssink stream-name={STREAM_NAME} framerate=15'\\\n ' aws-region=ap-northeast-1'\\\n ' log-config=/kvs_log_configuration'\\\n ' iot-certificate=\"iot-certificate,'\\\n f'endpoint={IOT_CREDENTIAL_ENDPOINT},'\\\n f'cert-path=/greengrass/certs/{CERT_ID}.cert.pem,'\\\n f'key-path=/greengrass/certs/{CERT_ID}.private.key,'\\\n 'ca-path=/greengrass/certs/root.ca.pem,'\\\n 'role-aliases=KvsCameraIoTRoleAlias\"'\n\nDEFAULT_PIPELINE = 'videotestsrc ! clockoverlay auto-resize=false'\\\n ' ! videorate ! video/x-raw,format=I420,framerate=15/1'\\\n ' ! x264enc tune=zerolatency ! h264parse'\n\ndef on_message(bus: Gst.Bus, message: Gst.Message, loop: GLib.MainLoop):\n mtype = message.type\n if mtype == Gst.MessageType.EOS:\n logger.info(\"End of stream\")\n loop.quit()\n\n elif mtype == Gst.MessageType.ERROR:\n err, debug = message.parse_error()\n logger.error(err, debug)\n loop.quit()\n\n elif mtype == Gst.MessageType.WARNING:\n err, debug = message.parse_warning()\n logger.warning(err, debug)\n\n return True\n\ndef main():\n logger.info('Start gstream pipeline')\n\n Gst.init(sys.argv)\n\n cmd = os.environ.get('PIPELINE', DEFAULT_PIPELINE)\n if os.environ.get('USE_KVS', True):\n cmd += f' ! {KVS_CMD}'\n logger.info('Pipeline : %s', cmd)\n\n pipeline = Gst.parse_launch(cmd)\n bus = pipeline.get_bus()\n bus.add_signal_watch()\n pipeline.set_state(Gst.State.PLAYING)\n loop = GLib.MainLoop()\n bus.connect(\"message\", on_message, loop)\n\n try:\n loop.run()\n except Exception:\n logging.error(traceback.print_exc())\n loop.quit()\n\n pipeline.set_state(Gst.State.NULL)\n\nmain()\n\n# This is a dummy handler and will not be invoked\n# Instead the code above will be executed in an infinite loop for our example\ndef lambda_handler(event, context):\n return\n"
},
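Since lambda_function.py reads its whole configuration from the environment, it can be exercised locally without Kinesis; a sketch (paths and values illustrative) that overrides PIPELINE and disables the kvssink branch:

```python
# Sketch: run the handler script locally with KVS disabled and a
# short, self-terminating test pipeline.
import os
import subprocess

env = dict(os.environ,
           USE_KVS='false',
           PIPELINE='videotestsrc num-buffers=100 ! x264enc ! h264parse ! fakesink')
subprocess.run(['python3', 'src/lambda_function.py'], env=env, check=True)
```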
{
"alpha_fraction": 0.5416761040687561,
"alphanum_fraction": 0.5486785769462585,
"avg_line_length": 33.5859375,
"blob_id": "3ddaf5bc745d65f5740171e7618a1b300e9c4c76",
"content_id": "abf1fd10ca207a5e3adbba78b909ec332ad35968",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4427,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 128,
"path": "/myplugins/python/object_detection.py",
"repo_name": "sabmeua/gg_gst",
"src_encoding": "UTF-8",
"text": "import os\nimport logging\nimport numpy as np\nimport tensorflow.compat.v1 as tf\nimport gstreamer.utils as utils\nimport cv2\nimport json\nimport traceback\n\nimport gi\ngi.require_version('GstBase', '1.0')\ngi.require_version('Gst', '1.0')\ngi.require_version('GLib', '2.0')\ngi.require_version('GObject', '2.0')\n\nfrom gi.repository import GstBase, Gst, GLib, GObject\nfrom gstreamer.gst_objects_info_meta import gst_meta_write\n\nGst.init(None)\n\nPROC_WIDTH = 300\nPROC_HEIGHT = 300\nPROC_INTERPOLATION = cv2.INTER_NEAREST\n\nPERSON = 1\nTHRESHOLD = 0.9\n\nclass GstObjectDetection(GstBase.BaseTransform):\n __gstmetadata__ = ('Object Detection',\n 'Transform',\n 'Object Detection plugin',\n 'sabmeua<[email protected]>')\n\n __gsttemplates__ = (Gst.PadTemplate.new('src',\n Gst.PadDirection.SRC,\n Gst.PadPresence.ALWAYS,\n Gst.Caps.from_string('video/x-raw,format=RGB')),\n Gst.PadTemplate.new('sink',\n Gst.PadDirection.SINK,\n Gst.PadPresence.ALWAYS,\n Gst.Caps.from_string('video/x-raw,format=RGB')))\n __gproperties__ = {\n \"model\": (str,\n \"Path to model pb file\",\n \"Path to model pb file\",\n None,\n GObject.ParamFlags.READWRITE)\n }\n\n def do_get_property(self, prop: GObject.GParamSpec):\n if prop.name != 'model':\n raise AttributeError('Unknown property %s' % prop.name)\n\n return self.model\n\n def do_set_property(self, prop: GObject.GParamSpec, value):\n logging.debug('do_set_property')\n if prop.name != 'model':\n raise AttributeError('Unknown property %s' % prop.name)\n\n self.model = value\n model_dir = os.environ.get('AWS_GG_RESOURCE_PREFIX', '/')\n model_path = f'{model_dir}/{self.model}'\n\n # Prepare graph\n graph = tf.Graph()\n with graph.as_default():\n graph_def = tf.GraphDef()\n with tf.io.gfile.GFile(model_path, 'rb') as f:\n graph_def.ParseFromString(f.read())\n tf.import_graph_def(graph_def, name='')\n\n # Prepare in/out tensors\n self.output_tensors = {\n 'labels': graph.get_tensor_by_name('detection_classes:0'),\n 'scores': graph.get_tensor_by_name('detection_scores:0')\n }\n self.input_tensors = {\n 'images': graph.get_tensor_by_name('image_tensor:0')\n }\n self.session = tf.Session(graph=graph)\n\n def __init__(self):\n super().__init__()\n self.session = None\n self.input_tensors = None\n self.output_tensors = None\n self.model = None\n logging.debug('init')\n\n def resize(self, image: np.ndarray) -> np.ndarray:\n return cv2.resize(image, (PROC_WIDTH, PROC_HEIGHT), PROC_INTERPOLATION)\n\n def process(self, image: np.ndarray):\n image = self.resize(image)\n image = np.expand_dims(image, axis=0)\n # Run inference\n result = self.session.run(self.output_tensors,\n feed_dict={self.input_tensors['images']: image})\n\n # Select results\n detections = []\n for labels, scores in zip(result['labels'], result['scores']):\n persons = filter(lambda d: d[0] == PERSON and d[1] > THRESHOLD, zip(labels, scores))\n for _, score in persons:\n logging.debug(f'Detect score={score}')\n detections.append({\n 'class_name': 'person',\n 'confidence': float(score),\n 'bounding_box': [0, 0, 0, 0], # dummy\n })\n return detections\n\n def do_transform_ip(self, buffer: Gst.Buffer):\n try:\n caps = self.sinkpad.get_current_caps()\n image = utils.gst_buffer_with_caps_to_ndarray(buffer, caps)\n detections = self.process(image)\n gst_meta_write(buffer, detections)\n\n except Exception as err:\n Gst.error(f'Error {self}: {traceback.format_exc()}')\n\n return Gst.FlowReturn.OK\n\n\nGObject.type_register(GstObjectDetection)\n__gstelementfactory__ = ('object_detection', Gst.Rank.NONE, 
GstObjectDetection)\n"
}
] | 6 |
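The object_detection element above keeps only class 1 ('person') above THRESHOLD = 0.9; the selection step in isolation, with dummy stand-ins for the TensorFlow outputs:

```python
# Sketch: the person-filtering rule used by the plugin, on fake scores.
import numpy as np

PERSON, THRESHOLD = 1, 0.9
labels = np.array([1, 1, 17])         # 17 would be another COCO class
scores = np.array([0.95, 0.42, 0.88])

persons = [(int(l), float(s)) for l, s in zip(labels, scores)
           if l == PERSON and s > THRESHOLD]
print(persons)  # -> [(1, 0.95)]
```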
franciscoliu/AdversaryLossLandscape
|
https://github.com/franciscoliu/AdversaryLossLandscape
|
06e487de4e64a17d5de40f01c4e3ea969ed5e074
|
73456788b38bebeac40b833f2ac5d6cb2f1530ea
|
afe270f2faccdb143569ab0863f9e8e7d37a5c83
|
refs/heads/master
| 2023-06-05T22:23:47.718976 | 2021-07-03T02:05:35 | 2021-07-03T02:05:35 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6602423191070557,
"alphanum_fraction": 0.6729074716567993,
"avg_line_length": 45.53845977783203,
"blob_id": "575fc5072c5eb2ccc11377cf9ccd342b200435bb",
"content_id": "63bff860d2ae59e096b89393e26b0f1f298c2c25",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1816,
"license_type": "permissive",
"max_line_length": 136,
"num_lines": 39,
"path": "/util/dataset.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import torch\nfrom torchvision import datasets, transforms\n\nimport numpy as np\n\ndef mnist(batch_size, data_augmentation = True, shuffle = True):\n\n transform = transforms.Compose([transforms.ToTensor()])\n\n trainset = datasets.MNIST('./data', train = True, download = True, transform = transform)\n testset = datasets.MNIST('./data', train = False, download = True, transform = transform)\n\n train_loader = torch.utils.data.DataLoader(trainset, batch_size = batch_size, shuffle = shuffle, num_workers = 1, pin_memory = True)\n test_loader = torch.utils.data.DataLoader(testset, batch_size = batch_size, shuffle = False, num_workers = 1, pin_memory = True)\n\n classes = ('0', '1', '2', '3', '4', '5', '6', '7', '8', '9')\n\n return train_loader, test_loader, classes\n\ndef cifar10(batch_size, data_augmentation = True, shuffle = True):\n\n transform_train = transforms.Compose([\n transforms.RandomCrop(32, padding = 4),\n transforms.RandomHorizontalFlip(),\n transforms.ToTensor()\n ]) if data_augmentation == True else transforms.Compose([transforms.ToTensor()])\n transform_test = transforms.Compose([\n transforms.ToTensor()\n ])\n\n trainset = datasets.CIFAR10(root = './data', train = True, download = True, transform = transform_train)\n testset = datasets.CIFAR10(root = './data', train = False, download = True, transform = transform_test)\n\n train_loader = torch.utils.data.DataLoader(trainset, batch_size = batch_size, shuffle = shuffle, num_workers = 4, pin_memory = True)\n test_loader = torch.utils.data.DataLoader(testset, batch_size = batch_size, shuffle = False, num_workers = 4, pin_memory = True)\n\n classes = ('plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck')\n\n return train_loader, test_loader, classes\n\n"
},
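Typical use of the loaders defined above (the first call downloads the dataset into ./data):

```python
# Sketch: fetch one CIFAR10 batch through util/dataset.py.
from util.dataset import cifar10

train_loader, test_loader, classes = cifar10(batch_size=128)
images, labels = next(iter(train_loader))
print(images.shape, labels.shape)  # torch.Size([128, 3, 32, 32]), torch.Size([128])
```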
{
"alpha_fraction": 0.7267873287200928,
"alphanum_fraction": 0.7647582292556763,
"avg_line_length": 60.85321044921875,
"blob_id": "03b8dd4a65e0f76054e7f518a9fb10c38c7f8edb",
"content_id": "26d2dfaea71fd18fbb9f7c8480c7ced3c39a6ef0",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 6742,
"license_type": "permissive",
"max_line_length": 427,
"num_lines": 109,
"path": "/README.md",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "Code for NeurIPS 2020 Paper [\"On the Loss Landscape of Adversarial Training: Identifying Challenges and How to Overcome Them\"](https://arxiv.org/pdf/2006.08403).\nSuitable to run on NVIDIA GPU machines.\n\n## Requirements\n\n```\npython = 3.7\nnumpy >= 1.16\ntorch >= 1.3\ntorchvision >= 0.4\n```\n\n## Abstract\n\nWe analyze the influence of adversarial training on the loss landscape of machine learning models.\nTo this end, we first provide analytical studies of the properties of adversarial loss functions under different adversarial budgets.\nWe then demonstrate that the adversarial loss landscape is less favorable to optimization, due to increased curvature and more scattered gradients.\nOur conclusions are validated by numerical analyses, which show that training under large adversarial budgets impede the escape from suboptimal random initialization, cause non-vanishing gradients and make the model find sharper minima.\nBased on these observations, we show that a periodic adversarial scheduling (PAS) strategy can effectively overcome these challenges, yielding better results than vanilla adversarial training while being much less sensitive to the choice of learning rate. \n\n## Modules\n\nFolder `util` contains all supporting functions.\nSpecially, `util/attack.py` has implmentations for different attackers, `util/seq_parser.py` has all sequential functions for learning rate scheduling and adversarial budget scheduling; `util/optim_parser.py` has constructors of all supported optimizers; `util/models.py` has all model architectures, parameterized by a width factor w.\n\nFolder `run` contains all scripts to run experiments.\nYou can use `python run/XXX.py -h` to get the information about all command line parameters.\nWe briefly introduce different files below:\n\n```\n# Train and Test (Section 4 and 5)\nrun/train_normal.py: train models by defining different PGD attacks, learning rate scheduling and adversarial budget scheduling.\nrun/test_ensemble.py: test the performance of models or ensemble of models under adversarial attacks.\n# Numerical Analysis (Section 4)\nrun/perturb_param.py: perturb the model parameter given the original model and the perturbation.\nrun/scan_param.py: given the model, two directions in the parameter space and adversarial attacks, test the accuracy and loss of parameters spanned by these two directions.\nrun/generate_adversary.py: generate adversarial examples by PGD given the model and the adversarial budgets.\nrun/calc_hessian.py: estimate the top Hessian eigenvalues given a trained model.\n# Find the flat curves connecting the parameters of two models (Appendix C)\nrun/train_curve.py: train the Bezier curves to connect two given minima.\nrun/scan_curve.py: scan the loss and the accuracy along a trained Bezier curve.\n```\n\nFolder `analysis` contains some functions to analyze the checkpoints produced by scripts under `run`:\n\n```\nanalysis/analyze_adversary.py: calculate the average cosine similarity of the perturbations.\nanalysis/calc_param_distance.py: calculate the distance of two models in the parameter space.\n```\n\n## Examples\n\nBelow are the configurations for different attackers used in the paper, use the following configurations after flag `--attack` in the command when you need to construct the corresponding attacker. 
For each attacker, the first config is for MNIST and the second is for CIFAR10.\n\n* PGD10: `name=pgd,step_size=0.01,threshold=0.4,iter_num=100,order=-1` `name=pgd,step_size=2,threshold=8,iter_num=10,order=-1`\n* PGD100: `name=pgd,step_size=0.01,threshold=0.4,iter_num=100,order=-1` `name=pgd,step_size=1,threshold=8,iter_num=100,order=-1`\n* APGD100 CE: `name=apgd,threshold=0.4,iter_num=100,order=-1,rho=0.75,loss_type=ce` `name=apgd,threshold=8,iter_num=100,order=-1,rho=0.75,loss_type=ce`\n* APGD100 DLR: `name=apgd,threshold=0.4,iter_num=100,order=-1,rho=0.75,loss_type=dlr` `name=apgd,threshold=8,iter_num=100,order=-1,rho=0.75,loss_type=dlr`\n* Square5K: `name=square,threshold=0.4,iter_num=5000,order=-1,window_size_factor=0` `name=square,threshold=8,iter_num=5000,order=-1,window_size_factor=0`\n\nBelow we give some examples to run the experiments we mentioned in the paper. Replace the name in `$$` with the one you prefer.\n\n1. Calculate the Hessian top 20 eigenvalues of a MNIST model under the adversarial budget $\\epsilon = 0.1$.\n\n```\npython run/calc_hessian.py --model_type lenet --width 16 --model2load $MODEL_TO_LOAD$ --attack name=pgd,step_size=0.01,threshold=0.1,iter_num=20,order=-1 --out_file $OUTPUT_FILE$ --topk 20 --max_iter 50 --gpu $GPU_ID$\n```\n\n2. Using cosine scheduler to train LeNet model on MNIST against adversarial attacks under the budget $\\epsilon = 0.4$.\n\n```\npython run/train_normal.py --dataset mnist --epoch_num 100 --epoch_ckpts 50,75,100 --model_type lenet --width 16 --out_folder $FOLDER$ --model_name $MODEL_NAME$ --optim name=adam,lr=1e-4 --attack name=pgd,step_size=0.01,iter_num=50,order=-1,threshold=0.4 --attack_threshold_schedule name=cycle_cos,eps_min=0,eps_max=0.6,ckpt_list=0:100,max=0.4 --gpu $GPU_ID$ --lr_schedule name=constant,start_v=1e-4\n```\n\n3. Test the ensemble of three ResNet18 models on CIFAR10 against APGD DLR attacks under the budget $\\epsilon = 8 / 255$\n\n```\npython run/test_ensemble.py --batch_size 100 --model2load $MODEL1$,$MODEL2$,$MODEL3$ --out_file $OUT_FILE$ --gpu $GPU_ID$ --attack name=apgd,threshold=8,iter_num=100,order=-1,rho=0.75,loss_type=dlr --model_type resnet --dataset cifar10 --width 8\n```\n\n4. Train a Bezier curve to connect two CIFAR10 models: $MODEL1$ and $MODEL2$. The adversarial budget size is $\\epsilon = 8/255$\n\n```\npython run/train_curve.py --epoch_num 200 --dataset cifar10 --model_type resnet --width 8 --fix_points 1,0,0,0,1 --model2load $MODEL1$,,,,$MODEL2$ --curve_type bezier --out_folder $OUT_FOLDER$ --model_name $MODEL_NAME$ --optim name=sgd,lr=0.1,momentum=0.9,weight_decay=1e-6 --lr_schedule name=jump,min_jump_pt=100,jump_freq=50,power=0.1,start_v=1e-1 --attack name=pgd,step_size=2,threshold=8,iter_num=10,order=-1 --gpu $GPU_ID$\n```\n\n## Model Checkpoints\n\nThe model checkpoints of Table 1 are provided under the folder `models`. 
All experiments are run for three times.\n\n## Acknowledgments\n\nThe AutoPGD and SquareAttack modules are downloaded from the AutoAttack repo: [https://github.com/fra31/auto-attack](https://github.com/fra31/auto-attack).\nThey are put in the `external` folder.\n\n## Contact\n\nPlease contact Chen Liu ([email protected]) regarding this repository.\n\n## Citation\n\n```\n@inproceedings{liu2020loss,\n title={On the Loss Landscape of Adversarial Training: Identifying Challenges and How to Overcome Them},\n author={Liu, Chen and Salzmann, Mathieu and Lin, Tao and Tomioka, Ryota and S{\\\"u}sstrunk, Sabine},\n booktitle={Advances in Neural Information Processing Systems},\n year={2020}\n}\n```\n"
},
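The --attack values in the README above are comma-separated key=value specs; a rough sketch of how such a string maps to keyword arguments (the repo's own DictParser may coerce types differently):

```python
# Sketch: parse an --attack spec into a dict of kwargs.
def parse_attack_spec(spec):
    out = {}
    for item in spec.split(','):
        key, value = item.split('=')
        try:
            out[key] = float(value) if '.' in value else int(value)
        except ValueError:
            out[key] = value
    return out

print(parse_attack_spec('name=pgd,step_size=2,threshold=8,iter_num=10,order=-1'))
# {'name': 'pgd', 'step_size': 2, 'threshold': 8, 'iter_num': 10, 'order': -1}
```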
{
"alpha_fraction": 0.5529592633247375,
"alphanum_fraction": 0.5757764577865601,
"avg_line_length": 35.55154037475586,
"blob_id": "0fe0b1d1d9aca62d35ab20d9cb9a0b787c7ed40d",
"content_id": "e20c6c85abb08e65ce8243fa569903972148a496",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 27304,
"license_type": "permissive",
"max_line_length": 147,
"num_lines": 747,
"path": "/util/models.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import os\nimport sys\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nclass DataNormalizeLayer(nn.Module):\n\n def __init__(self, bias, scale):\n\n super(DataNormalizeLayer, self).__init__()\n\n self._bias = torch.FloatTensor(1).fill_(bias).view(1, -1, 1, 1)\n self._scale = torch.FloatTensor(1).fill_(scale).view(1, -1, 1, 1)\n\n def forward(self, x):\n\n x = (x - self._bias.to(x.device)) * self._scale.to(x.device)\n\n return x\n\nmnist_normalizer = DataNormalizeLayer(bias = 0., scale = 1.)\ncifar10_normalizer = DataNormalizeLayer(bias = 0., scale = 1.)\n\n## Normal Model\n\nclass MNIST_LeNet(nn.Module):\n\n def __init__(self, width = 1, bias = True):\n\n super(MNIST_LeNet, self).__init__()\n\n self.width = width\n self.bias = bias\n print('MNIST LeNet with width = %d, bias = %s' % (self.width, self.bias))\n\n self.conv1 = nn.Conv2d(1, 2 * self.width, 5, 1, 2, bias = self.bias)\n self.relu1 = nn.ReLU()\n self.conv2 = nn.Conv2d(2 * self.width, 4 * self.width, 5, 1, 2, bias = self.bias)\n self.relu2 = nn.ReLU()\n self.fc1 = nn.Linear(7 * 7 * 4 * self.width, 64 * self.width, bias = self.bias)\n self.relu3 = nn.ReLU()\n self.fc2 = nn.Linear(64 * self.width, 10)\n\n def forward(self, x):\n\n x = mnist_normalizer(x)\n\n x = self.relu1(self.conv1(x))\n x = F.max_pool2d(x, 2)\n x = self.relu2(self.conv2(x))\n x = F.max_pool2d(x, 2)\n x = x.view(-1, 7 * 7 * 4 * self.width)\n x = self.fc2(self.relu3(self.fc1(x)))\n\n return x\n\n def obtain_features(self, x):\n\n maps = []\n maps.append(mnist_normalizer(x))\n maps.append(self.conv1(maps[-1]))\n maps.append(self.relu1(maps[-1]))\n maps.append(F.max_pool2d(maps[-1], 2))\n maps.append(self.conv2(maps[-1]))\n maps.append(self.relu2(maps[-1]))\n maps.append(F.max_pool2d(maps[-1], 2))\n maps.append(self.fc1(maps[-1].view(-1, 7 * 7 * 4 * self.width)))\n maps.append(self.relu3(maps[-1]))\n maps.append(self.fc2(maps[-1]))\n\n return maps\n\nclass CIFAR10_LeNet(nn.Module):\n\n def __init__(self, width = 1, bias = True):\n\n super(CIFAR10_LeNet, self).__init__()\n\n self.width = width\n self.bias = bias\n print('CIFAR10 LeNet with width = %d, bias = %s' % (self.width, self.bias))\n\n self.conv1 = nn.Conv2d(3, 6 * self.width, 5, bias = self.bias)\n self.relu1 = nn.ReLU()\n self.conv2 = nn.Conv2d(6 * self.width, 16 * self.width, 5, bias = self.bias)\n self.relu2 = nn.ReLU()\n self.fc1 = nn.Linear(5 * 5 * 16 * self.width, 120 * self.width, bias = self.bias)\n self.relu3 = nn.ReLU()\n self.fc2 = nn.Linear(120 * self.width, 84 * self.width, bias = self.bias)\n self.relu4 = nn.ReLU()\n self.fc3 = nn.Linear(84 * self.width, 10)\n\n def forward(self, x):\n\n x = cifar10_normalizer(x)\n\n x = self.relu1(self.conv1(x))\n x = F.max_pool2d(x, 2)\n x = self.relu2(self.conv2(x))\n x = F.max_pool2d(x, 2)\n x = x.view(-1, 5 * 5 * 16 * self.width)\n x = self.fc3(self.relu4(self.fc2(self.relu3(self.fc1(x)))))\n\n return x\n\n def obtain_features(self, x):\n\n maps = []\n maps.append(cifar10_normalizer(x))\n maps.append(self.conv1(maps[-1]))\n maps.append(self.relu1(maps[-1]))\n maps.append(F.max_pool2d(maps[-1], 2))\n maps.append(self.conv2(maps[-1]))\n maps.append(self.relu2(maps[-1]))\n maps.append(F.max_pool2d(maps[-1], 2))\n maps.append(self.fc1(maps[-1].view(-1, 5 * 5 * 16 * self.width)))\n maps.append(self.relu3(maps[-1]))\n maps.append(self.fc2(maps[-1]))\n maps.append(self.relu4(maps[-1]))\n maps.append(self.fc3(maps[-1]))\n\n return maps\n\nclass CIFAR10_VGG(nn.Module):\n\n def __init__(self, width = 1, bias = True):\n\n 
super(CIFAR10_VGG, self).__init__()\n\n self.width = width\n self.bias = bias\n print('CIFAR10 VGG16 with width = %d, bias = %s' % (self.width, self.bias))\n\n layer_template = [4, 4, 'M', 8, 8, 'M', 16, 16, 16, 'M', 32, 32, 32, 'M', 32, 32, 32, 'M']\n layer_list = []\n\n in_planes = 3\n for layer_label in layer_template:\n if layer_label == 'M':\n layer_list += [nn.MaxPool2d(kernel_size = 2, stride = 2),]\n else:\n out_planes = int(layer_label * self.width)\n layer_list += [nn.Conv2d(in_planes, out_planes, kernel_size = 3, padding = 1, bias = self.bias),\\\n nn.BatchNorm2d(out_planes), nn.ReLU()]\n in_planes = out_planes\n layer_list += [nn.AvgPool2d(kernel_size = 1, stride = 1),]\n\n self.feature_extractor = nn.Sequential(*layer_list)\n self.classifier = nn.Linear(32 * self.width, 10)\n\n def forward(self, x):\n\n x = cifar10_normalizer(x)\n\n x = self.feature_extractor(x)\n x = x.view(-1, 32 * self.width)\n x = self.classifier(x)\n\n return x\n\n def obtain_features(self, x):\n\n maps = []\n maps.append(cifar10_normalizer(x))\n for layer in self.feature_extractor:\n maps.append(layer(maps[-1]))\n maps.append(self.classifier(maps[-1].view(-1, 32 * self.width)))\n\n return maps\n\nclass ResNet_Block(nn.Module):\n\n def __init__(self, in_planes, out_planes, stride = 1):\n\n super(ResNet_Block, self).__init__()\n\n self.in_planes = in_planes\n self.out_planes = out_planes\n self.stride = stride\n\n self.conv1 = nn.Conv2d(in_planes, out_planes, kernel_size = 3, stride = self.stride, padding = 1, bias = False)\n self.bn1 = nn.BatchNorm2d(out_planes)\n self.relu1 = nn.ReLU()\n self.conv2 = nn.Conv2d(out_planes, out_planes, kernel_size = 3, stride = 1, padding = 1, bias = False)\n self.bn2 = nn.BatchNorm2d(out_planes)\n self.relu2 = nn.ReLU()\n\n self.shortcut = nn.Sequential()\n if self.stride != 1 or self.in_planes != self.out_planes:\n self.shortcut = nn.Sequential(\n nn.Conv2d(self.in_planes, self.out_planes, kernel_size = 1, stride = self.stride, bias = False),\n nn.BatchNorm2d(self.out_planes)\n )\n\n def forward(self, x):\n\n out = self.relu1(self.bn1(self.conv1(x)))\n out = self.bn2(self.conv2(out))\n out += self.shortcut(x)\n out = self.relu2(out)\n\n return out\n\n def obtain_pre_bn(self, x):\n\n pre1 = self.conv1(x)\n pre2 = self.conv2(self.relu1(self.bn1(pre1)))\n out = self.bn2(pre2)\n\n pre_bn_list = [pre1, pre2]\n layer_bn_list = [self.bn1, self.bn2]\n label_bn_list = ['bn1', 'bn2']\n\n if self.stride != 1 or self.in_planes != self.out_planes:\n sc_x = x\n for layer in self.shortcut:\n if isinstance(nn.BatchNorm2d):\n pre_bn_list.append(sc_x)\n layer_bn_list.append(layer)\n label_bn_list.append('shortcut')\n sc_x = layer(x)\n\n out += self.shortcut(x)\n out = self.relu2(out)\n\n return out, pre_bn_list, layer_bn_list, label_bn_list\n\nclass CIFAR10_ResNet(nn.Module):\n\n def __init__(self, num_block_list = [2, 2, 2, 2], width = 1):\n\n super(CIFAR10_ResNet, self).__init__()\n\n self.width = width\n self.num_block_list = num_block_list\n self.in_planes = int(4 * self.width)\n print('CIFAR10 ResNet: num_block_list = %s, width = %d' % (self.num_block_list, self.width))\n\n self.conv1 = nn.Conv2d(3, int(4 * self.width), kernel_size = 3, stride = 1, padding = 1, bias = False)\n self.bn1 = nn.BatchNorm2d(int(4 * self.width))\n self.relu1 = nn.ReLU()\n\n self.layer1 = self._make_layer(out_planes = int(4 * self.width), num_blocks = num_block_list[0], stride = 1)\n self.layer2 = self._make_layer(out_planes = int(8 * self.width), num_blocks = num_block_list[1], stride = 2)\n self.layer3 = 
self._make_layer(out_planes = int(16 * self.width), num_blocks = num_block_list[2], stride = 2)\n self.layer4 = self._make_layer(out_planes = int(32 * self.width), num_blocks = num_block_list[3], stride = 2)\n\n self.classifier = nn.Linear(int(32 * self.width), 10)\n\n def _make_layer(self, out_planes, num_blocks, stride):\n\n stride_list = [stride,] + [1,] * (num_blocks - 1)\n layers = []\n for stride in stride_list:\n layers.append(ResNet_Block(in_planes = self.in_planes, out_planes = out_planes, stride = stride))\n self.in_planes = out_planes\n return nn.Sequential(*layers)\n\n def forward(self, x):\n\n x = cifar10_normalizer(x)\n\n x = self.relu1(self.bn1(self.conv1(x)))\n x = self.layer1(x)\n x = self.layer2(x)\n x = self.layer3(x)\n x = self.layer4(x)\n x = F.avg_pool2d(x, 4)\n x = x.view(-1, int(32 * self.width))\n x = self.classifier(x)\n\n return x\n\n def obtain_features(self, x):\n\n maps = []\n maps.append(cifar10_normalizer(x))\n maps.append(self.conv1(maps[-1]))\n maps.append(self.bn1(maps[-1]))\n maps.append(relu(maps[-1]))\n for layer in self.layer1:\n maps.append(layer(maps[-1]))\n for layer in self.layer2:\n maps.append(layer(maps[-1]))\n for layer in self.layer3:\n maps.append(layer(maps[-1]))\n for layer in self.layer4:\n maps.append(layer(maps[-1]))\n maps.append(F.avg_pool2d(maps[-1], 4))\n maps.append(self.classifier(maps[-1].view(-1, int(32 * self.width))))\n\n return maps\n\n def obtain_pre_bn(self, x):\n\n x = cifar10_normalizer(x)\n\n pre_bn_list = []\n layer_bn_list = []\n label_bn_list = []\n\n pre1 = self.conv1(x)\n pre_bn_list.append(pre1)\n layer_bn_list.append(self.bn1)\n label_bn_list.append('in_conv')\n x = self.relu1(self.bn1(pre1))\n\n for idx, layer in enumerate(self.layer1):\n x, pre_list, layer_list, label_list = layer.obtain_pre_bn(x)\n pre_bn_list += pre_list\n layer_bn_list += layer_list\n label_bn_list += list(map(lambda x: 'layer1.%d.' % (idx + 1) + x, label_list))\n\n for idx, layer in enumerate(self.layer2):\n x, pre_list, layer_list, label_list = layer.obtain_pre_bn(x)\n pre_bn_list += pre_list\n layer_bn_list += layer_list\n label_bn_list += list(map(lambda x: 'layer2.%d.' % (idx + 1) + x, label_list))\n\n for idx, layer in enumerate(self.layer3):\n x, pre_list, layer_list, label_list = layer.obtain_pre_bn(x)\n pre_bn_list += pre_list\n layer_bn_list += layer_list\n label_bn_list += list(map(lambda x: 'layer3.%d.' % (idx + 1) + x, label_list))\n\n for idx, layer in enumerate(self.layer4):\n x, pre_list, layer_list, label_list = layer.obtain_pre_bn(x)\n pre_bn_list += pre_list\n layer_bn_list += layer_list\n label_bn_list += list(map(lambda x: 'layer4.%d.' 
% (idx + 1) + x, label_list))\n\n x = F.avg_pool2d(x, 4)\n x = x.view(-1, int(32 * self.width))\n x = self.classifier(x)\n\n return x, pre_bn_list, layer_bn_list, label_bn_list\n\n## Curve Model\nclass CurveModule(nn.Module):\n\n def __init__(self, param_names, fix_points):\n '''\n >>> param_names: list of string, parameter names\n >>> fix_points: list of boolean, whether the points is fixed\n '''\n\n super(CurveModule, self).__init__()\n self.param_names = param_names\n self.fix_points = fix_points\n self.num_bends = len(fix_points)\n\n def compute_point(self, coeffs):\n '''\n >>> coeffs: list of float, the weights of each points\n '''\n\n param_list = [0.,] * len(self.param_names)\n for param_idx, param_name in enumerate(self.param_names):\n for point_idx, coeff in enumerate(coeffs):\n param = self.__getattr__('%s_%d' % (param_name, point_idx))\n if param is not None:\n param_list[param_idx] += param * coeff\n else:\n param_list[param_idx] = None\n\n return param_list\n\n def init(self, mode = 'interp'):\n\n if mode.lower() in ['interp',]:\n assert self.fix_points[-1] == True and self.fix_points[0] == True\n assert sum(self.fix_points[1:-1]) == 0\n seg_num = len(self.fix_points) - 1\n for idx in range(1, seg_num):\n w1 = idx / seg_num\n w2 = 1. - w1\n for name in self.param_names:\n param = self.__getattr__('%s_%d' % (name, idx))\n if param is None:\n continue\n param.data = w1 * self.__getattr__('%s_%d' % (name, 0)).data + w2 * self.__getattr__('%s_%d' % (name, seg_num))\n else:\n raise ValueError('Unrecognized mode: %s' % mode)\n\n\nclass CurveLinear(CurveModule):\n\n def __init__(self, in_features, out_features, fix_points, bias = True):\n\n super(CurveLinear, self).__init__(param_names = ('weight', 'bias'), fix_points = fix_points)\n\n self.in_features = in_features\n self.out_features = out_features\n self.fix_points = fix_points\n self.bias = bias\n\n for point_idx, fix_point in enumerate(fix_points):\n\n self.register_parameter('weight_%d' % point_idx,\n nn.Parameter(torch.Tensor(self.out_features, self.in_features), requires_grad = not fix_point))\n self.register_parameter('bias_%d' % point_idx,\n None if not self.bias else nn.Parameter(torch.Tensor(self.out_features,), requires_grad = not fix_point))\n\n def forward(self, x, coeffs):\n\n weight, bias = self.compute_point(coeffs)\n return F.linear(x, weight, bias)\n\n def reset_parameters(self,):\n\n stdv = 1. 
/ math.sqrt(self.in_features)\n for idx in range(self.num_bends):\n self.__getattr__('weight_%d' % idx).data.uniform_(-stdv, stdv)\n if self.__getattr__('bias_%d' % idx) is not None:\n self.__getattr__('bias_%d' % idx).data.uniform_(-stdv, stdv)\n\n def load_points(self, name, param, index):\n\n assert index < self.num_bends\n self.__getattr__('%s_%d' % (name, index)).data = param.data\n\nclass CurveConv2d(CurveModule):\n\n def __init__(self, in_channels, out_channels, kernel_size, fix_points, stride = 1,\n padding = 0, dilation = 1, groups = 1, bias = True):\n\n super(CurveConv2d, self).__init__(param_names = ('weight', 'bias'), fix_points = fix_points)\n\n self.in_channels = in_channels\n self.out_channels = out_channels\n self.kernel_size = kernel_size\n self.fix_points = fix_points\n self.stride = stride\n self.padding = padding\n self.dilation = dilation\n self.groups = groups\n self.bias = bias\n\n for point_idx, fix_point in enumerate(fix_points):\n\n self.register_parameter('weight_%d' % point_idx,\n nn.Parameter(torch.Tensor(self.out_channels, self.in_channels // groups, kernel_size, kernel_size), requires_grad = not fix_point))\n self.register_parameter('bias_%d' % point_idx,\n None if not self.bias else nn.Parameter(torch.Tensor(self.out_channels), requires_grad = not fix_point))\n\n def forward(self, x, coeffs):\n\n weight, bias = self.compute_point(coeffs)\n return F.conv2d(x, weight, bias, self.stride, self.padding, self.dilation, self.groups)\n\n def reset_parameters(self,):\n\n stdv = 1. / math.sqrt(self.in_channels * self.kernel_size ** 2)\n for idx in range(self.num_bends):\n self.__getattr__('weight_%d' % idx).data.uniform_(-stdv, stdv)\n if self.__getattr__('bias_%d' % idx) is not None:\n self.__getattr__('bias_%d' % idx).data.uniform_(-stdv, stdv)\n\n def load_points(self, name, param, index):\n\n assert index < self.num_bends\n self.__getattr__('%s_%d' % (name, index)).data = param.data\n\nclass CurveBatchNorm2d(CurveModule):\n\n def __init__(self, num_features, fix_points, eps = 1e-5, momentum = 0.1, affine = True, track_running_stats = True):\n\n super(CurveBatchNorm2d, self).__init__(param_names = ('weight', 'bias'), fix_points = fix_points)\n\n self.num_features = num_features\n self.fix_points = fix_points\n self.eps = eps\n self.momentum = momentum\n self.affine = affine\n self.track_running_stats = track_running_stats\n\n for point_idx, fix_point in enumerate(fix_points):\n\n self.register_parameter('weight_%d' % point_idx,\n None if not self.affine else nn.Parameter(torch.Tensor(self.num_features,), requires_grad = not fix_point))\n self.register_parameter('bias_%d' % point_idx,\n None if not self.affine else nn.Parameter(torch.Tensor(self.num_features,), requires_grad = not fix_point))\n\n self.register_buffer('running_mean', torch.zeros(self.num_features) if self.track_running_stats else None)\n self.register_buffer('running_var', torch.ones(self.num_features) if self.track_running_stats else None)\n self.register_buffer('num_batches_tracked', torch.tensor(0, dtype=torch.long) if self.track_running_stats else None)\n\n def forward(self, x, coeffs):\n\n if self.training and self.track_running_stats:\n self.num_batches_tracked += 1\n exponential_average_factor = 1. 
/ self.num_batches_tracked.item() if self.momentum is None else self.momentum\n else:\n exponential_average_factor = 0.\n\n weight, bias = self.compute_point(coeffs)\n return F.batch_norm(x, self.running_mean, self.running_var, weight, bias,\n self.training or not self.track_running_stats, exponential_average_factor, self.eps)\n\n def reset_running_stats(self,):\n\n if self.track_running_stats:\n self.running_mean.zero_()\n self.running_var.fill_(1)\n self.num_batches_tracked.zero_()\n\n def reset_parameters(self,):\n\n self.reset_running_stats()\n if self.affine:\n for idx in range(self.num_bends):\n self.__getattr__('weight_%d' % idx).data.uniform_()\n self.__getattr__('bias_%d' % idx).data.zero_()\n\n def extra_repr(self,):\n\n return '{num_features}, eps={eps}, momentum={momentum}, affine={affine}, ' \\\n 'track_running_stats={track_running_stats}'.format(**self.__dict__)\n\n def _load_from_state_dict(self, state_dict, prefix, metadata, strict,\n missing_keys, unexpected_keys, error_msgs):\n\n version = metadata.get('version', None)\n\n if (version is None or version < 2) and self.track_running_stats:\n num_batches_tracked_key = prefix + 'num_batches_tracked'\n if num_batches_tracked_key not in state_dict:\n state_dict[num_batches_tracked_key] = torch.tensor(0, dtype=torch.long)\n\n super(CurveBatchNorm2d, self)._load_from_state_dict(\n state_dict, prefix, metadata, strict,\n missing_keys, unexpected_keys, error_msgs)\n\n def load_points(self, name, param, index):\n\n assert index < self.num_bends\n self.__getattr__('%s_%d' % (name, index)).data = param.data\n\nclass Curve_MNIST_LeNet(nn.Module):\n\n def __init__(self, fix_points, width = 1, bias = True):\n\n super(Curve_MNIST_LeNet, self).__init__()\n\n self.fix_points = fix_points\n self.num_bends = len(fix_points)\n self.width = width\n self.bias = bias\n self.num_bends = len(fix_points)\n\n print('Curve MNIST LeNet with width = %d, bias = %s' % (self.width, self.bias))\n\n self.conv1 = CurveConv2d(1, 2 * self.width, 5, fix_points, stride = 1, padding = 2, bias = self.bias)\n self.relu1 = nn.ReLU()\n self.conv2 = CurveConv2d(2 * self.width, 4 * self.width, 5, fix_points, stride = 1, padding = 2, bias = self.bias)\n self.relu2 = nn.ReLU()\n self.fc1 = CurveLinear(7 * 7 * 4 * self.width, 64 * self.width, fix_points, bias = self.bias)\n self.relu3 = nn.ReLU()\n self.fc2 = CurveLinear(64 * self.width, 10, fix_points)\n\n def forward(self, x, coeffs):\n\n x = mnist_normalizer(x)\n\n x = self.relu1(self.conv1(x, coeffs))\n x = F.max_pool2d(x, 2)\n x = self.relu2(self.conv2(x, coeffs))\n x = F.max_pool2d(x, 2)\n x = x.view(-1, 7 * 7 * 4 * self.width)\n x = self.relu3(self.fc1(x, coeffs))\n x = self.fc2(x, coeffs)\n\n return x\n\n def load_points(self, model, index):\n\n self.conv1.load_points('weight', model.conv1.weight, index)\n self.conv2.load_points('weight', model.conv2.weight, index)\n self.fc1.load_points('weight', model.fc1.weight, index)\n self.fc2.load_points('weight', model.fc2.weight, index)\n\n if self.bias == True:\n self.conv1.load_points('bias', model.conv1.bias, index)\n self.conv2.load_points('bias', model.conv2.bias, index)\n self.fc1.load_points('bias', model.fc1.bias, index)\n self.fc2.load_points('bias', model.fc2.bias, index)\n\n def init(self, mode = 'interp'):\n\n self.conv1.init(mode)\n self.conv2.init(mode)\n self.fc1.init(mode)\n self.fc2.init(mode)\n\nclass Curve_ResNet_Block(nn.Module):\n\n def __init__(self, fix_points, in_planes, out_planes, stride = 1):\n\n super(Curve_ResNet_Block, self).__init__()\n\n 
self.fix_points = fix_points\n self.in_planes = in_planes\n self.out_planes = out_planes\n self.stride = stride\n\n self.conv1 = CurveConv2d(in_planes, out_planes, 3, fix_points, stride = self.stride, padding = 1, bias = False)\n self.bn1 = CurveBatchNorm2d(out_planes, fix_points)\n self.relu1 = nn.ReLU()\n self.conv2 = CurveConv2d(out_planes, out_planes, 3, fix_points, stride = 1, padding = 1, bias = False)\n self.bn2 = CurveBatchNorm2d(out_planes, fix_points)\n self.relu2 = nn.ReLU()\n\n self.shortcut = None\n if self.stride != 1 or self.in_planes != self.out_planes:\n self.shortcut = nn.Sequential(\n CurveConv2d(in_planes, out_planes, 1, fix_points, stride = self.stride, bias = False),\n CurveBatchNorm2d(out_planes, fix_points)\n )\n\n def forward(self, x, coeffs):\n\n out = self.conv1(x, coeffs)\n out = self.relu1(self.bn1(out, coeffs))\n out = self.conv2(out, coeffs)\n out = self.bn2(out, coeffs)\n\n shortcut_out = x if self.shortcut is None else self.shortcut[1](self.shortcut[0](x, coeffs), coeffs)\n out = self.relu2(out + shortcut_out)\n\n return out\n\n def load_points(self, model, index):\n\n self.conv1.load_points('weight', model.conv1.weight, index)\n self.conv2.load_points('weight', model.conv2.weight, index)\n self.bn1.load_points('weight', model.bn1.weight, index)\n self.bn1.load_points('bias', model.bn1.bias, index)\n self.bn2.load_points('weight', model.bn2.weight, index)\n self.bn2.load_points('bias', model.bn2.bias, index)\n\n if self.shortcut is not None:\n self.shortcut[0].load_points('weight', model.shortcut[0].weight, index)\n self.shortcut[1].load_points('weight', model.shortcut[1].weight, index)\n self.shortcut[1].load_points('bias', model.shortcut[1].bias, index)\n\n def init(self, mode = 'interp'):\n\n self.conv1.init(mode)\n self.conv2.init(mode)\n self.bn1.init(mode)\n self.bn2.init(mode)\n\n if self.shortcut is not None:\n self.shortcut[0].init(mode)\n self.shortcut[1].init(mode)\n\nclass Curve_CIFAR10_ResNet(nn.Module):\n\n def __init__(self, fix_points, num_block_list = [2, 2, 2, 2], width = 1):\n\n super(Curve_CIFAR10_ResNet, self).__init__()\n\n self.width = width\n self.fix_points = fix_points\n self.num_block_list = num_block_list\n self.in_planes = int(4 * self.width)\n self.num_bends = len(fix_points)\n\n print('CIFAR10 ResNet: num_block_list = %s, width = %d' % (self.num_block_list, self.width))\n\n self.conv1 = CurveConv2d(3, 4 * self.width, 3, fix_points, stride = 1, padding = 1, bias = False)\n self.bn1 = CurveBatchNorm2d(4 * self.width, fix_points)\n self.relu1 = nn.ReLU()\n\n self.layer1 = self._make_layer(fix_points, out_planes = 4 * self.width, num_blocks = num_block_list[0], stride = 1)\n self.layer2 = self._make_layer(fix_points, out_planes = 8 * self.width, num_blocks = num_block_list[1], stride = 2)\n self.layer3 = self._make_layer(fix_points, out_planes = 16 * self.width, num_blocks = num_block_list[2], stride = 2)\n self.layer4 = self._make_layer(fix_points, out_planes = 32 * self.width, num_blocks = num_block_list[3], stride = 2)\n\n self.classifier = CurveLinear(32 * self.width, 10, fix_points)\n\n def _make_layer(self, fix_points, out_planes, num_blocks, stride):\n\n stride_list = [stride, ] + [1, ] * (num_blocks - 1)\n layers = []\n for stride in stride_list:\n layers.append(Curve_ResNet_Block(fix_points, self.in_planes, out_planes, stride = stride))\n self.in_planes = out_planes\n return nn.Sequential(*layers)\n\n def forward(self, x, coeffs):\n\n x = cifar10_normalizer(x)\n\n x = self.conv1(x, coeffs)\n x = self.relu1(self.bn1(x, 
coeffs))\n\n for layer in self.layer1:\n x = layer(x, coeffs)\n for layer in self.layer2:\n x = layer(x, coeffs)\n for layer in self.layer3:\n x = layer(x, coeffs)\n for layer in self.layer4:\n x = layer(x, coeffs)\n\n x = F.avg_pool2d(x, 4)\n x = x.view(-1, 32 * self.width)\n x = self.classifier(x, coeffs)\n\n return x\n\n def load_points(self, model, index):\n\n self.conv1.load_points('weight', model.conv1.weight, index)\n self.bn1.load_points('weight', model.bn1.weight, index)\n self.bn1.load_points('bias', model.bn1.bias, index)\n\n for layer, base_layer in zip(self.layer1, model.layer1):\n layer.load_points(base_layer, index)\n for layer, base_layer in zip(self.layer2, model.layer2):\n layer.load_points(base_layer, index)\n for layer, base_layer in zip(self.layer3, model.layer3):\n layer.load_points(base_layer, index)\n for layer, base_layer in zip(self.layer4, model.layer4):\n layer.load_points(base_layer, index)\n\n self.classifier.load_points('weight', model.classifier.weight, index)\n self.classifier.load_points('bias', model.classifier.bias, index)\n\n def init(self, mode = 'interp'):\n\n self.conv1.init(mode)\n self.bn1.init(mode)\n\n for layer in self.layer1:\n layer.init(mode)\n for layer in self.layer2:\n layer.init(mode)\n for layer in self.layer3:\n layer.init(mode)\n for layer in self.layer4:\n layer.init(mode)\n\n self.classifier.init(mode)\n"
},
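A quick shape check for the ResNet defined above (width 8 matches the CIFAR10 commands in the README):

```python
# Sketch: instantiate CIFAR10_ResNet and run a dummy batch through it.
import torch
from util.models import CIFAR10_ResNet

model = CIFAR10_ResNet(width=8)
logits = model(torch.rand(4, 3, 32, 32))  # inputs in [0, 1], as the loaders produce
print(logits.shape)                       # torch.Size([4, 10])
```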
{
"alpha_fraction": 0.6351132392883301,
"alphanum_fraction": 0.6443365812301636,
"avg_line_length": 40.20000076293945,
"blob_id": "028c63a3397efba31616a35052a7f8a190e72415",
"content_id": "cefc1b85ce8f5cd77eccab05d14a7aec538959bf",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6180,
"license_type": "permissive",
"max_line_length": 126,
"num_lines": 150,
"path": "/run/generate_adversary.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import os\nimport sys\nsys.path.insert(0, './')\nimport pickle\nimport argparse\nimport numpy as np\nfrom collections import OrderedDict\n\nimport torch\nimport torch.nn as nn\nfrom datetime import datetime\n\nfrom util.io import eigenvec2ckpt\nfrom util.attack import parse_attacker\nfrom util.evaluation import *\nfrom util.models import MNIST_LeNet, CIFAR10_ResNet\nfrom util.dataset import mnist, cifar10\nfrom util.device_parser import config_visible_gpu\nfrom util.param_parser import DictParser, ListParser, IntListParser, FloatListParser, BooleanParser\n\nfrom analysis.param_space_scan import generate_vec, param_scan\n\nif __name__ == '__main__':\n\n parser = argparse.ArgumentParser()\n\n parser.add_argument('--dataset', type = str, default = 'cifar10',\n help = 'The dataset used, default = \"cifar10\"')\n parser.add_argument('--batch_size', type = int, default = 100,\n help = 'The batch size, default = 100')\n\n parser.add_argument('--model_type', type = str, default = 'resnet',\n help = 'The model type, default = \"lenet\", supported = [\"lenet\", \"resnet\"]')\n parser.add_argument('--width', type = int, default = 8,\n help = 'The width of MNIST_LeNet, default = 8')\n parser.add_argument('--bias', action = BooleanParser, default = True,\n help = 'Whether or not use bias term, default = True')\n parser.add_argument('--model2load', type = str, default = None,\n help = 'The models to be loaded as the fix point, default = None')\n\n parser.add_argument('--out_file', type = str, default = None,\n help = 'The output file')\n\n parser.add_argument('--attack', action = DictParser, default = None,\n help = 'Play adversarial attack or not, default = None.')\n\n parser.add_argument('--gpu', type = str, default = None,\n help = 'Specify the GPU to use, default = None')\n\n args = parser.parse_args()\n\n # Configure GPU\n config_visible_gpu(args.gpu)\n use_gpu = args.gpu != 'cpu' and torch.cuda.is_available()\n device = torch.device('cuda:0' if use_gpu else 'cpu')\n\n # Parse IO\n out_folder = os.path.dirname(args.out_file)\n if out_folder != '' and not os.path.exists(out_folder):\n os.makedirs(out_folder)\n\n # Parse model\n if args.dataset.lower() in ['mnist',]:\n train_loader, test_loader, classes = mnist(batch_size = args.batch_size, shuffle = False, data_augmentation = False)\n assert args.model_type.lower() in ['lenet',], 'For MNIST, only LeNet is supported'\n model = MNIST_LeNet(width = args.width, bias = args.bias)\n elif args.dataset.lower() in ['cifar10',]:\n train_loader, test_loader, classes = cifar10(batch_size = args.batch_size, shuffle = False, data_augmentation = False)\n if args.model_type.lower() in ['lenet',]:\n model = CIFAR10_LeNet(width = args.width, bias = args.bias)\n elif args.model_type.lower() in ['vgg',]:\n model = CIFAR10_VGG(width = args.width, bias = args.bias)\n elif args.model_type.lower() in ['resnet',]:\n model = CIFAR10_ResNet(width = args.width)\n if args.bias == True:\n print('WARNING: ResNet18 does not have bias term in its layers.')\n else:\n raise ValueError('Invalid model_type: %s' % args.model_type)\n else:\n raise ValueError('Unrecognized dataset: %s' % args.dataset)\n model = model.cuda() if use_gpu else model\n criterion = nn.CrossEntropyLoss()\n criterion = criterion.cuda() if use_gpu else criterion\n assert os.path.exists(args.model2load), 'File %s does not exist!' 
% args.model2load\n ckpt2load = torch.load(args.model2load)\n model.load_state_dict(ckpt2load)\n\n # Parse the attacker\n attacker = None if args.attack == None else parse_attacker(**args.attack)\n\n # Prepare the item to save\n configs = {kwargs: value for kwargs, value in args._get_kwargs()}\n tosave = {'model_summary': str(model), 'setup_config': configs, 'train_adv': None, 'test_adv': None,\n 'log': {'cmd': 'python' + ' '.join(sys.argv), 'time': datetime.now().strftime('%Y/%m/%d, %H:%M:%S')}}\n\n # Generate adversary\n model.eval()\n train_adv = []\n test_adv = []\n\n acc_calculator = AverageCalculator()\n\n print('Scan the training set')\n acc_calculator.reset()\n for idx, (data_batch, label_batch) in enumerate(train_loader, 0):\n\n sys.stdout.write('Instance %d\\r' % idx)\n\n data_batch = data_batch.cuda(device) if use_gpu else data_batch\n label_batch = label_batch.cuda(device) if use_gpu else label_batch\n\n if attacker != None:\n optimizer = torch.optim.SGD(model.parameters(), lr = 1.)\n data_batch, label_batch = attacker.attack(model, optimizer, data_batch, label_batch, criterion)\n\n logits = model(data_batch)\n acc = accuracy(logits.data, label_batch)\n acc_calculator.update(acc.item(), data_batch.size(0))\n\n data_batch = data_batch.reshape(data_batch.size(0), -1)\n data_batch = data_batch.data.cpu().numpy()\n train_adv.append(data_batch)\n print('Train Accuracy: %.2f%%' % (acc_calculator.average * 100.))\n\n print('Scan the test set')\n acc_calculator.reset()\n for idx, (data_batch, label_batch) in enumerate(test_loader, 0):\n\n sys.stdout.write('Instance %d\\r' % idx)\n\n data_batch = data_batch.cuda(device) if use_gpu else data_batch\n label_batch = label_batch.cuda(device) if use_gpu else label_batch\n\n if attacker != None:\n optimizer = torch.optim.SGD(model.parameters(), lr = 1.)\n data_batch, label_batch = attacker.attack(model, optimizer, data_batch, label_batch, criterion)\n\n logits = model(data_batch)\n acc = accuracy(logits.data, label_batch)\n acc_calculator.update(acc.item(), data_batch.size(0))\n\n data_batch = data_batch.reshape(data_batch.size(0), -1)\n data_batch = data_batch.data.cpu().numpy()\n test_adv.append(data_batch)\n print('Test Accuracy: %.2f%%' % (acc_calculator.average * 100.))\n\n tosave['train_adv'] = np.concatenate(train_adv, axis = 0)\n tosave['test_adv'] = np.concatenate(test_adv, axis = 0)\n\n pickle.dump(tosave, open(args.out_file, 'wb'))\n"
},
{
"alpha_fraction": 0.5982189774513245,
"alphanum_fraction": 0.6068622469902039,
"avg_line_length": 36.05825424194336,
"blob_id": "b7a0177f856ce5060aa45d8e51abe08a6299def8",
"content_id": "1331c710ac2aaf3de4fbe0e8553e7852643d997e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3818,
"license_type": "permissive",
"max_line_length": 125,
"num_lines": 103,
"path": "/util/curves.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import os\nimport sys\nimport math\nimport json\nfrom scipy.special import comb\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom .evaluation import *\nfrom .utility import update_bn_curve\n\ndef poly_chain(t, pt_num):\n\n weight_list = [0,] * pt_num\n seg_index = int(t * (pt_num - 1)) % (pt_num - 1)\n\n weight_p2 = t * (pt_num - 1) - seg_index\n weight_p1 = 1. - weight_p2\n\n weight_list[seg_index] = weight_p1\n weight_list[seg_index] = weight_p2\n\n return weight_list\n\ndef bezier_curve(t, pt_num):\n\n return [(1. - t) ** (pt_num - 1 - idx) * t ** idx * comb(pt_num - 1, idx) for idx in range(pt_num)]\n\ndef curve_scan(model, curve_type, t_list, train_loader, test_loader, attacker,\n out_folder, model_name, device, criterion, tosave):\n\n use_gpu = device != torch.device('cpu') and torch.cuda.is_available()\n\n for t_idx, t in enumerate(t_list):\n\n if curve_type.lower() in ['poly_chain',]:\n coeffs = poly_chain(t, pt_num = model.num_bends)\n elif curve_type.lower() in ['bezier_curve', 'bezier']:\n coeffs = bezier_curve(t, pt_num = model.num_bends)\n else:\n raise ValueError('Unrecognized curve type: %s' % curve_type)\n\n model.train()\n update_bn_curve(model, coeffs, train_loader, attacker, criterion, use_gpu)\n model.eval()\n\n acc_calculator = AverageCalculator()\n loss_calculator = AverageCalculator()\n\n for idx, (data_batch, label_batch) in enumerate(train_loader, 0):\n\n sys.stdout.write('Instance Idx: %d\\r' % idx)\n\n data_batch = data_batch.cuda(device) if use_gpu else data_batch\n label_batch = label_batch.cuda(device) if use_gpu else label_batch\n\n if attacker != None:\n optimizer = torch.optim.SGD(model.parameters(), lr = 1.)\n data_batch, label_batch = attacker.attack_curve(model, optimizer, data_batch, label_batch, criterion, coeffs)\n\n logits = model(data_batch, coeffs)\n loss = criterion(logits, label_batch)\n acc = accuracy(logits.data, label_batch)\n\n loss_calculator.update(loss.item(), data_batch.size(0))\n acc_calculator.update(acc.item(), data_batch.size(0))\n\n loss_this_epoch = loss_calculator.average\n acc_this_epoch = acc_calculator.average\n print('Training Set: t = %.2f, loss = %.4f, acc = %.2f%%' % (t, loss_this_epoch, acc_this_epoch * 100.))\n tosave['train_loss'][t] = loss_this_epoch\n tosave['train_acc'][t] = acc_this_epoch\n\n acc_calculator.reset()\n loss_calculator.reset()\n\n for idx, (data_batch, label_batch) in enumerate(test_loader, 0):\n\n sys.stdout.write('Instance Idx: %d\\r' % idx)\n\n data_batch = data_batch.cuda(device) if use_gpu else data_batch\n label_batch = label_batch.cuda(device) if use_gpu else label_batch\n\n if attacker != None:\n optimizer = torch.optim.SGD(model.parameters(), lr = 1.)\n data_batch, label_batch = attacker.attack_curve(model, optimizer, data_batch, label_batch, criterion, coeffs)\n\n logits = model(data_batch, coeffs)\n loss = criterion(logits, label_batch)\n acc = accuracy(logits.data, label_batch)\n\n loss_calculator.update(loss.item(), data_batch.size(0))\n acc_calculator.update(acc.item(), data_batch.size(0))\n\n loss_this_epoch = loss_calculator.average\n acc_this_epoch = acc_calculator.average\n print('Test Set: t = %.2f, loss = %.4f, acc = %.2f%%' % (t, loss_this_epoch, acc_this_epoch * 100.))\n tosave['test_loss'][t] = loss_this_epoch\n tosave['test_acc'][t] = acc_this_epoch\n\n json.dump(tosave, open(os.path.join(out_folder, '%s.json' % model_name), 'w'))\n\n"
},
{
"alpha_fraction": 0.5843611359596252,
"alphanum_fraction": 0.5878145098686218,
"avg_line_length": 38.34951400756836,
"blob_id": "1858cbce5679ee599ee0cf226f03ea0547fe974b",
"content_id": "6796a4e35547b6e59c264a710548cac53e325929",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4054,
"license_type": "permissive",
"max_line_length": 144,
"num_lines": 103,
"path": "/util/hessian.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import sys\nimport math\nimport pickle\nimport numpy as np\n\nimport torch\nimport torch.nn as nn\n\nfrom .utility import group_add, group_product, group_normalize, get_param, get_param_grad\n\ndef calc_hessian_eigen_full_dataset(model, loader, criterion, tosave, out_file, use_gpu, attacker = None, topk = 1, max_iter = 50, tol = 1e-3):\n '''\n >>> calculate the top eigenvalues of model parameters\n\n >>> model: the model studied\n >>> criterion: the loss function used\n >>> use_gpu: Boolean, whether or not to use GPU\n >>> attacker: Attacker\n >>> topk: Int, the number of top eigenvalues and eigen vector calculated\n >>> max_iter: Int, the maximum iterations allowed\n >>> tol: float, the precision tolerence\n '''\n\n device = torch.device('cuda:0' if use_gpu == True else 'cpu')\n\n # Dataset\n data_batch_list = []\n label_batch_list = []\n batch_size = None\n for data_batch, label_batch in loader:\n data_batch = data_batch.cuda() if use_gpu else data_batch\n label_batch = label_batch.cuda() if use_gpu else label_batch\n if batch_size is None:\n batch_size = data_batch.size(0)\n if data_batch.size(0) < batch_size:\n continue\n if attacker != None:\n optim = torch.optim.SGD(model.parameters(), lr = 1.)\n data_batch, label_batch = attacker.attack(model, optim, data_batch, label_batch, criterion)\n data_batch_list.append(data_batch)\n label_batch_list.append(label_batch)\n\n eigenvalue_list = []\n eigenvec_list = []\n model.eval()\n for eigen_idx in range(topk):\n\n print('>>> Eigen Index: %d / %d' % (eigen_idx, topk))\n eigenvalue = None\n\n param_list = get_param(model)\n v_list = [torch.randn(p.size()).to(device) for p in param_list]\n v_list = group_normalize(v_list)\n\n for iter_idx in range(max_iter):\n\n if eigenvalue is None:\n print('Iter Index: %d / %d' % (iter_idx, max_iter))\n else:\n print('Iter Index: %d / %d --> %.4f' % (iter_idx, max_iter, eigenvalue))\n Hv_sum = [torch.zeros(p.size()).to(device) for p in param_list]\n counter = 0\n for idx, (data_batch, label_batch) in enumerate(zip(data_batch_list, label_batch_list)):\n\n sys.stdout.write('Instance Idx: %d\\r' % idx)\n model.zero_grad()\n logits = model(data_batch)\n loss = criterion(logits, label_batch)\n loss.backward(create_graph = True)\n\n param_list, grad_list = get_param_grad(model)\n Hv = torch.autograd.grad(grad_list, param_list, grad_outputs = v_list, only_inputs = True, retain_graph = False)\n\n Hv_sum = [Hv_sum_item + Hv_item for Hv_sum_item, Hv_item in zip(Hv_sum, Hv)]\n for value, vector in zip(eigenvalue_list, eigenvec_list):\n inner_prod = group_product(vector, v_list).data.cpu().item()\n Hv_sum = group_add(Hv_sum, 1., vector, - value * float(inner_prod))\n counter += 1\n\n eigenvalue_next = group_product(Hv_sum, v_list).data.cpu().item() / float(counter)\n v_list = group_normalize(Hv_sum)\n\n if eigenvalue != None and abs((eigenvalue_next - eigenvalue) / eigenvalue) < tol:\n break\n else:\n eigenvalue = eigenvalue_next\n print('')\n print('Eigenvalue %d = %.4f' % (eigen_idx, eigenvalue_next))\n eigenvalue_list.append(eigenvalue_next)\n eigenvec_list.append(v_list)\n\n # Convert to Numpy\n eigenvec_list_tosave = [None,] * len(eigenvec_list)\n for eigen_idx, eigenvec in enumerate(eigenvec_list):\n eigenvec_list_tosave[eigen_idx] = [v.data.cpu().numpy() for v in eigenvec]\n\n tosave['eigenvalue_list'] = eigenvalue_list\n tosave['eigenvec_list'] = eigenvec_list_tosave\n\n if out_file != None:\n pickle.dump(tosave, open(out_file, 'wb'))\n\n return eigenvalue_list, eigenvec_list\n\n"
},
{
"alpha_fraction": 0.6281659603118896,
"alphanum_fraction": 0.6427947878837585,
"avg_line_length": 28.535484313964844,
"blob_id": "4980101cfbdfd7314db37d97f29b3ba2f3ad5e11",
"content_id": "6127298add6758505aede68bfbaaa8bbc70ef093",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4580,
"license_type": "permissive",
"max_line_length": 121,
"num_lines": 155,
"path": "/util/utility.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import torch\nimport torch.nn as nn\n\nfrom collections import OrderedDict\n\nfrom .models import CurveBatchNorm2d\n\n## Tensor Operation\ndef group_add(x1_list, mul1, x2_list, mul2):\n '''\n >>> group summation: x1 * mul1 + x2 * mul2\n '''\n\n return [x1 * mul1 + x2 * mul2 for x1, x2 in zip(x1_list, x2_list)]\n\ndef group_product(x1_list, x2_list):\n '''\n >>> x1_list, x2_list: the list of tensors to be multiplied\n\n >>> group dot product\n '''\n\n return sum([torch.sum(x1 * x2) for x1, x2 in zip(x1_list, x2_list)])\n\ndef group_normalize(v_list):\n '''\n >>> normalize the tensor list to make them joint l2 norm be 1\n '''\n\n summation = group_product(v_list, v_list)\n summation = summation ** 0.5\n v_list = [v / (summation + 1e-6) for v in v_list]\n\n return v_list\n\ndef get_param(model):\n '''\n >>> return the parameter list\n '''\n\n return [param for param in model.parameters()]\n\ndef get_param_grad(model):\n '''\n >>> return the parameter and gradient list\n '''\n param_list = []\n grad_list = []\n for param in model.parameters():\n if param.grad is None:\n continue\n param_list.append(param)\n grad_list.append(param.grad)\n return param_list, grad_list\n\n## Model Operation\ndef distance_between_ckpts(ckpt1, ckpt2):\n '''\n >>> Calculate the distance ckpt2 - ckpt1\n '''\n\n assert len(ckpt1) == len(ckpt2), 'The length of ckpt1 should be the same as ckpt2'\n key_list = ckpt1.keys()\n\n distance_dict = OrderedDict()\n for key in key_list:\n param1 = ckpt1[key]\n param2 = ckpt2[key]\n distance_dict[key] = param2.data - param1.data\n\n return distance_dict\n\n## Update BN\ndef reset_bn_stats(module):\n if isinstance(module, (nn.BatchNorm2d, nn.BatchNorm1d)):\n module.reset_running_stats()\n\ndef get_bn_momenta(module, momenta):\n if isinstance(module, (nn.BatchNorm2d, nn.BatchNorm1d)):\n momenta[module] = module.momentum\n\ndef set_bn_momenta(module, momenta):\n if isinstance(module, (nn.BatchNorm2d, nn.BatchNorm1d)):\n module.momentum = momenta[module]\n\ndef update_bn(model, loader, attacker, criterion, use_gpu):\n\n device = torch.device('cpu' if not use_gpu else 'cuda:0')\n\n model.train()\n momenta = {}\n model.apply(reset_bn_stats)\n model.apply(lambda module: get_bn_momenta(module, momenta))\n instance_num = 0\n\n for idx, (data_batch, label_batch) in enumerate(loader, 0):\n\n data_batch = data_batch.cuda(device) if use_gpu else data_batch\n label_batch = label_batch.cuda(device) if use_gpu else label_batch\n\n if attacker != None:\n optimizer = torch.optim.SGD(model.parameters(), lr = 1.)\n data_batch, label_batch = attacker.attack(model, optimizer, data_batch, label_batch, criterion)\n\n batch_size = data_batch.data.size(0)\n momentum = batch_size / (instance_num + batch_size)\n for module in momenta.keys():\n module.momentum = momentum\n\n model(data_batch)\n instance_num += batch_size\n\n model.apply(lambda module: set_bn_momenta(module, momenta))\n\n## Update BN in Curve Model\ndef reset_bn_stats_curve(module):\n if isinstance(module, CurveBatchNorm2d):\n module.reset_running_stats()\n\ndef get_bn_momenta_curve(module, momenta):\n if isinstance(module, CurveBatchNorm2d):\n momenta[module] = module.momentum\n\ndef set_bn_momenta_curve(module, momenta):\n if isinstance(module, CurveBatchNorm2d):\n module.momentum = momenta[module] \n\ndef update_bn_curve(model, coeffs, loader, attacker, criterion, use_gpu):\n\n device = torch.device('cpu' if not use_gpu else 'cuda:0')\n\n model.train()\n momenta = {}\n model.apply(reset_bn_stats_curve)\n model.apply(lambda module: 
get_bn_momenta_curve(module, momenta))\n instance_num = 0\n\n for idx, (data_batch, label_batch) in enumerate(loader, 0):\n\n data_batch = data_batch.cuda(device) if use_gpu else data_batch\n label_batch = label_batch.cuda(device) if use_gpu else label_batch\n\n if attacker != None:\n optimizer = torch.optim.SGD(model.parameters(), lr = 1.)\n data_batch, label_batch = attacker.attack_curve(model, optimizer, data_batch, label_batch, criterion, coeffs)\n\n batch_size = data_batch.data.size(0)\n momentum = batch_size / (instance_num + batch_size)\n for module in momenta.keys():\n module.momentum = momentum\n\n model(data_batch, coeffs)\n instance_num += batch_size\n\n model.apply(lambda module: set_bn_momenta_curve(module, momenta))\n\n\n"
},
{
"alpha_fraction": 0.5634408593177795,
"alphanum_fraction": 0.5806451439857483,
"avg_line_length": 30.704545974731445,
"blob_id": "5eb0c6e1c7100cdb79ab98e6f80d0ad171800561",
"content_id": "e24a58c46deb52b88557d629fdd33a5d76d2735c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1395,
"license_type": "permissive",
"max_line_length": 88,
"num_lines": 44,
"path": "/util/evaluation.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import torch\nimport torch.nn as nn\n\nclass AverageCalculator(object):\n\n def __init__(self):\n self.reset()\n\n def reset(self):\n self.value = 0.\n self.sum = 0.\n self.count = 0.\n self.average = 0.\n\n def update(self, value, weight = 1):\n self.value = value\n self.sum += value * weight\n self.count += weight\n self.average = self.sum / self.count\n\ndef accuracy(logits, label_batch, topk = 1, show_full_list = False):\n '''\n >>> calculate the top k accuracy for a mini_batch\n '''\n maxk = max(topk) if isinstance(topk, (tuple, list)) else topk\n batch_size = label_batch.size(0)\n\n _, prediction = logits.topk(maxk, 1, True, True)\n prediction = prediction.t()\n\n correct_mask = prediction.eq(label_batch.view(1, -1).expand_as(prediction))\n\n if isinstance(topk, (list, tuple)):\n full_list = [correct_mask[:k].view(-1).float() for k in topk]\n if show_full_list == False:\n return [item.sum(0).mul_(1.0 / batch_size) for item in full_list]\n else:\n return [item.sum(0).mul_(1.0 / batch_size) for item in full_list], full_list\n else:\n full_list = correct_mask[:topk].view(-1).float()\n if show_full_list == False:\n return full_list.sum(0).mul_(1.0 / batch_size)\n else:\n return full_list.sum(0).mul_(1.0 / batch_size), full_list\n"
},
{
"alpha_fraction": 0.5891796350479126,
"alphanum_fraction": 0.6116960048675537,
"avg_line_length": 50.56989288330078,
"blob_id": "493e81b521066ec94e3679e45b3c7b1c526cd580",
"content_id": "cdf9de77195f62d2f59e077f8699bac1752bca85",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9593,
"license_type": "permissive",
"max_line_length": 180,
"num_lines": 186,
"path": "/util/param_scanner.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import os\nimport sys\nimport copy\nimport numpy as np\nfrom collections import OrderedDict\n\nimport torch\nimport torch.nn as nn\n\nfrom .evaluation import *\n\ndef generate_vec(model, mode = 'normalized', scale = 1., **kwargs):\n\n vec = OrderedDict()\n for name, param in model.named_parameters():\n\n if mode.lower() in ['random',]:\n vec[name] = torch.randn(param.shape, device = param.device) * scale\n elif mode.lower() in ['normalized',]:\n vec_init = torch.randn(param.shape, device = param.device).view(param.size(0), -1)\n filter_norms = param.view(param.size(0), -1).norm(p = 2, dim = 1, keepdim = True)\n vec_init = vec_init / vec_init.norm(p = 2, dim = 1, keepdim = True) * filter_norms\n vec[name] = vec_init.view(param.shape) * scale\n else:\n raise ValueError('Unrecognized mode: %s' % mode)\n\n return vec\n\ndef move_param(model, vec1, vec2, x1, x2):\n\n model_copy = copy.deepcopy(model)\n\n for (name, param), (name_copy, param_copy) in zip(model.named_parameters(), model_copy.named_parameters()):\n\n assert name == name_copy\n delta = vec1[name] * x1 if vec2 == None else vec1[name] * x1 + vec2[name] * x2\n param_copy.data = param.data + delta\n\n return model_copy\n\ndef param_scan_1d(model, device, attacker, loader, vec, min_pt, max_pt, step_pt, adv_calc_freq = 1):\n\n use_gpu = device != torch.device('cpu') and torch.cuda.is_available()\n criterion = nn.CrossEntropyLoss()\n criterion = criterion.cuda(device) if use_gpu else criterion\n\n x_list = np.arange(min_pt, max_pt + 1e-8, step_pt)\n x_len = len(x_list)\n x_adv_update = np.ceil(x_len / adv_calc_freq).__int__()\n\n acc_value_list = []\n loss_value_list = []\n\n for x_adv_idx in range(x_adv_update):\n\n print('scanning: (%d) in the grid of (%d)' % (x_adv_idx, x_adv_update))\n\n # Prepare the list of model as well as loss & accuracy calculators\n base_x_idx = x_adv_idx * adv_calc_freq\n model_num_this_group = min(adv_calc_freq, len(x_list) - base_x_idx)\n\n model_list = [move_param(model, vec, None, x_list[base_x_idx + x_idx], None) for x_idx in range(model_num_this_group)]\n acc_calc_list = [AverageCalculator() for _ in range(model_num_this_group)]\n loss_calc_list = [AverageCalculator() for _ in range(model_num_this_group)]\n\n for idx, (data_batch, label_batch) in enumerate(loader, 0):\n\n sys.stdout.write('Instance %d\\r' % idx)\n\n data_batch = data_batch.cuda(device) if use_gpu else data_batch\n label_batch = label_batch.cuda(device) if use_gpu else label_batch\n\n # Generate adversarial examples based on first model in each group\n optim = torch.optim.SGD(model_list[0].parameters(), lr = 1.)\n data_batch, label_batch = attacker.attack(model_list[0], optim, data_batch, label_batch, criterion)\n\n # Loss and Accuracy are calculated for each model\n logits_list = [model(data_batch) for model in model_list]\n loss_list = [criterion(logits, label_batch) for logits in logits_list]\n acc_list = [accuracy(logits.data, label_batch) for logits in logits_list]\n\n for _idx in range(model_num_this_group):\n loss_calc_list[_idx].update(loss_list[_idx].item(), data_batch.size(0))\n acc_calc_list[_idx].update(acc_list[_idx].item(), data_batch.size(0))\n\n acc_value_this_group = [calc.average for calc in acc_calc_list]\n loss_value_this_group = [calc.average for calc in loss_calc_list]\n\n acc_value_list = acc_value_list + acc_value_this_group\n loss_value_list = loss_value_list + loss_value_this_group\n\n return acc_value_list, loss_value_list\n\ndef param_scan_2d(model, device, attacker, loader, vec1, min_pt1, max_pt1, 
step_pt1, vec2, min_pt2, max_pt2, step_pt2, adv_calc_freq = 1):\n\n use_gpu = device != torch.device('cpu') and torch.cuda.is_available()\n criterion = nn.CrossEntropyLoss()\n criterion = criterion.cuda(device) if use_gpu else criterion\n\n x1_list = np.arange(min_pt1, max_pt1 + 1e-8, step_pt1)\n x2_list = np.arange(min_pt2, max_pt2 + 1e-8, step_pt2)\n x1_len = len(x1_list)\n x2_len = len(x2_list)\n x1_adv_update = np.ceil(x1_len / adv_calc_freq).__int__()\n x2_adv_update = np.ceil(x2_len / adv_calc_freq).__int__()\n\n acc_value_list = []\n loss_value_list = []\n\n for x1_adv_idx in range(x1_adv_update):\n\n base_x1_idx = x1_adv_idx * adv_calc_freq\n x1_num_this_group = min(adv_calc_freq, len(x1_list) - base_x1_idx)\n\n acc_value_list = acc_value_list + [[] for _ in range(x1_num_this_group)]\n loss_value_list = loss_value_list + [[] for _ in range(x1_num_this_group)]\n\n for x2_adv_idx in range(x2_adv_update):\n\n print('scanning: (%d, %d) in the grid of (%d, %d)' % (x1_adv_idx, x2_adv_idx, x1_adv_update, x2_adv_update))\n\n base_x2_idx = x2_adv_idx * adv_calc_freq\n x2_num_this_group = min(adv_calc_freq, len(x2_list) - base_x2_idx)\n\n # Prepare the list of model as well as loss & accuracy calculators\n model_list = [[move_param(model, vec1, vec2, x1_list[base_x1_idx + x1_idx], x2_list[base_x2_idx + x2_idx]) \\\n for x2_idx in range(x2_num_this_group)] for x1_idx in range(x1_num_this_group)]\n acc_calc_list = [[AverageCalculator() for _2 in range(x2_num_this_group)] for _1 in range(x1_num_this_group)]\n loss_calc_list = [[AverageCalculator() for _2 in range(x2_num_this_group)] for _1 in range(x1_num_this_group)]\n\n for idx, (data_batch, label_batch) in enumerate(loader, 0):\n\n sys.stdout.write('Instance %d\\r' % idx)\n\n data_batch = data_batch.cuda(device) if use_gpu else data_batch\n label_batch = label_batch.cuda(device) if use_gpu else label_batch\n\n # Generate adversarial examples based on the first model in each group\n optim = torch.optim.SGD(model_list[0][0].parameters(), lr = 1.)\n data_batch, label_batch = attacker.attack(model_list[0][0], optim, data_batch, label_batch, criterion)\n\n logits_list = [[model_list[_1][_2](data_batch) for _2 in range(x2_num_this_group)] for _1 in range(x1_num_this_group)]\n loss_list = [[criterion(logits_list[_1][_2], label_batch) for _2 in range(x2_num_this_group)] for _1 in range(x1_num_this_group)]\n acc_list = [[accuracy(logits_list[_1][_2].data, label_batch) for _2 in range(x2_num_this_group)] for _1 in range(x1_num_this_group)]\n\n for _1 in range(x1_num_this_group):\n for _2 in range(x2_num_this_group):\n loss_calc_list[_1][_2].update(loss_list[_1][_2].item(), data_batch.size(0))\n acc_calc_list[_1][_2].update(acc_list[_1][_2].item(), data_batch.size(0))\n\n acc_value_this_group = [[acc_calc_list[_1][_2].average for _2 in range(x2_num_this_group)] for _1 in range(x1_num_this_group)]\n loss_value_this_group = [[loss_calc_list[_1][_2].average for _2 in range(x2_num_this_group)] for _1 in range(x1_num_this_group)]\n\n for _1 in range(x1_num_this_group):\n acc_value_list[base_x1_idx + _1] = acc_value_list[base_x1_idx + _1] + acc_value_this_group[_1]\n loss_value_list[base_x1_idx + _1] = loss_value_list[base_x1_idx + _1] + loss_value_this_group[_1]\n\n return acc_value_list, loss_value_list\n\ndef param_scan(model, device, attacker, loader, adv_budget_list, vec1, vec2, vec1_scan, vec2_scan, tosave):\n\n for adv_budget in adv_budget_list:\n\n # Update attacker\n attacker.adjust_threshold(adv_budget)\n\n print('Test under adversarial budget 
%.3f' % adv_budget)\n print('theshold = %.3f, step_size = %.3f' % (attacker.threshold, attacker.step_size))\n\n if vec2 == None: # 1d scan\n min_pt, max_pt, step_pt, adv_calc_freq = float(vec1_scan['min']), float(vec1_scan['max']), float(vec1_scan['step']), int(vec1_scan['adv_calc_freq'])\n acc_value_list, loss_value_list = param_scan_1d(model = model, device = device, attacker = attacker, loader = loader,\n vec = vec1, min_pt = min_pt, max_pt = max_pt, step_pt = step_pt, adv_calc_freq = adv_calc_freq)\n tosave['results'][adv_budget]['acc_value_list'] = acc_value_list\n tosave['results'][adv_budget]['loss_value_list'] = loss_value_list\n else: # 2d scan\n min_pt1, max_pt1, step_pt1, adv_calc_freq1 = float(vec1_scan['min']), float(vec1_scan['max']), float(vec1_scan['step']), int(vec1_scan['adv_calc_freq'])\n min_pt2, max_pt2, step_pt2, adv_calc_freq2 = float(vec2_scan['min']), float(vec2_scan['max']), float(vec2_scan['step']), int(vec2_scan['adv_calc_freq'])\n assert adv_calc_freq1 == adv_calc_freq2, 'adv_calc_freq should match in both dimensions, but they are %d and %d respectively' % (adv_calc_freq1, adv_calc_freq2)\n adv_calc_freq = adv_calc_freq1\n acc_value_list, loss_value_list = param_scan_2d(model = model, device = device, attacker = attacker, loader = loader, vec1 = vec1, min_pt1 = min_pt1, max_pt1 = max_pt1,\n step_pt1 = step_pt1, vec2 = vec2, min_pt2 = min_pt2, max_pt2 = max_pt2, step_pt2 = step_pt2, adv_calc_freq = adv_calc_freq)\n tosave['results'][adv_budget]['acc_value_list'] = acc_value_list\n tosave['results'][adv_budget]['loss_value_list'] = loss_value_list\n\n return tosave\n\n"
},
{
"alpha_fraction": 0.6112087965011597,
"alphanum_fraction": 0.6175091862678528,
"avg_line_length": 42.05678176879883,
"blob_id": "b94464fa4ba2b2d2241f763ae17c8b8d8aeaea45",
"content_id": "46e0bacc40f39d83115c4e4320c5b58a97db1037",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 13650,
"license_type": "permissive",
"max_line_length": 133,
"num_lines": 317,
"path": "/util/train.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import os\nimport json\nimport pickle\nimport numpy as np\n\nimport torch\nimport torch.nn as nn\n\nfrom .models import *\nfrom .attack import *\nfrom .evaluation import *\nfrom .curves import poly_chain, bezier_curve\n\ndef vanilla_train(model, train_loader, test_loader, attacker, epoch_num, epoch_ckpts, train_batches, optimizer, lr_func, eps_func,\n schedule_update_mode, out_folder, model_name, device, criterion, tosave, mask, **tricks):\n\n use_gpu = device != torch.device('cpu') and torch.cuda.is_available()\n\n acc_calculator = AverageCalculator()\n loss_calculator = AverageCalculator()\n\n for epoch_idx in range(epoch_num):\n\n acc_calculator.reset()\n loss_calculator.reset()\n\n model.train()\n for idx, (data_batch, label_batch) in enumerate(train_loader, 0):\n\n sys.stdout.write('Instance Idx: %d\\r' % idx)\n epoch_batch_idx = epoch_idx + 1. / train_batches * idx if schedule_update_mode.lower() in ['batch',] else epoch_idx\n\n # Update the learning rate\n lr_this_batch = lr_func(epoch_batch_idx)\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr_this_batch\n if idx == 0:\n print('Learning rate = %1.2e' % lr_this_batch)\n\n data_batch = data_batch.cuda(device) if use_gpu else data_batch\n label_batch = label_batch.cuda(device) if use_gpu else label_batch\n\n if attacker != None:\n if eps_func != None and (schedule_update_mode.lower() in ['batch'] or idx == 0):\n next_threshold = eps_func(epoch_batch_idx)\n attacker.adjust_threshold(next_threshold)\n model.eval()\n data_batch, label_batch = attacker.attack(model, optimizer, data_batch, label_batch, criterion)\n model.train()\n\n logits = model(data_batch)\n loss = criterion(logits, label_batch)\n acc = accuracy(logits.data, label_batch)\n optimizer.zero_grad()\n loss.backward()\n if mask != None:\n for n, p in model.named_parameters():\n if p.grad is not None and n in mask:\n p.grad.data = p.grad.data * mask[n].to(p.device)\n optimizer.step()\n optimizer.zero_grad()\n\n loss_calculator.update(loss.item(), data_batch.size(0))\n acc_calculator.update(acc.item(), data_batch.size(0))\n\n loss_this_epoch = loss_calculator.average\n acc_this_epoch = acc_calculator.average\n print('Train loss / acc after epoch %d: %.4f / %.2f%%' % (epoch_idx, loss_this_epoch, acc_this_epoch * 100.))\n tosave['train_loss'][epoch_idx] = loss_this_epoch\n tosave['train_acc'][epoch_idx] = acc_this_epoch\n\n loss_calculator.reset()\n acc_calculator.reset()\n\n model.eval()\n for idx, (data_batch, label_batch) in enumerate(test_loader, 0):\n\n sys.stdout.write('Instance Idx: %d\\r' % idx)\n\n data_batch = data_batch.cuda(device) if use_gpu else data_batch\n label_batch = label_batch.cuda(device) if use_gpu else label_batch\n\n if attacker != None:\n data_batch, label_batch = attacker.attack(model, optimizer, data_batch, label_batch, criterion)\n\n logits = model(data_batch)\n loss = criterion(logits, label_batch)\n acc = accuracy(logits.data, label_batch)\n\n loss_calculator.update(loss.item(), data_batch.size(0))\n acc_calculator.update(acc.item(), data_batch.size(0))\n\n loss_this_epoch = loss_calculator.average\n acc_this_epoch = acc_calculator.average\n print('Test loss / acc after epoch %d: %.4f / %.2f%%' % (epoch_idx, loss_this_epoch, acc_this_epoch * 100.))\n tosave['test_loss'][epoch_idx] = loss_this_epoch\n tosave['test_acc'][epoch_idx] = acc_this_epoch\n\n json.dump(tosave, open(os.path.join(out_folder, '%s.json' % model_name), 'w'))\n if (epoch_idx + 1) in epoch_ckpts:\n torch.save(model.state_dict(), 
os.path.join(out_folder, '%s_%d.ckpt' % (model_name, epoch_idx + 1)))\n\n torch.save(model.state_dict(), os.path.join(out_folder, '%s.ckpt' % model_name))\n\n return model, tosave\n\ndef attack(model, loader, attacker, optimizer, out_file, device, criterion, tosave, **tricks):\n\n use_gpu = device != torch.device('cpu') and torch.cuda.is_available()\n\n clean_acc_calculator = AverageCalculator()\n clean_loss_calculator = AverageCalculator()\n adv_acc_calculator = AverageCalculator()\n adv_loss_calculator = AverageCalculator()\n\n model.eval()\n for idx, (data_batch, label_batch) in enumerate(loader, 0):\n\n sys.stdout.write('Instance Idx: %d\\r' % idx)\n\n clean_data_batch = data_batch.cuda(device) if use_gpu else data_batch\n clean_label_batch = label_batch.cuda(device) if use_gpu else label_batch\n\n adv_data_batch, adv_label_batch = attacker.attack(model, optimizer, clean_data_batch, clean_label_batch, criterion)\n\n clean_logits = model(clean_data_batch)\n clean_loss = criterion(clean_logits, clean_label_batch)\n clean_acc = accuracy(clean_logits.data, clean_label_batch)\n\n adv_logits = model(adv_data_batch)\n adv_loss = criterion(adv_logits, adv_label_batch)\n adv_acc = accuracy(adv_logits.data, adv_label_batch)\n\n clean_acc_calculator.update(clean_acc.item(), clean_data_batch.size(0))\n clean_loss_calculator.update(clean_loss.item(), clean_data_batch.size(0))\n adv_acc_calculator.update(adv_acc.item(), adv_data_batch.size(0))\n adv_loss_calculator.update(adv_loss.item(), adv_data_batch.size(0))\n\n clean_acc_this_epoch = clean_acc_calculator.average\n clean_loss_this_epoch = clean_loss_calculator.average\n adv_acc_this_epoch = adv_acc_calculator.average\n adv_loss_this_epoch = adv_loss_calculator.average\n\n print('Clean loss / acc: %.4f / %.2f%%' % (clean_loss_this_epoch, clean_acc_this_epoch * 100.))\n print('Adversarial loss / acc: %.4f / %.2f%%' % (adv_loss_this_epoch, adv_acc_this_epoch * 100.))\n\n tosave['clean_acc'] = clean_acc_this_epoch\n tosave['clean_loss'] = clean_loss_this_epoch\n tosave['adv_acc'] = adv_acc_this_epoch\n tosave['adv_loss'] = adv_loss_this_epoch\n\n if out_file != None:\n json.dump(tosave, open(out_file, 'w'))\n\n return clean_acc_this_epoch, clean_loss_this_epoch, adv_acc_this_epoch, adv_loss_this_epoch\n\ndef attack_list(model_list, loader, attacker, optimizer, out_file, device, criterion, tosave, **tricks):\n\n use_gpu = device != torch.device('cpu') and torch.cuda.is_available()\n\n clean_acc_calculator = AverageCalculator()\n clean_loss_calculator = AverageCalculator()\n adv_acc_calculator = AverageCalculator()\n adv_loss_calculator = AverageCalculator()\n\n for model in model_list:\n model.eval()\n\n for idx, (data_batch, label_batch) in enumerate(loader, 0):\n\n sys.stdout.write('Instance Idx: %d\\r' % idx)\n\n clean_data_batch = data_batch.cuda(device) if use_gpu else data_batch\n clean_label_batch = label_batch.cuda(device) if use_gpu else label_batch\n\n adv_data_batch, adv_label_batch = attacker.attack_list(model_list, optimizer, clean_data_batch, clean_label_batch, criterion)\n\n clean_prob = 0.\n adv_prob = 0.\n for model in model_list:\n clean_logits_this_model = model(clean_data_batch)\n clean_prob_this_model = F.softmax(clean_logits_this_model)\n clean_prob = clean_prob + clean_prob_this_model\n adv_logits_this_model = model(adv_data_batch)\n adv_prob_this_model = F.softmax(adv_logits_this_model)\n adv_prob = adv_prob + adv_prob_this_model\n clean_prob = clean_prob / len(model_list)\n _, clean_prediction = clean_prob.max(dim = 1)\n 
adv_prob = adv_prob / len(model_list)\n _, adv_prediction = adv_prob.max(dim = 1)\n\n clean_loss = - torch.log(clean_prob).gather(dim = 1, index = clean_label_batch.view(-1, 1)).view(-1).mean()\n adv_loss = - torch.log(adv_prob).gather(dim = 1, index = adv_label_batch.view(-1, 1)).view(-1).mean()\n clean_acc = (clean_prediction == clean_label_batch).float().mean()\n adv_acc = (adv_prediction == adv_label_batch).float().mean()\n\n clean_acc_calculator.update(clean_acc.item(), clean_data_batch.size(0))\n clean_loss_calculator.update(clean_loss.item(), clean_data_batch.size(0))\n adv_acc_calculator.update(adv_acc.item(), adv_data_batch.size(0))\n adv_loss_calculator.update(adv_loss.item(), adv_data_batch.size(0))\n\n clean_acc_this_epoch = clean_acc_calculator.average\n clean_loss_this_epoch = clean_loss_calculator.average\n adv_acc_this_epoch = adv_acc_calculator.average\n adv_loss_this_epoch = adv_loss_calculator.average\n\n print('Clean loss / acc: %.4f / %.2f%%' % (clean_loss_this_epoch, clean_acc_this_epoch * 100.))\n print('Adversarial loss / acc: %.4f / %.2f%%' % (adv_loss_this_epoch, adv_acc_this_epoch * 100.))\n\n tosave['clean_acc'] = clean_acc_this_epoch\n tosave['clean_loss'] = clean_loss_this_epoch\n tosave['adv_acc'] = adv_acc_this_epoch\n tosave['adv_loss'] = adv_loss_this_epoch\n\n if out_file != None:\n json.dump(tosave, open(out_file, 'w'))\n\n return clean_acc_this_epoch, clean_loss_this_epoch, adv_acc_this_epoch, adv_loss_this_epoch\n\ndef curve_train(model, curve_type, train_loader, test_loader, train_batches, attacker, epoch_num, optimizer, lr_func,\n out_folder, model_name, device, criterion, tosave, **tricks):\n\n use_gpu = device != torch.device('cpu') and torch.cuda.is_available()\n\n acc_calculator = AverageCalculator()\n loss_calculator = AverageCalculator()\n\n for epoch_idx in range(epoch_num):\n\n acc_calculator.reset()\n loss_calculator.reset()\n\n model.train()\n for idx, (data_batch, label_batch) in enumerate(train_loader, 0):\n\n sys.stdout.write('Instance Idx: %d\\r' % idx)\n\n # Update the learning rate\n if lr_func != None:\n epoch_batch_idx = epoch_idx + 1. 
/ train_batches * idx\n lr_this_batch = lr_func(epoch_batch_idx)\n for param_group in optimizer.param_groups:\n param_group['lr'] = lr_this_batch\n\n data_batch = data_batch.cuda(device) if use_gpu else data_batch\n label_batch = label_batch.cuda(device) if use_gpu else label_batch\n\n # Generate coeffs\n t = np.random.uniform(0, 1)\n if curve_type.lower() in ['poly_chain',]:\n coeffs = poly_chain(t, pt_num = model.num_bends)\n elif curve_type.lower() in ['bezier_curve', 'bezier']:\n coeffs = bezier_curve(t, pt_num = model.num_bends)\n else:\n raise ValueError('Unrecognized curve type: %s' % curve_type)\n\n # Attack\n if attacker != None:\n data_batch, label_batch = attacker.attack_curve(model, optimizer, data_batch, label_batch, criterion, coeffs)\n\n logits = model(data_batch, coeffs)\n loss = criterion(logits, label_batch)\n acc = accuracy(logits.data, label_batch)\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n optimizer.zero_grad()\n\n loss_calculator.update(loss.item(), data_batch.size(0))\n acc_calculator.update(acc.item(), data_batch.size(0))\n\n loss_this_epoch = loss_calculator.average\n acc_this_epoch = acc_calculator.average\n print('Train loss / acc after epoch %d: %.4f / %.2f%%' % (epoch_idx, loss_this_epoch, acc_this_epoch * 100.))\n tosave['train_loss'][epoch_idx] = loss_this_epoch\n tosave['train_acc'][epoch_idx] = acc_this_epoch\n\n loss_calculator.reset()\n acc_calculator.reset()\n\n for idx, (data_batch, label_batch) in enumerate(test_loader, 0):\n\n sys.stdout.write('Instance Idx: %d\\r' % idx)\n\n data_batch = data_batch.cuda(device) if use_gpu else data_batch\n label_batch = label_batch.cuda(device) if use_gpu else label_batch\n\n t = np.random.uniform(0, 1)\n if curve_type.lower() in ['poly_chain',]:\n coeffs = poly_chain(t, pt_num = model.num_bends)\n elif curve_type.lower() in ['bezier_curve', 'bezier']:\n coeffs = bezier_curve(t, pt_num = model.num_bends)\n else:\n raise ValueError('Unrecognized curve type: %s' % curve_type)\n\n if attacker != None:\n data_batch, label_batch = attacker.attack_curve(model, optimizer, data_batch, label_batch, criterion, coeffs)\n\n logits = model(data_batch, coeffs)\n loss = criterion(logits, label_batch)\n acc = accuracy(logits.data, label_batch)\n\n loss_calculator.update(loss.item(), data_batch.size(0))\n acc_calculator.update(acc.item(), data_batch.size(0))\n\n loss_this_epoch = loss_calculator.average\n acc_this_epoch = acc_calculator.average\n print('Test loss / acc after epoch %d: %.4f / %.2f%%' % (epoch_idx, loss_this_epoch, acc_this_epoch * 100.))\n tosave['test_loss'][epoch_idx] = loss_this_epoch\n tosave['test_acc'][epoch_idx] = acc_this_epoch\n\n json.dump(tosave, open(os.path.join(out_folder, '%s.json' % model_name), 'w'))\n\n json.dump(tosave, open(os.path.join(out_folder, '%s.json' % model_name), 'w'))\n torch.save(model.state_dict(), os.path.join(out_folder, '%s.ckpt' % model_name))\n\n return model, tosave\n\n"
},
{
"alpha_fraction": 0.569466233253479,
"alphanum_fraction": 0.569858729839325,
"avg_line_length": 29.698795318603516,
"blob_id": "b9bb141ece9edc97ea7557304d3e9b266cbfedd6",
"content_id": "084819ebc04e5279818a67aecbdb7cafee2e0e14",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2548,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 83,
"path": "/util/param_parser.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "# some extra parameter parsers\n\nimport argparse\n\nclass DictParser(argparse.Action):\n\n def __init__(self, *args, **kwargs):\n\n super(DictParser, self).__init__(*args, **kwargs)\n self.local_dict = {}\n\n def __call__(self, parser, namespace, values, option_string = None):\n\n try:\n for kv in values.split(','):\n k, v = kv.split('=')\n try:\n self.local_dict[k] = float(v)\n except:\n self.local_dict[k] = v\n setattr(namespace, self.dest, self.local_dict)\n except:\n raise ValueError('Failed when parsing %s as dict' % values)\n\nclass ListParser(argparse.Action):\n\n def __init__(self, * args, **kwargs):\n\n super(ListParser, self).__init__(*args, **kwargs)\n self.local_list = []\n\n def __call__(self, parser, namespace, values, option_string = None):\n\n try:\n self.local_list = values.split(',')\n setattr(namespace, self.dest, self.local_list)\n except:\n raise ValueError('Failed when parsing %s as str list' % values)\n\nclass IntListParser(argparse.Action):\n\n def __init__(self, *args, **kwargs):\n\n super(IntListParser, self).__init__(*args, **kwargs)\n self.local_list = []\n\n def __call__(self, parser, namespace, values, option_string = None):\n\n try:\n self.local_list = list(map(int, values.split(',')))\n setattr(namespace, self.dest, self.local_list)\n except:\n raise ValueError('Failed when parsing %s as int list' % values)\n\nclass FloatListParser(argparse.Action):\n\n def __init__(self, *args, **kwargs):\n\n super(FloatListParser, self).__init__(*args, **kwargs)\n self.local_list = []\n\n def __call__(self, parser, namespace, values, option_string = None):\n\n try:\n self.local_list = list(map(float, values.split(',')))\n setattr(namespace, self.dest, self.local_list)\n except:\n raise ValueError('Failed when parsing %s as float list' % values)\n\nclass BooleanParser(argparse.Action):\n\n def __init__(self, *args, **kwargs):\n\n super(BooleanParser, self).__init__(*args, **kwargs)\n self.values = None\n\n def __call__(self, parser, namespace, values, option_string = None):\n\n try:\n self.values = False if int(values) == 0 else True\n setattr(namespace, self.dest, self.values)\n except:\n raise ValueError('Failed when parsing %s as boolean list' % values)\n"
},
{
"alpha_fraction": 0.6308582425117493,
"alphanum_fraction": 0.6395371556282043,
"avg_line_length": 42.93220520019531,
"blob_id": "14bcdd89aadea1c48c39a17f810bd3a3527794d6",
"content_id": "d2c9306f693fad408e535175141f079555c82c74",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5185,
"license_type": "permissive",
"max_line_length": 125,
"num_lines": 118,
"path": "/run/perturb_param.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import os\nimport sys\nsys.path.insert(0, './')\nimport json\nimport argparse\nimport numpy as np\nfrom collections import OrderedDict\n\nimport torch\nimport torch.nn as nn\nfrom datetime import datetime\n\nfrom util.io import eigenvec2ckpt\nfrom util.models import MNIST_LeNet, CIFAR10_ResNet\nfrom util.dataset import mnist, cifar10\nfrom util.device_parser import config_visible_gpu\nfrom util.param_parser import DictParser, ListParser, IntListParser, FloatListParser, BooleanParser\n\nfrom analysis.param_space_scan import generate_vec, param_scan\n\nif __name__ == '__main__':\n\n parser = argparse.ArgumentParser()\n\n parser.add_argument('--dataset', type = str, default = 'cifar10',\n help = 'The dataset used, default = \"cifar10\"')\n parser.add_argument('--model_type', type = str, default = 'resnet',\n help = 'The model type, default = \"lenet\", supported = [\"lenet\", \"resnet\"]')\n parser.add_argument('--width', type = int, default = 8,\n help = 'The width of MNIST_LeNet, default = 8')\n parser.add_argument('--bias', action = BooleanParser, default = True,\n help = 'Whether or not use bias term, default = True')\n parser.add_argument('--model2load', type = str, default = None,\n help = 'The models to be loaded as the fix point, default = None')\n\n parser.add_argument('--out_folder', type = str, default = None,\n help = 'The output folder')\n parser.add_argument('--model_name', type = str, default = None,\n help = 'The name of the model')\n\n parser.add_argument('--vec2load', type = str, default = None,\n help = 'The vector perturbation to be loaded')\n parser.add_argument('--eigenvec_idx', type = int, default = 0,\n help = 'The index of eigenvector used, default = 0')\n parser.add_argument('--vec_scale', type = float, default = 1,\n help = 'The scale of direction vector, default = 1')\n\n parser.add_argument('--gpu', type = str, default = None,\n help = 'Specify the GPU to use, default = None')\n\n args = parser.parse_args()\n\n # Configure GPU\n config_visible_gpu(args.gpu)\n use_gpu = args.gpu != 'cpu' and torch.cuda.is_available()\n device = torch.device('cuda:0' if use_gpu else 'cpu')\n\n # Parse IO\n if args.out_folder != None and os.path.exists(args.out_folder) == False:\n os.makedirs(args.out_folder)\n\n # Parse model\n if args.dataset.lower() in ['mnist',]:\n assert args.model_type.lower() in ['lenet',], 'For MNIST, only LeNet is supported'\n model = MNIST_LeNet(width = args.width, bias = args.bias)\n elif args.dataset.lower() in ['cifar10',]:\n if args.model_type.lower() in ['lenet',]:\n model = CIFAR10_LeNet(width = args.width, bias = args.bias)\n elif args.model_type.lower() in ['vgg',]:\n model = CIFAR10_VGG(width = args.width, bias = args.bias)\n elif args.model_type.lower() in ['resnet',]:\n model = CIFAR10_ResNet(width = args.width)\n if args.bias == True:\n print('WARNING: ResNet18 does not have bias term in its layers.')\n else:\n raise ValueError('Invalid model_type: %s' % args.model_type)\n else:\n raise ValueError('Unrecognized dataset: %s' % args.dataset)\n model = model.cuda() if use_gpu else model\n criterion = nn.CrossEntropyLoss()\n criterion = criterion.cuda() if use_gpu else criterion\n assert os.path.exists(args.model2load), 'File %s does not exist!' 
% args.model2load\n ckpt2load = torch.load(args.model2load)\n model.load_state_dict(ckpt2load)\n\n # Parse vectors\n if args.vec2load is None:\n vec = OrderedDict({name: torch.randn_like(param) for name, param in model.named_parameters()})\n summation = sum([torch.sum(vec[key] * vec[key]) for key in vec]) ** 0.5\n vec = OrderedDict({key: vec[key] / (summation + 1e-6) for key in vec})\n else:\n if args.vec2load.endswith('ckpt'):\n vec = torch.load(args.vec2load)\n elif args.vec2load.endswith('pkl'):\n vec = eigenvec2ckpt(model = model, eigen_info_file = args.vec2load, index = args.eigenvec_idx, use_gpu = use_gpu)\n else:\n raise ValueError('Unrecognized format: %s' % args.vec2load)\n\n # Prepare the item to save\n configs = {kwargs: value for kwargs, value in args._get_kwargs()}\n tosave = {'model_summary': str(model), 'setup_config': configs,\n 'log': {'cmd': 'python ' + ' '.join(sys.argv), 'time': datetime.now().strftime('%Y/%m/%d, %H:%M:%S')}}\n\n for param in list(sorted(tosave['setup_config'].keys())):\n print('%s\\t=>%s' % (param, tosave['setup_config'][param]))\n\n ori_dict = OrderedDict({name: param for name, param in model.named_parameters()})\n vec_dict = vec\n new_dict = OrderedDict()\n\n assert len(ori_dict.keys()) == len(vec_dict.keys()), 'The length of both dictionaries must be the same.'\n tosave_dict = model.state_dict()\n\n for name in vec_dict:\n tosave_dict[name] = ori_dict[name] + vec_dict[name] * args.vec_scale\n\n json.dump(tosave, open(os.path.join(args.out_folder, '%s.json' % args.model_name), 'w'))\n torch.save(tosave_dict, os.path.join(args.out_folder, '%s.ckpt' % args.model_name))\n\n"
},
{
"alpha_fraction": 0.6248027086257935,
"alphanum_fraction": 0.6338218450546265,
"avg_line_length": 42.910892486572266,
"blob_id": "2c659ab07713ab72b58306a23688500f72103ddd",
"content_id": "cabdba5c2998d98de109ae84a88def22e0655c1c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4435,
"license_type": "permissive",
"max_line_length": 143,
"num_lines": 101,
"path": "/run/test_ensemble.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import os\nimport sys\nsys.path.insert(0, './')\nimport argparse\nimport numpy as np\n\nimport torch\nimport torch.nn as nn\nfrom datetime import datetime\n\nfrom util.attack import parse_attacker\nfrom util.models import MNIST_LeNet, CIFAR10_LeNet, CIFAR10_VGG, CIFAR10_ResNet\nfrom util.train import attack_list\nfrom util.dataset import mnist, cifar10\nfrom util.device_parser import config_visible_gpu\nfrom util.param_parser import DictParser, ListParser, FloatListParser, BooleanParser\n\nif __name__ == '__main__':\n\n parser = argparse.ArgumentParser()\n\n parser.add_argument('--dataset', type = str, default = 'cifar10',\n help = 'The dataset used, default = \"cifar10\"')\n parser.add_argument('--batch_size', type = int, default = 128,\n help = 'The batch size, default = 128')\n\n parser.add_argument('--model_type', type = str, default = 'lenet',\n help = 'The model type, default = \"lenet\", supported = [\"lenet\", \"vgg\", \"resnet\"]')\n parser.add_argument('--width', type = int, default = 8,\n help = 'The width of the model, default = 8')\n parser.add_argument('--bias', action = BooleanParser, default = True,\n help = 'Whether or not use bias term, default = True')\n parser.add_argument('--model2load', action = ListParser, default = None,\n help = 'The models to be loaded, default = None')\n\n parser.add_argument('--out_file', type = str, default = None,\n help = 'The output file, default = None')\n parser.add_argument('--attack', action = DictParser, default = None,\n help = 'Play adversarial attack or not, default = None')\n\n parser.add_argument('--gpu', type = str, default = None,\n help = 'Specify the GPU to use, default = None')\n\n args = parser.parse_args()\n\n # Configure GPU\n config_visible_gpu(args.gpu)\n use_gpu = args.gpu != 'cpu' and torch.cuda.is_available()\n device = torch.device('cuda:0' if use_gpu else 'cpu')\n\n # Parse IO\n if args.out_file != None:\n out_dir = os.path.dirname(args.out_file)\n if out_dir != '' and os.path.exists(out_dir) == False:\n os.makedirs(out_dir)\n\n # Parse model and dataset\n if args.dataset.lower() in ['cifar10',]:\n train_loader, test_loader, classes = cifar10(batch_size = args.batch_size)\n if args.model_type.lower() in ['lenet',]:\n make_model = lambda: CIFAR10_LeNet(width = args.width, bias = args.bias)\n elif args.model_type.lower() in ['vgg',]:\n make_model = lambda: CIFAR10_VGG(width = args.width, bias = args.bias)\n elif args.model_type.lower() in ['resnet',]:\n make_model = lambda: CIFAR10_ResNet(width = args.width)\n if args.bias == True:\n print('WARNING: ResNet18 does not have bias term in its layers.')\n else:\n raise ValueError('Invalid model_type: %s' % args.model_type)\n elif args.dataset.lower() in ['mnist',]:\n train_loader, test_loader, classes = mnist(batch_size = args.batch_size)\n if args.model_type.lower() in ['lenet',]:\n make_model = lambda: MNIST_LeNet(width = args.width, bias = args.bias)\n else:\n raise ValueError('Invalid model_type: %s' % args.model_type)\n else:\n raise ValueError('Invalid dataset: %s' % args.dataset)\n model_list = []\n for file2load in args.model2load:\n model = make_model()\n model = model.cuda() if use_gpu else model\n ckpt2load = torch.load(file2load)\n model.load_state_dict(ckpt2load)\n model_list.append(model)\n criterion = nn.CrossEntropyLoss()\n criterion = criterion.cuda() if use_gpu else criterion\n\n # Parse the optimizer\n attacker = None if args.attack == None else parse_attacker(**args.attack)\n optimizer = None\n\n # Prepare the item to save\n configs = {kwargs: 
value for kwargs, value in args._get_kwargs()}\n tosave = {'model_summary': str(model), 'setup_config': configs, 'train_acc': None, 'train_loss': None, 'test_acc': None, 'test_loss': None,\n 'log': {'cmd': 'python ' + ' '.join(sys.argv), 'time': datetime.now().strftime('%Y/%m/%d, %H:%M:%S')}}\n\n for param in list(sorted(tosave['setup_config'].keys())):\n print('%s\\t=>%s' % (param, tosave['setup_config'][param]))\n\n attack_list(model_list = model_list, loader = test_loader, attacker = attacker, optimizer = None, out_file = args.out_file,\n device = device, criterion = criterion, tosave = tosave)\n"
},
{
"alpha_fraction": 0.5874886512756348,
"alphanum_fraction": 0.6155938506126404,
"avg_line_length": 25.90243911743164,
"blob_id": "afec7b0e03cb704528ab8c8583586a1915e7b5dc",
"content_id": "a898e6de79bc799cf9b040e3aefabe3cbc673721",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1103,
"license_type": "permissive",
"max_line_length": 89,
"num_lines": 41,
"path": "/analysis/calc_param_distance.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import os\nimport sys\nsys.path.insert(0, './')\nimport argparse\nimport numpy as np\n\nimport torch\nimport torch.nn as nn\nfrom datetime import datetime\n\nif __name__ == '__main__':\n\n parser = argparse.ArgumentParser()\n\n parser.add_argument('--ckpt_file1', type = str, default = None,\n help = 'The checkpoint1, default = None.')\n parser.add_argument('--ckpt_file2', type = str, default = None,\n help = 'The checkpoint2, default = None.')\n\n args = parser.parse_args()\n\n ckpt1 = torch.load(args.ckpt_file1)\n ckpt2 = torch.load(args.ckpt_file2)\n\n keys1 = ckpt1.keys()\n keys2 = ckpt2.keys()\n\n # Check \n [ckpt2[key] for key in keys1]\n [ckpt1[key] for key in keys2]\n\n distance = 0.\n for key in keys1:\n if 'running_mean' in key or 'running_var' in key or 'num_batches_tracked' in key:\n continue\n param1 = ckpt1[key].view(-1).data.cpu().numpy()\n param2 = ckpt2[key].view(-1).data.cpu().numpy()\n distance = distance + np.linalg.norm(param1 - param2) ** 2\n distance = distance ** 0.5\n\n print('Distance = %.4f' % distance)\n"
},
{
"alpha_fraction": 0.49263501167297363,
"alphanum_fraction": 0.5171849131584167,
"avg_line_length": 29.600000381469727,
"blob_id": "f6be27e2465cf9a5c04615daaf1f45c6f5269c3f",
"content_id": "d93440e440d253ffe90c7fa95e0e58ebe127eff8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 611,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 20,
"path": "/util/color.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import random\n\nglobal_color_map = {}\n\ndef get_color(color_idx):\n if color_idx in global_color_map:\n return global_color_map[color_idx]\n\n base_color = ['b', 'y', 'c', 'm', 'g', 'r']\n if color_idx < 6:\n global_color_map[color_idx] = base_color[color_idx]\n return base_color[color_idx]\n else:\n dex = ['0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f']\n ret_color = '#'\n for _ in range(6):\n token_idx = random.randint(0,15)\n ret_color += dex[token_idx]\n global_color_map[color_idx] = ret_color\n return ret_color"
},
{
"alpha_fraction": 0.524148166179657,
"alphanum_fraction": 0.5311111211776733,
"avg_line_length": 46.528167724609375,
"blob_id": "1058b81182fc2fb16ba4e094ba94723a936e2cc9",
"content_id": "8e59b31da569a9623593604f5fe6549397afa629",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6750,
"license_type": "permissive",
"max_line_length": 167,
"num_lines": 142,
"path": "/util/seq_parser.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import numpy as np\n\nh_message = '''\n>>> You can use \"min\" or \"max\" to control the minimum and maximum values.\n>>> constant\ny = c\n>>> linear\ny = start_v + x * slope\n>>> cycle_linear\ny = start_v + slope * (x - LAST_CKPT) / (NEXT_CKPT - LAST_CKPT)\n>>> exp / exponential\ny = start_v * power ** (x / interval)\n>>> cycle_exp\ny = start_v * power ** ((x - LAST_CKPT) / (NEXT_CKPT - LAST_CKPT))\n>>> jump\ny = start_v * power ** (max(idx - min_jump_pt, 0) // jump_freq)\n>>> cycle_jump\ny = start_v * power ** (#DROP_POINT passed in drop_point_list since the latest CKPT in ckpt_list)\n>>> cos_cycle_jump\ny = (cos ( (max(0, x - min_jump_pt) %% cycle_freq) / cycle_freq * Pi) + 1) * (up_v - low_v) / 2 + low_v\n>>> cycle_cos\ny = 1/2 * (eps_max - eps_min) * [1 - cos((x - LAST_CKPT) / (NEXT_CKPT - LAST_CKPT) * pi)] + eps_min\n'''\n\n\ndef continuous_seq(*args, **kwargs):\n '''\n >>> return a float to float mapping\n '''\n name = kwargs['name']\n max_v = kwargs['max'] if 'max' in kwargs else np.inf\n min_v = kwargs['min'] if 'min' in kwargs else -np.inf\n\n if name.lower() in ['h', 'help']:\n print(h_message)\n exit(0)\n elif name.lower() in ['constant',]:\n start_v = float(kwargs['start_v'])\n return lambda x: np.clip(start_v, a_min = min_v, a_max = max_v)\n elif name.lower() in ['linear',]:\n start_v = float(kwargs['start_v'])\n slope = float(kwargs['slope'])\n return lambda x: np.clip(start_v + x * slope, a_min = min_v, a_max = max_v)\n elif name.lower() in ['cycle_linear',]:\n start_v = float(kwargs['start_v'])\n slope = float(kwargs['slope'])\n ckpt_list = list(map(float, kwargs['ckpt_list'].split(':')))\n ckpt_list = list(sorted(ckpt_list))\n if ckpt_list[0] > 0.:\n ckpt_list = [0.,] + ckpt_list\n def local_linear_warmup(start_v, slope, ckpt_list, min_v, max_v, x):\n if x > ckpt_list[-1]:\n return np.clip(start_v + slope, a_min = min_v, a_max = max_v)\n if x <= ckpt_list[0]:\n return np.clip(start_v, a_min = min_v, a_max = max_v)\n for l_ckpt, r_ckpt in zip(ckpt_list[:-1], ckpt_list[1:]):\n if l_ckpt <= x and x < r_ckpt:\n ratio = (x - l_ckpt) / (r_ckpt - l_ckpt)\n return np.clip(start_v + ratio * slope, a_min = min_v, a_max = max_v)\n return lambda x: local_linear_warmup(start_v, slope, ckpt_list, min_v, max_v, x)\n elif name.lower() in ['exp, exponential',]:\n start_v = float(kwargs['start_v'])\n power = float(kwargs['power'])\n interval = int(kwargs['interval']) if 'interval' in kwargs else 1\n return lambda x: np.clip(start_v * power ** (x / float(interval)), a_min = min_v, a_max = max_v)\n elif name.lower() in ['cycle_exp',]:\n start_v = float(kwargs['start_v'])\n power = float(kwargs['power'])\n ckpt_list = list(map(float, kwargs['ckpt_list'].split(':')))\n ckpt_list = list(sorted(ckpt_list))\n if ckpt_list[0] > 0.:\n ckpt_list = [0, ] + ckpt_list\n def local_cycle_exp(start_v, power, min_v, max_v, x):\n if x > ckpt_list[-1]:\n ratio = 1.\n elif x <= ckpt_list[0]:\n ratio = 0.\n else:\n for l_ckpt, r_ckpt in zip(ckpt_list[:-1], ckpt_list[1:]):\n if l_ckpt <= x and x < r_ckpt:\n ratio = (x - l_ckpt) / (r_ckpt - l_ckpt)\n return np.clip(start_v * power ** ratio, a_min = min_v, a_max = max_v)\n return lambda x: local_cycle_exp(start_v, power, min_v, max_v, x)\n elif name.lower() in ['jump',]:\n start_v = float(kwargs['start_v'])\n power = float(kwargs['power'])\n min_jump_pt = int(kwargs['min_jump_pt'])\n jump_freq = int(kwargs['jump_freq'])\n return lambda x: np.clip(start_v * power ** (max(x - min_jump_pt + jump_freq, 0) // jump_freq), a_min = min_v, a_max = max_v)\n elif 
name.lower() in ['cycle_jump',]:\n start_v = float(kwargs['start_v'])\n power = float(kwargs['power'])\n drop_point_list = list(map(float, kwargs['drop_point_list'].split(':')))\n ckpt_list = list(map(float, kwargs['ckpt_list'].split(':')))\n drop_point_list = list(sorted(drop_point_list))\n ckpt_list = list(sorted(ckpt_list))\n def local_cycle_jump(start_v, power, drop_point_list, ckpt_list, max_v, min_v, x):\n assert x >= ckpt_list[0] and x < ckpt_list[-1], 'x = %f should be between %.4f and %.4f as defined' % (x, ckpt_list[0], ckpt_list[-1])\n for l_ckpt, r_ckpt in zip(ckpt_list[:-1], ckpt_list[1:]):\n if l_ckpt <= x and x < r_ckpt:\n ratio = (x - l_ckpt) / (r_ckpt - l_ckpt)\n value = start_v\n for drop_point in drop_point_list:\n if ratio >= drop_point:\n value *= power\n return np.clip(value, a_min = min_v, a_max = max_v)\n return lambda x: local_cycle_jump(start_v, power, drop_point_list, ckpt_list, max_v, min_v, x)\n elif name.lower() in ['cos_cycle_jump',]:\n low_v = float(kwargs['low_v'])\n up_v = float(kwargs['up_v'])\n cycle_freq = float(kwargs['cycle_freq'])\n min_jump_pt = float(kwargs['min_jump_pt'])\n return lambda x: np.clip((np.cos((max(x - min_jump_pt, 0) % cycle_freq) / cycle_freq * np.pi) + 1) * (up_v - low_v) / 2. + low_v, a_min = min_v, a_max = max_v)\n elif name.lower() in ['cycle_cos',]:\n eps_min = float(kwargs['eps_min'])\n eps_max = float(kwargs['eps_max'])\n ckpt_list = list(map(float, kwargs['ckpt_list'].split(':')))\n ckpt_list = list(sorted(ckpt_list))\n if ckpt_list[0] > 0.:\n ckpt_list = [0.,] + ckpt_list\n def local_cos_eps_warmup(eps_min, eps_max, ckpt_list, min_v, max_v, x):\n if x > ckpt_list[-1]:\n return np.clip(eps_max, a_min = min_v, a_max = max_v)\n if x <= ckpt_list[0]:\n return np.clip(eps_min, a_min = min_v, a_max = max_v)\n for l_ckpt, r_ckpt in zip(ckpt_list[:-1], ckpt_list[1:]):\n if l_ckpt <= x and x < r_ckpt:\n ratio = (x - l_ckpt) / (r_ckpt - l_ckpt)\n return np.clip(eps_min + 0.5 * (eps_max - eps_min) * (1 - np.cos(ratio * np.pi)), a_min = min_v, a_max = max_v)\n return lambda x: local_cos_eps_warmup(eps_min, eps_max, ckpt_list, min_v, max_v, x)\n else:\n raise ValueError('Unrecognized name: %s'%name)\n\ndef discrete_seq(*args, **kwargs):\n '''\n >>> return a list of values\n '''\n name = kwargs['name']\n func = continuous_seq(*args, **kwargs)\n\n pt_num = int(kwargs['pt_num'])\n return [func(idx) for idx in range(pt_num)]\n\n"
},
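The scheduler module above is driven entirely by keyword arguments; a minimal usage sketch (assuming `util/seq_parser.py` is importable, as `run/scan_param.py` below does with `discrete_seq`; the concrete values are illustrative):

```python
# Minimal sketch of the kwargs-driven scheduler API above.
from util.seq_parser import continuous_seq, discrete_seq

# Exponential decay: halve the value every 30 steps, floored at 1e-3 via 'min'.
sched = continuous_seq(name='exp', start_v=0.1, power=0.5, interval=30, min=1e-3)
print(sched(0), sched(30), sched(300))  # 0.1, 0.05, then clipped to 1e-3

# The same schedule sampled at integer points 0..99 (pt_num is required here).
values = discrete_seq(name='exp', start_v=0.1, power=0.5, interval=30, pt_num=100)
```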
{
"alpha_fraction": 0.5705445408821106,
"alphanum_fraction": 0.5816831588745117,
"avg_line_length": 25.064516067504883,
"blob_id": "8cbf7230fe7f35e3e9c887883d144a7b1fc7a07b",
"content_id": "6153c6d7e88b0cab680af88603af760b1a9427e8",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 808,
"license_type": "permissive",
"max_line_length": 51,
"num_lines": 31,
"path": "/util/init.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import torch\nimport torch.nn as nn\n\ninit_func = {\n 'xavier_uniform': nn.init.xavier_uniform_,\n 'xavier_normal': nn.init.xavier_normal_,\n 'uniform': nn.init.uniform_,\n 'normal': nn.init.normal_,\n 'kaiming_uniform': nn.init.kaiming_uniform_,\n 'kaiming_normal': nn.init.kaiming_normal_,\n}\n\ndef init(model, init_type, param_type = ['4d',]):\n\n if init_type == None:\n\n return\n\n for param in model.parameters():\n\n if param.dim() == 4 and '4d' in param_type:\n init_func[init_type](param)\n\n if param.dim() == 3 and '3d' in param_type:\n init_func[init_type](param)\n\n if param.dim() == 2 and '2d' in param_type:\n init_func[init_type](param)\n\n if param.dim() == 1 and '1d' in param_type:\n init_func[init_type](param)\n"
},
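A hedged example of applying the initializer above; the two-layer model is a stand-in, not one of this repo's architectures:

```python
# Sketch: initialize only 4d (conv) and 2d (linear) weights, leaving 1d biases alone.
import torch.nn as nn
from util.init import init

model = nn.Sequential(nn.Conv2d(3, 16, 3), nn.Flatten(), nn.Linear(16 * 30 * 30, 10))
init(model, 'kaiming_normal', param_type=('4d', '2d'))
init(model, None)  # init_type = None is a documented no-op
```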
{
"alpha_fraction": 0.631114661693573,
"alphanum_fraction": 0.640737771987915,
"avg_line_length": 43.92792892456055,
"blob_id": "e651d685943383330090a79912abb5406b6e7e65",
"content_id": "57c4abc7d332df5b4c5adadc28ffd680a7c32055",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4988,
"license_type": "permissive",
"max_line_length": 172,
"num_lines": 111,
"path": "/run/calc_hessian.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import os\nimport sys\nsys.path.insert(0, './')\nimport pickle\nimport argparse\nimport numpy as np\n\nimport torch\nimport torch.nn as nn\nfrom datetime import datetime\n\nfrom util.attack import parse_attacker\nfrom util.models import MNIST_LeNet, CIFAR10_LeNet, CIFAR10_VGG, CIFAR10_ResNet\nfrom util.dataset import mnist, cifar10\nfrom util.device_parser import config_visible_gpu\nfrom util.hessian import calc_hessian_eigen_full_dataset\nfrom util.param_parser import DictParser, IntListParser, FloatListParser, BooleanParser\n\nif __name__ == '__main__':\n\n parser = argparse.ArgumentParser()\n\n parser.add_argument('--dataset', type = str, default = 'mnist',\n help = 'Which dataset to use, default = \"mnist\".')\n parser.add_argument('--subset', type = str, default = 'train',\n help = 'Use training or test set, default = \"train\".')\n parser.add_argument('--batch_size', type = int, default = 500,\n help = 'The batch size, default = 500.')\n\n parser.add_argument('--model_type', type = str, default = 'lenet',\n help = 'The model type, default = \"lenet\", supported = [\"lenet\", \"vgg\", \"resnet\"].')\n parser.add_argument('--width', type = int, default = 8,\n help = 'The width of MNIST_LeNet, default = 8.')\n parser.add_argument('--bias', action = BooleanParser, default = True,\n help = 'Whether or not use bias term, default = True.')\n parser.add_argument('--model2load', type = str, default = None,\n help = 'The model to be loaded, default = None.')\n\n parser.add_argument('--out_file', type = str, default = None,\n help = 'The output file.')\n\n parser.add_argument('--attack', action = DictParser, default = None,\n help = 'Play adversarial attack or not, default = None.')\n\n parser.add_argument('--topk', type = int, default = 1,\n help = 'The number of hessian vectors & values to calculate, default = 1.')\n parser.add_argument('--max_iter', type = int, default = 50,\n help = 'The number of maximum iterations in power iterations, default = 50.')\n parser.add_argument('--tol', type = float, default = 1e-3,\n help = 'The precise tolerence, default = 1e-3.')\n\n parser.add_argument('--gpu', type = str, default = None,\n help = 'Specify the GPU to use, default = None.')\n\n args = parser.parse_args()\n\n # Configure GPU\n config_visible_gpu(args.gpu)\n use_gpu = args.gpu != 'cpu' and torch.cuda.is_available()\n device = torch.device('cuda:0' if use_gpu else 'cpu')\n\n # Dataset\n if args.dataset.lower() in ['mnist',]:\n train_loader, test_loader, classes = mnist(batch_size = args.batch_size)\n elif args.dataset.lower() in ['cifar10',]:\n train_loader, test_loader, classes = cifar10(batch_size = args.batch_size)\n else:\n raise ValueError('Unrecognized dataset: %s' % args.dataset)\n loader = {'train': train_loader, 'test': test_loader}[args.subset.lower()]\n\n # Parse IO\n if args.out_file != None:\n dir_name = os.path.dirname(args.out_file)\n if dir_name != '' and not os.path.exists(dir_name):\n os.makedirs(dir_name)\n else:\n print('WARNING: The results will NOT be saved!')\n\n # Parse model\n if args.dataset.lower() in ['mnist',]:\n assert args.model_type.lower() in ['lenet',], 'For MNIST, only LeNet is supported'\n model = MNIST_LeNet(width = args.width, bias = args.bias)\n elif args.dataset.lower() in ['cifar10',]:\n if args.model_type.lower() in ['lenet',]:\n model = CIFAR10_LeNet(width = args.width, bias = args.bias)\n elif args.model_type.lower() in ['vgg',]:\n model = CIFAR10_VGG(width = args.width, bias = args.bias)\n elif args.model_type.lower() in ['resnet',]:\n model 
= CIFAR10_ResNet(width = args.width)\n if args.bias == True:\n print('WARNING: ResNet18 does not have bias term in its layers.')\n else:\n raise ValueError('Invalid model_type: %s' % args.model_type)\n else:\n raise ValueError('Unrecognized dataset: %s' % args.dataset)\n model = model.cuda() if use_gpu else model\n criterion = nn.CrossEntropyLoss()\n criterion = criterion.cuda() if use_gpu else criterion\n assert os.path.exists(args.model2load), 'File %s does not exist!' % args.model2load\n ckpt2load = torch.load(args.model2load)\n model.load_state_dict(ckpt2load)\n\n # Parse the optimizer\n attacker = None if args.attack == None else parse_attacker(**args.attack)\n\n # Prepare the item to save\n configs = {kwargs: value for kwargs, value in args._get_kwargs()}\n tosave = {'model_summary': str(model), 'setup_config': configs, 'eigenvalue_list': None, 'eigenvec_list': None,\n 'log': {'cmd': 'python ' + ' '.join(sys.argv), 'time': datetime.now().strftime('%Y/%m/%d, %H:%M:%S')}}\n\n eigenvalue_list, eigenvec_list = calc_hessian_eigen_full_dataset(model, loader, criterion, tosave, args.out_file, use_gpu, attacker, args.topk, args.max_iter, args.tol)\n\n"
},
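The script defers the actual computation to `util.hessian.calc_hessian_eigen_full_dataset`, whose source is not included in this dump. For orientation, the building block of the power iteration that `--topk`, `--max_iter`, and `--tol` control is a Hessian-vector product; a generic double-backprop sketch (not the project's implementation) looks like this:

```python
import torch

def hvp(loss, params, vec):
    # Hessian-vector product: differentiate the gradient/vector dot product again.
    grads = torch.autograd.grad(loss, params, create_graph=True)
    dot = sum((g * v).sum() for g, v in zip(grads, vec))
    return torch.autograd.grad(dot, params)
```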
{
"alpha_fraction": 0.6357515454292297,
"alphanum_fraction": 0.6594265699386597,
"avg_line_length": 43.25,
"blob_id": "250867ecd0b2d14b4ec3d8c27766e18fafcb0395",
"content_id": "d51b5220099e2fe2b863002e663a40e3d19c7db3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4604,
"license_type": "permissive",
"max_line_length": 139,
"num_lines": 104,
"path": "/analysis/analyze_adversary.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import os\nimport sys\nsys.path.insert(0, './')\nimport pickle\nimport argparse\nimport numpy as np\n\nimport torch\nimport torch.nn as nn\nfrom datetime import datetime\n\nfrom util.dataset import mnist, cifar10\nfrom util.param_parser import DictParser, ListParser, IntListParser, FloatListParser, BooleanParser\n\ndef compare_adversary(train_ori_data, test_ori_data, adv1_info, adv2_info):\n\n train_adv1_data = adv1_info['train_adv']\n train_adv2_data = adv2_info['train_adv']\n test_adv1_data = adv1_info['test_adv']\n test_adv2_data = adv2_info['test_adv']\n\n train_delta1_data = train_adv1_data - train_ori_data\n train_delta2_data = train_adv2_data - train_ori_data\n test_delta1_data = test_adv1_data - test_ori_data\n test_delta2_data = test_adv2_data - test_ori_data\n\n train_delta1_sign_data = np.sign(train_delta1_data)\n train_delta2_sign_data = np.sign(train_delta2_data)\n test_delta1_sign_data = np.sign(test_delta1_data)\n test_delta2_sign_data = np.sign(test_delta2_data)\n\n # File1\n train_delta1_norm = np.linalg.norm(train_delta1_data, axis = 1)\n test_delta1_norm = np.linalg.norm(test_delta1_data, axis = 1)\n train_delta1_sign_norm = np.linalg.norm(np.sign(train_delta1_data), axis = 1)\n test_delta1_sign_norm = np.linalg.norm(np.sign(test_delta1_data), axis = 1)\n\n # File2\n train_delta2_norm = np.linalg.norm(train_delta2_data, axis = 1)\n test_delta2_norm = np.linalg.norm(test_delta2_data, axis = 1)\n train_delta2_sign_norm = np.linalg.norm(np.sign(train_delta2_data), axis = 1)\n test_delta2_sign_norm = np.linalg.norm(np.sign(test_delta2_data), axis = 1)\n\n # Compare\n train_cosine = np.sum(train_delta1_data * train_delta2_data, axis = 1) / train_delta1_norm / train_delta2_norm\n test_cosine = np.sum(test_delta1_data * test_delta2_data, axis = 1) / test_delta1_norm / test_delta2_norm\n train_sign_cosine = np.sum(train_delta1_sign_data * train_delta2_sign_data, axis = 1) / train_delta1_sign_norm / train_delta2_sign_norm\n test_sign_cosine = np.sum(test_delta1_sign_data * test_delta2_sign_data, axis = 1) / test_delta1_sign_norm / test_delta2_sign_norm\n\n print('The cosine similarity in the training set: %.4f' % (train_cosine.mean()))\n print('The cosine similarity in the test set: %.4f' % (test_cosine.mean()))\n print('The signed cosine similarity in the training set: %.4f' % (train_sign_cosine.mean()))\n print('The signed cosine similarity in the test set: %.4f' % (test_sign_cosine.mean()))\n\n\nif __name__ == '__main__':\n\n parser = argparse.ArgumentParser()\n\n parser.add_argument('--batch_size', type = int, default = 100,\n help = 'The batch size, default = 100')\n parser.add_argument('--dataset', type = str, default = 'mnist',\n help = 'The dataset we use')\n\n parser.add_argument('--folder', type = str, default = None,\n help = 'The folder to be scanned')\n\n args = parser.parse_args()\n\n if args.dataset.lower() in ['mnist',]:\n train_loader, test_loader, classes = mnist(batch_size = args.batch_size, shuffle = False, data_augmentation = False)\n elif args.dataset.lower() in ['cifar10',]:\n train_loader, test_loader, classes = cifar10(batch_size = args.batch_size, shuffle = False, data_augmentation = False)\n else:\n raise ValueError('Unrecognized dataset: %s' % args.dataset)\n\n train_ori_data = []\n test_ori_data = []\n\n for idx, (data_batch, label_batch) in enumerate(train_loader, 0):\n data_batch = data_batch.reshape(data_batch.size(0), -1)\n train_ori_data.append(data_batch.data.cpu().numpy())\n for idx, (data_batch, label_batch) in enumerate(test_loader, 
0):\n data_batch = data_batch.reshape(data_batch.size(0), -1)\n test_ori_data.append(data_batch.data.cpu().numpy())\n\n train_ori_data = np.concatenate(train_ori_data, axis = 0)\n test_ori_data = np.concatenate(test_ori_data, axis = 0)\n\n adv_info_list = []\n adv_f_list = []\n for f in os.listdir(args.folder):\n if os.path.isfile(args.folder + os.sep + f) and f.endswith('pkl'):\n adv_info = pickle.load(open(args.folder + os.sep + f, 'rb'))\n adv_info_list.append(adv_info)\n adv_f_list.append(args.folder + os.sep + f)\n\n adv_info_list_len = len(adv_info_list)\n for idx1 in range(adv_info_list_len):\n for idx2 in range(idx1 + 1, adv_info_list_len):\n print('File 1 = %s' % adv_f_list[idx1])\n print('File 2 = %s' % adv_f_list[idx2])\n\n compare_adversary(train_ori_data, test_ori_data, adv1_info = adv_info_list[idx1], adv2_info = adv_info_list[idx2])\n\n\n"
},
{
"alpha_fraction": 0.6392714381217957,
"alphanum_fraction": 0.6481564044952393,
"avg_line_length": 43.56435775756836,
"blob_id": "42240c359b21059c38f279bda79681c5c94e2c56",
"content_id": "acbe40618d28f33f368dfba776acfc840463140e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4502,
"license_type": "permissive",
"max_line_length": 153,
"num_lines": 101,
"path": "/run/scan_curve.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import os\nimport sys\nsys.path.insert(0, './')\nimport argparse\nimport numpy as np\n\nimport torch\nimport torch.nn as nn\nfrom datetime import datetime\n\nfrom util.attack import parse_attacker\nfrom util.models import MNIST_LeNet, CIFAR10_ResNet, Curve_MNIST_LeNet, Curve_CIFAR10_ResNet\nfrom util.curves import curve_scan\nfrom util.dataset import mnist, cifar10\nfrom util.optim_parser import parse_optim\nfrom util.device_parser import config_visible_gpu\nfrom util.param_parser import DictParser, ListParser, IntListParser, FloatListParser, BooleanParser\n\nif __name__ == '__main__':\n\n parser = argparse.ArgumentParser()\n\n parser.add_argument('--dataset', type = str, default = 'cifar10',\n help = 'The dataset used, default = \"cifar10\"')\n parser.add_argument('--batch_size', type = int, default = 100,\n help = 'The batch size, default = 100')\n\n parser.add_argument('--model_type', type = str, default = 'resnet',\n help = 'The model type, default = \"lenet\", supported = [\"lenet\", \"resnet\"]')\n parser.add_argument('--width', type = int, default = 8,\n help = 'The width of MNIST_LeNet, default = 8')\n parser.add_argument('--bias', action = BooleanParser, default = True,\n help = 'Whether or not use bias term, default = True')\n parser.add_argument('--fix_points', action = IntListParser, default = None,\n help = 'The fix points flag in the curve, 0 means unfixed and non-zero means fixed.')\n parser.add_argument('--model2load', type = str, default = None,\n help = 'The model to be loaded as the fix point, default = None')\n\n parser.add_argument('--curve_type', type = str, default = 'bezier',\n help = 'The type of the curve, default = \"bezier\".')\n\n parser.add_argument('--out_folder', type = str, default = None,\n help = 'The output folder')\n parser.add_argument('--model_name', type = str, default = None,\n help = 'The name of the model')\n\n parser.add_argument('--step_size', type = float, default = 0.02,\n help = 'The size of the step, default = 0.02')\n\n parser.add_argument('--attack', action = DictParser, default = None,\n help = 'Play adversarial attack or not, default = None.')\n\n parser.add_argument('--gpu', type = str, default = None,\n help = 'Specify the GPU to use, default = None')\n\n args = parser.parse_args()\n\n # Configure GPU\n config_visible_gpu(args.gpu)\n use_gpu = args.gpu != 'cpu' and torch.cuda.is_available()\n device = torch.device('cuda:0' if use_gpu else 'cpu')\n\n # Dataset and model\n fix_points = [pt != 0 for pt in args.fix_points]\n criterion = nn.CrossEntropyLoss()\n if args.dataset.lower() in ['mnist',]:\n train_loader, test_loader, classes = mnist(batch_size = args.batch_size)\n if args.model_type.lower() in ['lenet',]:\n model = Curve_MNIST_LeNet(fix_points, width = args.width, bias = args.bias)\n else:\n raise ValueError('Unrecognized model type: %s' % args.model_type)\n elif args.dataset.lower() in ['cifar10',]:\n train_loader, test_loader, classes = cifar10(batch_size = args.batch_size)\n if args.model_type.lower() in ['resnet',]:\n model = Curve_CIFAR10_ResNet(fix_points, width = args.width)\n else:\n raise ValueError('Unrecognized model type: %s' % args.model_type)\n else:\n raise ValueError('Invalid dataset: %s' % args.dataset)\n\n model = model.cuda() if use_gpu else model\n criterion = criterion.cuda() if use_gpu else criterion\n\n ckpt2load = torch.load(args.model2load)\n model.load_state_dict(ckpt2load)\n\n # Parse IO\n if not os.path.exists(args.out_folder):\n os.makedirs(args.out_folder)\n\n # Parse the attacker and 
t_list\n attacker = None if args.attack == None else parse_attacker(**args.attack)\n t_list = np.arange(0, 1 + args.step_size, args.step_size)\n\n # Prepare the item to save\n configs = {kwargs: value for kwargs, value in args._get_kwargs()}\n tosave = {'model_summary': str(model), 'setup_config': configs, 'train_loss': {}, 'test_loss': {}, 'train_acc': {}, 'test_acc':{},\n 'log': {'cmd': 'python ' + ' '.join(sys.argv), 'time': datetime.now().strftime('%Y/%m/%d, %H:%M:%S')}}\n\n curve_scan(model = model, curve_type = args.curve_type, t_list = t_list, train_loader = train_loader, test_loader = test_loader, attacker = attacker,\n out_folder = args.out_folder, model_name = args.model_name, device = device, criterion = criterion, tosave = tosave)\n\n"
},
{
"alpha_fraction": 0.5697950720787048,
"alphanum_fraction": 0.5834286212921143,
"avg_line_length": 37.53898239135742,
"blob_id": "c5bce13bfe1797e18c955449b59047386c36b6ce",
"content_id": "b2e741bbd4174fd7dc6d6494fe0e92c14ab8761c",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 11369,
"license_type": "permissive",
"max_line_length": 149,
"num_lines": 295,
"path": "/util/attack.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import os\nimport sys\nsys.path.insert(0, './')\n\nimport numpy as np\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom external.auto_attack.autopgd_pt import APGDAttack\nfrom external.auto_attack.fab_pt import FABAttack\nfrom external.auto_attack.square_pt import SquareAttack\n\nh_message = '''\n>>> PGD(step_size, threshold, iter_num, order = np.inf)\n>>> APGD(threshold, iter_num, rho, loss_type, alpha = 0.75, order = np.inf)\n>>> Square(threshold, window_size_factor, iter_num, order = np.inf)\n'''\n\ndef parse_attacker(name, **kwargs):\n\n if name.lower() in ['h', 'help']:\n print(h_message)\n exit(0)\n elif name.lower() in ['pgd',]:\n return PGD(**kwargs)\n elif name.lower() in ['apgd',]:\n return APGD(**kwargs)\n elif name.lower() in ['square',]:\n return Square(**kwargs)\n else:\n raise ValueError('Unrecognized name of the attacker: %s' % name)\n\ndef project(ori_pt, threshold, order = np.inf):\n '''\n Project the data into a norm ball\n\n >>> ori_pt: the original point\n >>> threshold: maximum norms allowed\n >>> order: norm used\n '''\n\n if order in [np.inf,]:\n prj_pt = torch.clamp(ori_pt, min = - threshold, max = threshold) \n elif order in [2,]:\n ori_shape = ori_pt.size()\n pt_norm = torch.norm(ori_pt.view(ori_shape[0], -1), dim = 1, p = 2)\n pt_norm_clip = torch.clamp(pt_norm, max = threshold)\n prj_pt = ori_pt.view(ori_shape[0], -1) / (pt_norm.view(-1, 1) + 1e-8) * (pt_norm_clip.view(-1, 1) + 1e-8)\n prj_pt = prj_pt.view(ori_shape)\n else:\n raise ValueError('Invalid norms: %s' % order)\n\n return prj_pt\n\nclass PGD(object):\n\n def __init__(self, step_size, threshold, iter_num, order = np.inf):\n\n self.step_size = step_size if step_size < 1. else step_size / 255.\n self.threshold = threshold if threshold < 1. else threshold / 255.\n self.iter_num = int(iter_num)\n self.order = order if order > 0 else np.inf\n\n self.meta_threshold = self.threshold\n self.meta_step_size = self.step_size\n\n print('Create a PGD attack')\n print('step_size = %1.2e, threshold = %1.2e, iter_num = %d, order = %f' % (\n self.step_size, self.threshold, self.iter_num, self.order))\n\n def adjust_threshold(self, threshold):\n\n threshold = threshold if threshold < 1. 
else threshold / 255.\n\n self.step_size = self.meta_step_size * threshold / (self.meta_threshold + 1e-6)\n self.threshold = threshold\n\n print('Attacker adjusted, threshold = %1.2e, step_size = %1.2e' % (self.threshold, self.step_size))\n\n def attack(self, model, optim, data_batch, label_batch, criterion):\n\n data_batch = data_batch.detach()\n label_batch = label_batch.detach()\n device = data_batch.device\n\n if self.threshold < 1e-6:\n return data_batch, label_batch\n\n ori_batch = data_batch.detach()\n\n # Initial perturbation\n step_size = self.step_size\n noise = project(ori_pt = (torch.rand(data_batch.shape, device = device) * 2 - 1) * step_size, threshold = self.threshold, order = self.order)\n data_batch = torch.clamp(data_batch + noise, min = 0., max = 1.)\n data_batch = data_batch.detach().requires_grad_()\n\n for iter_idx in range(self.iter_num):\n\n logits = model(data_batch)\n loss = criterion(logits, label_batch)\n _, prediction = logits.max(dim = 1)\n indicator_vec = (prediction == label_batch).float()\n\n loss.backward()\n grad = data_batch.grad.data\n\n step_size = self.step_size\n if self.order == np.inf:\n next_point = data_batch + step_size * torch.sign(grad)\n elif self.order == 2:\n ori_shape = data_batch.size()\n grad_norm = torch.norm(grad.view(ori_shape[0], -1), dim = 1, p = 2)\n perb = step_size * (grad.view(ori_shape[0], -1) + 1e-8) / (grad_norm.view(-1, 1) + 1e-8)\n next_point = data_batch + perb.view(ori_shape)\n else:\n raise ValueError('Invalid norm: %s' % str(self.order))\n\n next_point = ori_batch + project(ori_pt = next_point - ori_batch, threshold = self.threshold, order = self.order)\n next_point = torch.clamp(next_point, min = 0., max = 1.)\n\n data_batch = next_point.detach().requires_grad_()\n\n model.zero_grad()\n\n return data_batch, label_batch\n\n def attack_list(self, model_list, optim, data_batch, label_batch, criterion):\n\n data_batch = data_batch.detach()\n label_batch = label_batch.detach()\n device = data_batch.device\n\n if self.threshold < 1e-6:\n return data_batch, label_batch\n\n ori_batch = data_batch.detach()\n\n # Initial perturbation\n step_size = self.step_size\n noise = project(ori_pt = (torch.rand(data_batch.shape, device = device) * 2 - 1) * step_size, threshold = self.threshold, order = self.order)\n data_batch = torch.clamp(data_batch + noise, min = 0., max = 1.)\n data_batch = data_batch.detach().requires_grad_()\n\n for iter_idx in range(self.iter_num):\n\n prob_sum = 0.\n for model in model_list:\n logits = model(data_batch)\n prob_sum = prob_sum + F.softmax(logits)\n\n prob = prob_sum / len(model_list)\n loss = - torch.log(prob).gather(dim = 1, index = label_batch.view(-1, 1)).view(-1).mean()\n _, prediction = prob.max(dim = 1)\n indicator_vec = (prediction == label_batch).float()\n\n loss.backward()\n grad = data_batch.grad.data\n\n step_size = self.step_size\n\n if self.order == np.inf:\n next_point = data_batch + step_size * torch.sign(grad)\n elif self.order == 2:\n ori_shape = data_batch.size()\n grad_norm = torch.norm(grad.view(ori_shape[0], -1), dim = 1, p = 2)\n perb = step_size * (grad.view(ori_shape[0], -1) + 1e-8) / (grad_norm.view(-1, 1) + 1e-8)\n next_point = data_batch + perb.view(ori_shape)\n else:\n raise ValueError('Invalid norm: %s' % str(self.order))\n\n next_point = ori_batch + project(ori_pt = next_point - ori_batch, threshold = self.threshold, order = self.order)\n next_point = torch.clamp(next_point, min = 0., max = 1.)\n\n data_batch = next_point.detach().requires_grad_()\n\n for model in 
model_list:\n model.zero_grad()\n\n return data_batch, label_batch\n\n def attack_curve(self, model, optim, data_batch, label_batch, criterion, coeffs):\n\n data_batch = data_batch.detach()\n label_batch = label_batch.detach()\n device = data_batch.device\n\n if self.threshold < 1e-6:\n return data_batch, label_batch\n\n ori_batch = data_batch.detach()\n\n # Initial perturbation\n step_size = self.step_size\n noise = project(ori_pt = (torch.rand(data_batch.shape, device = device) * 2 - 1) * step_size, threshold = self.threshold, order = self.order)\n data_batch = torch.clamp(data_batch + noise, min = 0., max = 1.)\n data_batch = data_batch.detach().requires_grad_()\n\n for iter_idx in range(self.iter_num):\n\n logits = model(data_batch, coeffs)\n loss = criterion(logits, label_batch)\n _, prediction = logits.max(dim = 1)\n indicator_vec = (prediction == label_batch).float()\n\n loss.backward()\n grad = data_batch.grad.data\n\n step_size = self.step_size\n if self.order == np.inf:\n next_point = data_batch + step_size * torch.sign(grad)\n elif self.order ==2:\n ori_shape = data_batch.size()\n grad_norm = torch.norm(grad.view(ori_shape[0], -1), dim = 1, p = 2)\n perb = step_size * (grad.view(ori_shape[0], -1) + 1e-8) / (grad_norm.view(-1, 1) + 1e-8)\n next_point = data_batch + perb.view(ori_shape)\n else:\n raise ValueError('Invalid norm: %s' % str(self.order))\n\n next_point = ori_batch + project(ori_pt = next_point - ori_batch, threshold = self.threshold, order = self.order)\n next_point = torch.clamp(next_point, min = 0., max = 1.)\n\n data_batch = next_point.detach().requires_grad_()\n\n optim.zero_grad()\n\n return data_batch, label_batch\n\nclass APGD(object):\n\n def __init__(self, threshold, iter_num, rho, loss_type = 'ce', alpha = 0.75, order = np.inf):\n\n self.order = order if order > 0 else np.inf\n self.threshold = threshold if threshold < 1. or self.order != np.inf else threshold / 255.\n self.step_size = self.threshold * 2\n self.iter_num = int(iter_num)\n self.rho = rho\n self.alpha = alpha\n self.loss_type = loss_type\n\n self.meta_threshold = self.threshold\n self.meta_step_size = self.step_size\n\n print('Create a Auto-PGD attacker')\n print('step_size = %1.2e, threshold = %1.2e, iter_num = %d, rho = %.4f, alpha = %.4f, order = %f' % (\n self.step_size, self.threshold, self.iter_num, self.rho, self.alpha, self.order))\n print('loss type = %s' % self.loss_type)\n\n def adjust_threshold(self, threshold):\n\n threshold = threshold if threshold < 1. or self.order != np.inf else threshold / 255.\n\n self.step_size = self.meta_step_size * threshold / (self.meta_threshold + 1e-6)\n self.threshold = threshold\n\n def attack(self, model, optim, data_batch, label_batch, criterion):\n\n norm = {np.inf: 'Linf', 2: 'L2'}[self.order]\n\n attacker = APGDAttack(model, n_restarts = 5, n_iter = self.iter_num, verbose=False, eps = self.threshold,\n norm = norm, eot_iter = 1, rho = self.rho, seed = time.time(), loss = self.loss_type, device = data_batch.device)\n\n _, adv_data_batch = attacker.perturb(data_batch, label_batch, cheap = True)\n\n return adv_data_batch.detach(), label_batch\n\nclass Square(object):\n\n def __init__(self, threshold, window_size_factor, iter_num, order = np.inf):\n\n self.order = order if order > 0 else np.inf\n self.threshold = threshold if threshold < 1. 
or self.order != np.inf else threshold / 255.\n self.window_size_factor = window_size_factor\n self.iter_num = int(iter_num)\n\n print('Create a Square attacker')\n print('threshold = %1.2e, window_size_factor = %d, iter_num = %d, order = %s' % (\n self.threshold, self.window_size_factor, self.iter_num, self.order))\n\n def adjust_threshold(self, threshold):\n\n threshold = threshold if threshold < 1. or self.order != np.inf else threshold / 255.\n self.threshold = threshold\n \n def attack(self, model, optim, data_batch, label_batch, criterion):\n\n norm = {np.inf: 'Linf', 2: 'L2'}[self.order]\n\n attacker = SquareAttack(model, p_init = 0.8, n_queries = self.iter_num, eps = self.threshold, norm = norm,\n n_restarts = 1, seed = time.time(), verbose = False, device = data_batch.device, resc_schedule = False)\n\n adv_data_batch = attacker.perturb(data_batch, label_batch)\n\n return adv_data_batch.detach(), label_batch\n"
},
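A self-contained smoke test of the PGD path above; the two-layer model and random data are stand-ins (inputs must already lie in [0, 1], which the random batch satisfies):

```python
import numpy as np
import torch
import torch.nn as nn
from util.attack import parse_attacker

attacker = parse_attacker('pgd', step_size=2, threshold=8, iter_num=10, order=np.inf)
model = nn.Sequential(nn.Flatten(), nn.Linear(3 * 32 * 32, 10))  # stand-in classifier
criterion = nn.CrossEntropyLoss()
data = torch.rand(8, 3, 32, 32)
labels = torch.randint(0, 10, (8,))
adv, labels = attacker.attack(model, None, data, labels, criterion)
print((adv - data).abs().max())  # perturbation stays within the 8/255 threshold
```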
{
"alpha_fraction": 0.6642857193946838,
"alphanum_fraction": 0.668571412563324,
"avg_line_length": 25.884614944458008,
"blob_id": "6145cc9ba9ab073c2d798f4a4cd48fc3047f69b5",
"content_id": "9b4890a431ddc3ae2a26c9d998b66719f4d38bae",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 700,
"license_type": "permissive",
"max_line_length": 88,
"num_lines": 26,
"path": "/util/io.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import os\nimport sys\nimport copy\nimport pickle\nimport numpy as np\nfrom collections import OrderedDict\n\nimport torch\nimport torch.nn as nn\n\ndef eigenvec2ckpt(model, eigen_info_file, index, use_gpu):\n\n device = torch.device('cuda:0' if use_gpu == True else 'cpu')\n\n eigen_info = pickle.load(open(eigen_info_file, 'rb'))\n eigenvec = eigen_info['eigenvec_list'][index]\n\n vec = OrderedDict()\n for idx, (name, param) in enumerate(model.named_parameters()):\n tensor = torch.from_numpy(eigenvec[idx]).float().to(device)\n vec[name] = tensor\n\n saved_name = ''.join(eigen_info_file.rsplit('pkl', 1)) + '_vec_%d' % index + '.ckpt'\n torch.save(vec, saved_name)\n\n return vec\n\n"
},
{
"alpha_fraction": 0.6186957359313965,
"alphanum_fraction": 0.6360689997673035,
"avg_line_length": 46.30674743652344,
"blob_id": "ed721f2c06698420a3543ca73b915cd222abaa8e",
"content_id": "e5d069dd4d70da3d68f112bd9720aba594b9156b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 7713,
"license_type": "permissive",
"max_line_length": 161,
"num_lines": 163,
"path": "/run/scan_param.py",
"repo_name": "franciscoliu/AdversaryLossLandscape",
"src_encoding": "UTF-8",
"text": "import os\nimport sys\nsys.path.insert(0, './')\nimport json\nimport pickle\nimport argparse\nimport numpy as np\n\nimport torch\nimport torch.nn as nn\nfrom datetime import datetime\n\nfrom util.io import eigenvec2ckpt\nfrom util.attack import parse_attacker\nfrom util.models import MNIST_LeNet, CIFAR10_LeNet, CIFAR10_VGG, CIFAR10_ResNet\nfrom util.dataset import mnist, cifar10\nfrom util.seq_parser import discrete_seq\nfrom util.device_parser import config_visible_gpu\nfrom util.param_scanner import generate_vec, param_scan\nfrom util.param_parser import DictParser, IntListParser, FloatListParser, BooleanParser\n\nif __name__ == '__main__':\n\n parser = argparse.ArgumentParser()\n\n parser.add_argument('--dataset', type = str, default = 'cifar10',\n help = 'The dataset to use, default = \"cifar10\"')\n parser.add_argument('--batch_size', type = int, default = 100,\n help = 'The batch size, default = 100')\n parser.add_argument('--subset', type = str, default = 'train',\n help = 'Which subset is used, default = \"train\"')\n\n parser.add_argument('--model_type', type = str, default = 'resnet',\n help = 'The type of the model, default = \"resnet\"')\n parser.add_argument('--width', type = int, default = 16,\n help = 'The width of MNIST_LeNet, default = 16')\n parser.add_argument('--bias', type = BooleanParser, default = True,\n help = 'Whether or not to use bias term, default = True')\n\n parser.add_argument('--out_folder', type = str, default = None,\n help = 'The output folder')\n parser.add_argument('--model_name', type = str, default = None,\n help = 'The name of the model')\n\n parser.add_argument('--attack', action = DictParser,\n default = {'step_size': 2, 'threshold': 8, 'iter_num': 10, 'order': -1},\n help = 'Play adversarial attack or not, default = step_size=2,threshold=8,iter_num=10,order=-1')\n parser.add_argument('--adv_budget_list', action = DictParser, default = None,\n help = 'The list of adversarial budget used, default = None.')\n\n parser.add_argument('--model2load', type = str, default = None,\n help = 'The model to be loaded, default = None')\n\n parser.add_argument('--vec1_scan', action = DictParser,\n default = {'min': -1., 'max': 1. 
, 'step': 0.05, 'adv_calc_freq': 1},\n help = 'The configuration of vec1, default = min=-1.,max=1.,step=0.1,adv_calc_freq=1')\n parser.add_argument('--vec2_scan', action = DictParser, default = None,\n help = 'The configuration of vec2, default = None')\n parser.add_argument('--load_vec1', type = str, default = None,\n help = 'The file to load vec1, default = None')\n parser.add_argument('--load_vec2', type = str, default = None,\n help = 'The file to load vec2, default = None')\n parser.add_argument('--vec_sample_mode', type = str, default = 'normalized',\n help = 'The way to generate random directions, default = \"normalized\"')\n parser.add_argument('--vec_scale', type = float, default = 1.,\n help = 'The scale of direction vector, default = 1.')\n\n parser.add_argument('--gpu', type = str, default = None,\n help = 'Specify the GPU to use, default = None')\n\n args = parser.parse_args()\n\n # Configure GPU\n config_visible_gpu(args.gpu)\n use_gpu = args.gpu != 'cpu' and torch.cuda.is_available()\n device = torch.device('cuda:0' if use_gpu else 'cpu')\n\n # Parse IO\n if not os.path.exists(args.out_folder):\n os.makedirs(args.out_folder)\n\n # Parse model and dataset\n if args.dataset.lower() in ['cifar10',]:\n train_loader, test_loader, classes = cifar10(batch_size = args.batch_size)\n loader = {'train': train_loader, 'test': test_loader}[args.subset]\n if args.model_type.lower() in ['lenet',]:\n model = CIFAR10_LeNet(width = args.width, bias = args.bias)\n elif args.model_type.lower() in ['vgg',]:\n model = CIFAR10_VGG(width = args.width, bias = args.bias)\n elif args.model_type.lower() in ['resnet',]:\n model = CIFAR10_ResNet(width = args.width)\n if args.bias == True:\n print('WARNING: ResNet18 does not have bias term in its layers.')\n else:\n raise ValueError('Invalid model_type: %s' % args.model_type)\n elif args.dataset.lower() in ['mnist',]:\n train_loader, test_loader, classes = mnist(batch_size = args.batch_size)\n loader = {'train': train_loader, 'test': test_loader}[args.subset]\n if args.model_type.lower() in ['lenet',]:\n model = MNIST_LeNet(width = args.width, bias = args.bias)\n else:\n raise ValueError('Invalid model_type: %s' % args.model_type)\n else:\n raise ValueError('Invalid dataset: %s' % args.dataset)\n model = model.cuda() if use_gpu else model\n criterion = nn.CrossEntropyLoss()\n criterion = criterion.cuda() if use_gpu else criterion\n assert os.path.exists(args.model2load), 'File %s does not exist!' 
% args.model2load\n ckpt2load = torch.load(args.model2load)\n model.load_state_dict(ckpt2load)\n\n # Parse vectors\n if args.load_vec1 is None:\n vec1 = generate_vec(model = model, mode = args.vec_sample_mode, scale = args.vec_scale)\n print('vec1 is generated!')\n else:\n if args.load_vec1.endswith('ckpt'):\n vec1 = torch.load(args.load_vec1)\n elif args.load_vec1.endswith('pkl'):\n vec1 = eigenvec2ckpt(model = model, eigen_info_file = args.load_vec1, index = 0, use_gpu = use_gpu)\n else:\n raise ValueError('Unrecognized format: %s' % args.load_vec1)\n print('vec1 is loaded!')\n\n if args.load_vec2 is None and args.vec2_scan != None:\n vec2 = generate_vec(model = model, mode = args.vec_sample_mode, scale = args.vec_scale)\n print('vec2 is generated!')\n elif args.vec2_scan != None:\n if args.load_vec2.endswith('ckpt'):\n vec2 = torch.load(args.load_vec2)\n elif args.load_vec2.endswith('pkl'):\n vec2 = eigenvec2ckpt(model = model, eigen_info_file = args.load_vec2, index = 1, use_gpu = use_gpu)\n else:\n raise ValueError('Unrecognized format: %s' % args.load_vec2)\n print('vec2 is loaded!')\n else:\n vec2 = None\n\n print('1D scanning' if vec2 is None else '2D scanning')\n\n # Parse the attacker\n attacker = parse_attacker(**args.attack)\n adv_budget_list = [attacker.threshold,] if args.adv_budget_list is None else discrete_seq(**args.adv_budget_list)\n\n # Prepare the item to save\n configs = {kwargs: value for kwargs, value in args._get_kwargs()}\n tosave = {'model_summary': str(model), 'setup_config': configs, 'results': {adv_budget: {} for adv_budget in adv_budget_list},\n 'log': {'cmd': 'python ' + ' '.join(sys.argv), 'time': datetime.now().strftime('%Y/%m/%d, %H:%M:%S')}}\n\n tosave = param_scan(model = model, device = device, attacker = attacker, loader = loader, adv_budget_list = adv_budget_list,\n vec1 = vec1, vec2 = vec2, vec1_scan = args.vec1_scan, vec2_scan = args.vec2_scan, tosave = tosave)\n\n vec1_file = args.load_vec1 if args.load_vec1 != None else os.path.join(args.out_folder, '%s_vec1.ckpt' % args.model_name)\n vec2_file = None if args.vec2_scan == None else os.path.join(args.out_folder, '%s_vec2.ckpt' % args.model_name) if args.load_vec2 is None else args.load_vec2\n tosave['vec1_file'] = vec1_file\n tosave['vec2_file'] = vec2_file\n\n if args.load_vec1 is None:\n torch.save(vec1, vec1_file)\n if args.load_vec2 is None and args.vec2_scan != None:\n torch.save(vec2, vec2_file)\n\n json.dump(tosave, open(os.path.join(args.out_folder, '%s_results.json' % args.model_name), 'w'))\n\n\n"
}
] | 23 |
garbear/DNSwitch | https://github.com/garbear/DNSwitch | c0c4244b8e3e434f9f2abaa3c26f8d5c09f5b67b | 157ede2cbc05b87191708a878ce66b0e7b881f24 | a593583daabdf0232a386d0ff460a34462325b5e | refs/heads/master | 2021-01-18T20:16:27.559862 | 2017-04-02T00:55:53 | 2017-04-02T00:55:53 | 86,950,737 | 0 | 0 | null | 2017-04-02T00:02:08 | 2017-03-31T19:37:22 | 2017-03-12T21:08:08 | null |
[
{
"alpha_fraction": 0.5520362257957458,
"alphanum_fraction": 0.5662572979927063,
"avg_line_length": 35.83333206176758,
"blob_id": "514a641bdeb5a392da28a3d984d8397b78f66f1f",
"content_id": "71bc9900ec8782af0c78314de20b888e160f3177",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1547,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 42,
"path": "/http_server.py",
"repo_name": "garbear/DNSwitch",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python3\n\nfrom os.path import isfile, join\nfrom http.server import BaseHTTPRequestHandler, HTTPServer\n\nSERVER_ADDR = \"0.0.0.0\"\nSERVER_PORT = 80\n\nSERVER_DIR = \"http\"\n\nclass HTTPHandler(BaseHTTPRequestHandler):\n def do_GET(self):\n file_path = join(SERVER_DIR, self.path.split(\"/\")[-1])\n if(file_path == SERVER_DIR + \"/\"): #requesting the index page\n print(\"Sending index page\")\n self.send_response(200)\n self.send_header('Content-Type', 'text/html')\n self.end_headers()\n self.wfile.write(open(file_path + \"index.html\", \"rb\").read())\n elif isfile(file_path): #requesting another page\n print(\"Sending {}\".format(file_path))\n self.send_response(200)\n file_name = file_path.split(\"\\\\\")[-1]\n file_ext = file_name.split(\".\")[-1]\n content_type = None\n if file_ext == \"html\":\n content_type = \"text/html\"\n elif file_ext == \"css\":\n content_type = \"text/css\"\n elif file_ext == \"js\":\n content_type = \"text/javascript\"\n self.send_header('Content-Type', content_type)\n self.end_headers()\n self.wfile.write(open(file_path, \"rb\").read())\n else:\n print(\"Sending 404 for {}\".format(file_path))\n self.send_response(404)\n self.end_headers()\n\nhttpd = HTTPServer((SERVER_ADDR, SERVER_PORT), HTTPHandler)\nprint(\"Serving on port {}\".format(SERVER_PORT))\nhttpd.serve_forever()\n"
}
] | 1 |
eduherraiz/foowill | https://github.com/eduherraiz/foowill | 74613b7fa794c427350704c80d8b357cfc3de3c8 | e22e8030562818f8a3963b3672c11054b0540979 | 02f411df2fc8b9f63b1bf13f041b84a4aecf7f11 | refs/heads/master | 2021-01-10T21:37:06.227792 | 2013-12-13T18:17:00 | 2013-12-13T18:17:00 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6929260492324829,
"alphanum_fraction": 0.6929260492324829,
"avg_line_length": 35.52941131591797,
"blob_id": "edc436278e3fbac2dfc4ad34ac016f956e94586a",
"content_id": "1dd37bbeac64c5192331d43d4df78704350d9b53",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 622,
"license_type": "permissive",
"max_line_length": 98,
"num_lines": 17,
"path": "/admin.py",
"repo_name": "eduherraiz/foowill",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\nfrom app.models import CustomUser, Tweet\n\nclass CustomUserAdmin(admin.ModelAdmin):\n \n list_display = ('admin_thumbnail','username','email','last_update','last_login',\n 'activity_interval','publish_interval','mail_interval','half_dead', 'dead', 'admin_posts')\n search_fields = ('username', 'email')\n list_display_links = ('admin_thumbnail','username','email')\n\nadmin.site.register(CustomUser, CustomUserAdmin) \n \nclass TweetAdmin(admin.ModelAdmin):\n list_display = ('text','pub_date','user')\n list_filter = ('user',)\n \nadmin.site.register(Tweet, TweetAdmin) \n"
},
{
"alpha_fraction": 0.6379310488700867,
"alphanum_fraction": 0.642241358757019,
"avg_line_length": 34.730770111083984,
"blob_id": "d2bdc04ba98d9943e20829b2155f67096108dcc8",
"content_id": "513e97608f125c4ace5509790be67cd932983110",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 928,
"license_type": "permissive",
"max_line_length": 82,
"num_lines": 26,
"path": "/app/urls.py",
"repo_name": "eduherraiz/foowill",
"src_encoding": "UTF-8",
"text": "from django.conf.urls.defaults import patterns, include, url\nfrom django.conf.urls.i18n import i18n_patterns\nfrom django.utils.translation import ugettext_lazy as _\n\n\n## Uncomment the next two lines to enable the admin:\nfrom django.contrib import admin\n\nfrom app.views import *\n\n#admin.autodiscover()\n\nurlpatterns = patterns('',\n url(r'^$', home, name='home'),\n url(r'^admin/', include(admin.site.urls)),\n url(r'^contact/$', contact, name='contact'),\n url(r'^about/$', about, name='about'),\n url(r'^config/$', config, name='config'), \n url(r'^done/$', done, name='done'),\n url(r'^error/$', error, name='error'),\n url(r'^logout/$', logout, name='logout'),\n url(r'^form/$', form, name='form'),\n url(r'^update_status/$', update_status, name='update_status'),\n #url(r'^tweet/add/$',add_tweet, name='add_tweet'),\n url(r'^tweet/delete/(?P<id_tweet>(\\d+))/$',delete_tweet, name='delete_tweet'),\n)"
},
{
"alpha_fraction": 0.5713770985603333,
"alphanum_fraction": 0.5861572027206421,
"avg_line_length": 27.90625,
"blob_id": "b96efc43e88bdc1bbbc405af423c3ea67e8e4337",
"content_id": "4b526b896ddb40b4c123afcee116b5f8f7c41408",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2774,
"license_type": "permissive",
"max_line_length": 127,
"num_lines": 96,
"path": "/app/templatetags/extras.py",
"repo_name": "eduherraiz/foowill",
"src_encoding": "UTF-8",
"text": "from django import template\nfrom django.core.urlresolvers import reverse\nfrom django.conf import settings\n#from django.utils.dateformat import format\nfrom django.utils.translation import ungettext, ugettext\nfrom datetime import datetime, timedelta\n#from time import mktime, localtime\nimport re\n\nregister = template.Library()\n\[email protected]_tag\ndef add_active(request, name, by_path=False):\n \"\"\" Return the string 'active' current request.path is same as name\n \n Keyword aruguments:\n request -- Django request object\n name -- name of the url or the actual path\n by_path -- True if name contains a url instead of url name\n \"\"\"\n if by_path:\n path = name\n else:\n path = reverse(name)\n\n if request.path == path:\n return ' active '\n\n return ''\n\[email protected]_tag\ndef parse_tweet(text):\n text = re.sub(r'((mailto\\:|(news|(ht|f)tp(s?))\\://){1}\\S+)', '<a href=\"\\g<0>\" rel=\"external\">\\g<0></a>', text)\n text = re.sub(r'http://(yfrog|twitpic).com/(?P<id>\\w+/?)', '', text)\n text = re.sub(r'#(?P<tag>\\w+)', '<a href=\"http://search.twitter.com/search?tag=\\g<tag>\" rel=\"external\">#\\g<tag></a>', text)\n text = re.sub(r'@(?P<username>\\w+)', '@<a href=\"http://twitter.com/\\g<username>/\" rel=\"external\">\\g<username></a>', text)\n return text\n \n \[email protected]_tag\ndef tuple2dict(ts):\n ts = settings.__getattr__(ts)\n a = []\n for t in ts:\n a.append(list(t)[0])\n return a\n \[email protected]_tag\ndef values(ts):\n ts = settings.__getattr__(ts)\n lts = len(ts)\n step = 100/lts\n a = []\n added = 0\n for t in ts:\n a.append(added)\n added = added + step\n a[-1] = 100\n return a\n \ndef create_time_string(seconds):\n minutes = (seconds % 3600) / 60\n hours = seconds / 3600\n days = hours/24\n weeks = days/7\n months = days/30\n years = months/12\n \n if years > 0:\n return ungettext('a year','%(years)d years', years) % {'years': years }\n \n if months > 0:\n return ungettext('a month','%(months)d months', months) % {'months': months } \n \n if weeks > 0:\n return ungettext('a week','%(weeks)d weeks', weeks) % {'weeks': weeks }\n \n if days > 0:\n return ungettext('a day','%(days)d days', days) % {'days': days }\n \n if hours > 0:\n return ungettext('a hour','%(hours)d hours', hours) % {'hours': hours }\n \n if minutes > 0:\n return ungettext('a minute','%(minutes)d minutes', minutes) % {'minutes': minutes }\n \n return ugettext('moments')\n \[email protected]\ndef relative_date(data):\n now = datetime.utcnow()\n if data > now:\n v = data - now\n else:\n v = now - data\n return create_time_string(v.seconds+(v.days * 86400))"
},
{
"alpha_fraction": 0.46693122386932373,
"alphanum_fraction": 0.4722222089767456,
"avg_line_length": 23.419355392456055,
"blob_id": "3e9f0f51bd9e3e21e45a1afe7e17fc9d0aa9cc7d",
"content_id": "45050b4ae8800c34f54f8bea47a462cc0e5ab5a1",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "HTML",
"length_bytes": 756,
"license_type": "permissive",
"max_line_length": 124,
"num_lines": 31,
"path": "/templates/done.html",
"repo_name": "eduherraiz/foowill",
"src_encoding": "UTF-8",
"text": "{% extends \"base.html\" %}\n\n{% load extras %}\n{% load i18n %}\n\n{% block content %}\n {% include \"modal-newposttweet.html\" %}\n {% include \"user-box.html\" %}\n\n <div class='header-inner'>\n <h2>Post-tweets</h2>\n </div>\n\n <div class='content-box'>\n {% for tweet in tweets %}\n <div class='tweet'>\n <p>{% parse_tweet tweet.text %}</p>\n <div class='tweet-info'>\n <i>{{ tweet.pub_date|relative_date }}</i>\n <a href=\"/tweet/delete/{{tweet.pk}}\"><i class=\"icon-trash icon-black\"></i> {% trans 'Delete' %}</a><br/>\n </div>\n </div>\n {% endfor %}\n </div>\n </div>\n\n</div>\n\n{% include \"modal-newposttweet.html\" %}\n\n{% endblock %}"
},
{
"alpha_fraction": 0.7085427045822144,
"alphanum_fraction": 0.715242862701416,
"avg_line_length": 32.11111068725586,
"blob_id": "3722daeb1ee8a67fca3fb7e92ac8d6cc68bc760c",
"content_id": "871dd49844e9025c236b5b6197fcdeef345c3f46",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 597,
"license_type": "permissive",
"max_line_length": 126,
"num_lines": 18,
"path": "/urls.py",
"repo_name": "eduherraiz/foowill",
"src_encoding": "UTF-8",
"text": "from django.conf.urls.defaults import patterns, include, url\nfrom django.utils.translation import ugettext_lazy as _\nfrom django.http import HttpResponse\n\n\n# Uncomment the next two lines to enable the admin:\nfrom django.contrib import admin\nfrom admin import *\nfrom app.views import *\n\nadmin.autodiscover()\n\nurlpatterns = patterns('',\n url(r'^robots\\.txt$', lambda r: HttpResponse(\"User-agent: *\\nDisallow: /config\\nDisallow: /done\", mimetype=\"text/plain\")),\n url(r'', include('app.urls')),\n url(r'', include('social_auth.urls')),\n url(r'^i18n/', include('django.conf.urls.i18n')),\n)\n\n"
},
{
"alpha_fraction": 0.6100307106971741,
"alphanum_fraction": 0.6105424761772156,
"avg_line_length": 28.39097785949707,
"blob_id": "30106d70df5ba36b0311a3e5cc31d9a1be57d39b",
"content_id": "8a94f43a4a2f0052891153939356fe51ae843bef",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": true,
"language": "Python",
"length_bytes": 3908,
"license_type": "permissive",
"max_line_length": 113,
"num_lines": 133,
"path": "/fabfile.py",
"repo_name": "eduherraiz/foowill",
"src_encoding": "UTF-8",
"text": "from fabric.api import env, run, cd, prefix,local\nenv.use_ssh_config = True\n\ndef prod():\n env.server = 'prod'\n env.vcs = 'git'\n env.hosts = ['foowill.com', ]\n env.user = 'root'\n env.app = 'foowill'\n env.APP_DIR = '/var/pywww/foowill/'\n env.virtualenv = 'foowill'\n\ndef pre():\n env.server = 'pre'\n env.vcs = 'git'\n env.hosts = ['eduherraiz.no-ip.org', ]\n env.user = 'root'\n env.app = 'foowill'\n env.APP_DIR = '/var/pywww/foowill/'\n env.virtualenv = 'foowill'\n \ndef requirements():\n \"\"\" install requeriments on app \"\"\"\n with cd(env.APP_DIR):\n with prefix(\"source /usr/local/bin/virtualenvwrapper.sh\"):\n with prefix('workon %s' % env.virtualenv):\n run('pip install -r requirements.txt')\n\ndef lessc():\n 'Compile lessc to the final css file'\n APP_DIR = '/mnt/xuflus/Webs/foowill/'\n local(\"cd %s; lessc app/static/css/less/bootstrap.less > app/static/css/bootstrap.css\" % APP_DIR)\n local(\"cd %s; lessc app/static/css/less/responsive.less > app/static/css/bootstrap-responsive.css\" % APP_DIR)\n local(\"cd %s; lessc app/static/css/lessless.css > app/static/css/final.css\" % APP_DIR)\n \ndef get_requeriments():\n with cd(env.APP_DIR):\n local('pip freeze > requirements.txt')\n \ndef push():\n 'Local push to the repository.'\n with cd('/mnt/xuflus/Webs/foowill'):\n local('git add app/static')\n try:\n local('git commit -m \"Auto push on deploy - small changes\"')\n except:\n pass\n local('git push -u origin master')\n \ndef pull():\n 'Updates the repository.'\n local(\"ssh -A %s 'cd %s; git pull'\" % (env.hosts[0], env.APP_DIR))\n\ndef redis_restart():\n 'Restart the redis server.'\n local(\"ssh -A %s '/etc/init.d/redis-server restart'\" % (env.hosts[0]))\n \ndef syncdb():\n with cd(env.APP_DIR):\n with prefix(\"source /usr/local/bin/virtualenvwrapper.sh\"):\n with prefix('workon %s' % env.virtualenv):\n\t\trun('python manage.py syncdb')\ndef migrate():\n with cd(env.APP_DIR):\n with prefix(\"source /usr/local/bin/virtualenvwrapper.sh\"):\n with prefix('workon %s' % env.virtualenv):\n\t\trun('python manage.py migrate')\n\ndef collectstatic():\n with cd(env.APP_DIR):\n with prefix(\"source /usr/local/bin/virtualenvwrapper.sh\"):\n with prefix('workon %s' % env.virtualenv):\n\t\trun('python manage.py collectstatic --noinput')\n\ndef supervisor():\n with cd(env.APP_DIR):\n with prefix(\"source /usr/local/bin/virtualenvwrapper.sh\"):\n with prefix('workon %s' % env.virtualenv):\n run('python manage.py supervisor --daemonize --project-dir=%s' % env.APP_DIR)\ndef stop():\n with cd(env.APP_DIR):\n with prefix(\"source /usr/local/bin/virtualenvwrapper.sh\"):\n with prefix('workon %s' % env.virtualenv):\n\t\trun('python manage.py supervisor stop all')\n\ndef start():\n with cd(env.APP_DIR):\n with prefix(\"source /usr/local/bin/virtualenvwrapper.sh\"):\n with prefix('workon %s' % env.virtualenv):\n\t\trun('python manage.py supervisor start all')\n\ndef restart():\n stop()\n redis_restart()\n start()\n\ndef newserver():\n #install nginx\n #configure nginx\n #install git\n #install redis-server\n #install mkvirtualenv\n #install python-crypto\n #install python-dev\n #first git on /var/pywww/foowill\n #git clone [email protected]:eduherraiz/foowill.git\n #Adding key ssh on bitbucket\n\n requirements()\n \ndef update():\n 'Update all'\n pull()\n requirements()\n syncdb()\n migrate()\n collectstatic()\n restart()\n\ndef updatelessc():\n 'No changes in DB or requeriments'\n lessc()\n push()\n pull()\n collectstatic()\n restart()\n \ndef updatefast():\n 'No changes in DB or 
requeriments'\n pull()\n #requirements()\n #collectstatic()\n restart()"
},
{
"alpha_fraction": 0.7419354915618896,
"alphanum_fraction": 0.7589133977890015,
"avg_line_length": 41.07143020629883,
"blob_id": "12588ca14e04b27f50589ebd1ac5f16af5270558",
"content_id": "12dd709632324bc8386df7587e7dffcd7c19668a",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 589,
"license_type": "permissive",
"max_line_length": 211,
"num_lines": 14,
"path": "/README.md",
"repo_name": "eduherraiz/foowill",
"src_encoding": "UTF-8",
"text": "=======\nFoowill\n=======\n\nFoowill was my first full-stack django project.\n\nDjango 1.4 + Celery (Periodic Tasks) + Redis (Cache)\n\nIt's a twitter app, that looking periodically in your account if you publish tweets. In the case that the configurable time limit is raised the stored tweets in the app will be published in your twitter account.\nUseful to save the last thoughts and share it if you die.\nThe project starts in 2012, and in October 2013 was closed.\n\nIf you want to know more read this post on my blog:\nhttp://www.eduherraiz.com/blog/foowill-un-proyecto-inutil-que-no-lo-fue-tanto\n"
},
{
"alpha_fraction": 0.6399586200714111,
"alphanum_fraction": 0.6417679190635681,
"avg_line_length": 37.689998626708984,
"blob_id": "f9e9744b3a189f163865290de97162900ae0c954",
"content_id": "a2ac4e1d727a3f207e99f367391e57d5d5aba9bc",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3869,
"license_type": "permissive",
"max_line_length": 159,
"num_lines": 100,
"path": "/tasks.py",
"repo_name": "eduherraiz/foowill",
"src_encoding": "UTF-8",
"text": " #-*- coding: UTF-8 -*-\nfrom django.db.models import F\nfrom celery.task import *\nfrom datetime import timedelta, datetime\nfrom app.models import CustomUser\nfrom django.utils.translation import activate,deactivate\n\"\"\"\nNote: the api of twitter retards 1 minute (more or less) the update of the update_date\n\"\"\"\n@task\ndef forensic():\n logger = forensic.get_logger(logfile='tasks.log')\n users = CustomUser.objects.filter(next_check__lt=datetime.utcnow(),half_dead=False,dead=False,configured=True,posts__gt=0)\n \n for user in users:\n\tlogger.info(\"User %s, act: %d, mail: %d, lu: %s - [%s]\" % (user.username, user.activity_interval, user.mail_interval, user.last_update, datetime.now()))\t \n\t\n\t#Get the last update date for the user\n\tif user.update_date():\n\t logger.info(\"User %s, update her date update (on twitter) - [%s]\" % (user.username, datetime.utcnow()))\t \n\n #Which is bigger? login or update date?\n date_substract = user.bigger_date()\n\n nowdate = datetime.utcnow()\n #time from last update or login on foowill\n t = nowdate - date_substract\n\t\n\t#Check if the user is half-dead\n\tif t.seconds >= user.activity_interval:\n\t user.half_dead = True\n\t user.save()\n\t False\n\t logger.info(\"User %s, is HALF-DEAD (on twitter) - [%s]\" % (user.username, datetime.utcnow()))\n\t activate(user.language)\n\t user.send_email_halfdead()\n\t deactivate()\n\t \n@task\ndef killer_saver():\n logger = killer_saver.get_logger(logfile='tasks.log')\n \n users = CustomUser.objects.filter(half_dead=True, dead=False, configured=True, posts__gt=0)\n \n for user in users:\n\tlogger.info(\"User %s, act: %d, mail: %d, lu: %s - [%s]\" % (user.username, user.activity_interval, user.mail_interval, user.last_update, datetime.now()))\t \n\t\n\t#Get the last update date for the user\n\tif user.update_date():\n\t logger.info(\"User %s, update the last date update (on twitter) - [%s]\" % (user.username, datetime.utcnow()))\t \n\t \n #Which is bigger? login or update date?\n date_substract = user.bigger_date()\n\n nowdate = datetime.utcnow()\n\t#time from last update or login on foowill\n\tif nowdate > date_substract: #Correction for a date_substract in future (synchronization problems)\n t = nowdate - date_substract\n else:\n t = timedelta(seconds=0)\n \n\t#Check if the user status\n\tif t.seconds < user.activity_interval:\n\t #Is not still half_dead -> save it\n\t user.half_dead = False\n\t user.last_update = nowdate\n\t user.next_check = nowdate + timedelta(seconds=user.activity_interval)\n\t user.save()\n\t logger.info(\"User %s, is SAVED (on twitter) - [%s]\" % (user.username, datetime.utcnow()))\n\t activate(user.language)\n\t user.send_email_still_alive()\n\t deactivate()\n\t #user.update_twitter_status(\"Sigo vivo, no os preocupeis. 
http://foowill.com %s\" % datetime.now() )\n\t \n\telif t.seconds >= user.activity_interval + user.mail_interval:\n\t user.dead = True\n\t user.save()\n\t logger.info(\"User %s, is DEAD (on twitter) - [%s]\" % (user.username, datetime.utcnow()))\n\t activate(user.language)\n\t user.send_email_hope_to_read()\n if user.mail_interval == 0: \n user.deliver_all_to_twitter()\n else:\n user.posts_sended = user.posts\n user.deliver_one_to_twitter()\n deactivate()\n \n\telse:\n\t logger.info(\"User %s, is STILL HALF-DEAD (on twitter) - [%s]\" % (user.username, datetime.utcnow()))\n\t #TODO: if email: Send email for another reminder.\n\n\t \n@task\ndef tweet_sender():\n logger = killer_saver.get_logger(logfile='tasks.log')\n \n users = CustomUser.objects.filter(half_dead=True, dead=True, configured=True, posts_sended__gt=0, next_check_mail__lt=datetime.utcnow())\n \n for user in users:\n user.deliver_one_to_twitter()"
},
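The inactivity checks in tasks.py above compare a timedelta against intervals measured in seconds (activity_interval defaults to 2419200, i.e. 28 days). A pitfall worth spelling out: timedelta.seconds holds only the sub-day remainder (0-86399), so any interval longer than a day has to be compared via total_seconds(). A minimal, dependency-free sketch:

    from datetime import datetime, timedelta

    activity_interval = 2419200  # 28 days in seconds, the default used above

    last_seen = datetime.utcnow() - timedelta(days=30)
    elapsed = datetime.utcnow() - last_seen

    print(elapsed.seconds)          # sub-day remainder only, always < 86400
    print(elapsed.total_seconds())  # ~2592000: the full elapsed time
    assert elapsed.total_seconds() >= activity_interval  # user counts as half-dead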
{
"alpha_fraction": 0.5333333611488342,
"alphanum_fraction": 0.7049382925033569,
"avg_line_length": 17.409090042114258,
"blob_id": "e8b2ac5d5365cf4b73b5dcf170654e477475610f",
"content_id": "5dce903e900f83e78f9ac0084cf6662dbd44e06f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 810,
"license_type": "permissive",
"max_line_length": 67,
"num_lines": 44,
"path": "/requirements.txt",
"repo_name": "eduherraiz/foowill",
"src_encoding": "UTF-8",
"text": "Babel==0.9.6\nDjango==1.4\nPyYAML==3.10\nSouth==0.7.5\namqplib==1.0.2\nanyjson==0.3.1\nargh==0.15.1\ncelery==2.5.3\ncertifi==0.0.8\nchardet==1.0.1\ndistribute==0.6.27\ndjango-celery==2.5.5\ndjango-contact-form==0.3.1\ndjango-fields==0.2.0\ndjango-picklefield==0.2.1\ndjango-redis-cache==0.9.5\ndjango-redis-sessions==0.3.1\ndjango-social-auth==0.6.9\ndjango-supervisor==0.2.7\ndjango-tinymce==1.5.1b2\ngunicorn==0.14.3\nhttplib2==0.7.4\nkombu==2.1.8\n-e git://github.com/brad/mailsnake.git@503dd6a8ba1426edf13fff13be2ff9c0354f35ae#egg=mailsnake-dev\nmeld3==0.6.8\noauth2==1.5.211\noauthlib==0.1.3\npathtools==0.1.2\npyasn1==0.1.3\npycrypto==2.6\npygeoip==0.2.4\npython-dateutil==1.5\npython-openid==2.2.5\npython-twitter==0.8.2\npytz==2012d\nredis==2.4.13\nrequests==0.13.1\nrsa==3.0.1\nsimplejson==2.5.2\nsix==1.1.0\nsupervisor==3.0a12\ntweepy==1.9\nwatchdog==0.6.0\nwsgiref==0.1.2\n"
},
{
"alpha_fraction": 0.6468297839164734,
"alphanum_fraction": 0.6551724076271057,
"avg_line_length": 30.561403274536133,
"blob_id": "e8480606c38f64bb2fe4ecb597380f5d4c373459",
"content_id": "387fc30024a996fa21be7defc7a928b89ceb2ade",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1798,
"license_type": "permissive",
"max_line_length": 89,
"num_lines": 57,
"path": "/app/utils.py",
"repo_name": "eduherraiz/foowill",
"src_encoding": "UTF-8",
"text": "#-*- coding: UTF-8 -*-\nfrom django.conf import settings\nimport tweepy\nimport html2text\n\ndef send_email_mandrill(subject, html_content, from_email, from_name, email_to, name_to):\n from mailsnake import MailSnake\n from django.conf import settings\n\n 'Send email using mandrill.com API'\n mapi = MailSnake(settings.MANDRILL_KEY, api='mandrill')\n message={\n 'subject':subject, \n 'text': html2text.html2text(html_content),\n 'html': html_content,\n 'from_email': from_email, \n 'from_name':from_name, \n 'to':[{\n 'email':email_to, \n 'name': name_to,\n }]\n }\n mapi.messages.send(message=message) \n return True\n \n\ndef connect_tweepy(user):\n # == OAuth Authentication ==\n #\n # This mode of authentication is the new preferred way\n # of authenticating with Twitter.\n\n # The consumer keys can be found on your application's Details\n # page located at https://dev.twitter.com/apps (under \"OAuth settings\")\n consumer_key = settings.TWITTER_CONSUMER_KEY\n consumer_secret= settings.TWITTER_CONSUMER_SECRET\n\n # The access tokens can be found on your applications's Details\n # page located at https://dev.twitter.com/apps (located \n # under \"Your access token\")\n access_token = user.tokens[\"oauth_token\"]\n access_token_secret = user.tokens[\"oauth_token_secret\"]\n\n auth = tweepy.OAuthHandler(consumer_key, consumer_secret)\n auth.set_access_token(access_token, access_token_secret)\n\n return tweepy.API(auth)\n\n#def get_possible_country_code(ip):\n ##ip = \"74.125.230.211\" #google.com\n #from django.contrib.gis.geoip import GeoIP\n #g = GeoIP(path=settings.GEOIP_PATH)\n #c = g.country(ip)['country_code']\n #if not c:\n #return 'ES'\n #else:\n #return c"
},
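For orientation, a usage sketch of the two helpers above. The addresses, subject line, and the user object are illustrative assumptions, not values from the project; connect_tweepy only needs an object whose tokens dict carries the OAuth token pair.

    from app.utils import send_email_mandrill, connect_tweepy

    # Hypothetical addresses, for illustration only.
    send_email_mandrill(
        subject="Are you still alive?",
        html_content="<p>No recent activity was seen on your account.</p>",
        from_email="noreply@example.com",
        from_name="Foowill",
        email_to="user@example.com",
        name_to="some_user",
    )

    api = connect_tweepy(user)  # user: a social-auth record with OAuth tokens
    api.update_status("still alive")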
{
"alpha_fraction": 0.7099999785423279,
"alphanum_fraction": 0.7099999785423279,
"avg_line_length": 32,
"blob_id": "1b191a939eec82da41bd1b048b43a317c418ac5d",
"content_id": "4671f2c1158089b6b213fb1e3ca68d170b133dc3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 100,
"license_type": "permissive",
"max_line_length": 36,
"num_lines": 3,
"path": "/app/context_processors.py",
"repo_name": "eduherraiz/foowill",
"src_encoding": "UTF-8",
"text": "def debug_mode(request):\n from django.conf import settings\n return {'DEBUG': settings.DEBUG} \n"
},
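The three-line context processor above only takes effect once it is registered. Under Django 1.4 that happens through the TEMPLATE_CONTEXT_PROCESSORS setting; a minimal sketch (the entries other than the last are illustrative stock processors, not the project's actual settings file):

    # settings.py (sketch): expose {{ DEBUG }} to every template
    # rendered with a RequestContext.
    TEMPLATE_CONTEXT_PROCESSORS = (
        'django.contrib.auth.context_processors.auth',
        'django.core.context_processors.debug',
        'django.core.context_processors.request',
        'app.context_processors.debug_mode',  # the function defined above
    )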
{
"alpha_fraction": 0.6184419989585876,
"alphanum_fraction": 0.6287758350372314,
"avg_line_length": 35.97058868408203,
"blob_id": "242a324fdc314eab0b3fb11a3d17a18549c27433",
"content_id": "fd41888e6cdb5efd4ead439bbfb62992cf722b0b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1258,
"license_type": "permissive",
"max_line_length": 109,
"num_lines": 34,
"path": "/app/forms.py",
"repo_name": "eduherraiz/foowill",
"src_encoding": "UTF-8",
"text": " #-*- coding: UTF-8 -*-\nfrom django.forms import ModelForm, Textarea, Form, CharField, EmailField\nfrom app.models import Tweet, CustomUser\nfrom django.utils.translation import ugettext_lazy as _\n\nclass TweetForm(ModelForm):\n class Meta:\n model = Tweet\n fields = ('text', )\n widgets = {\n 'text': Textarea(),\n }\n\nclass ConfigForm(ModelForm):\n class Meta:\n model = CustomUser\n fields = ('email','activity_interval', 'publish_interval', 'mail_interval' )\n #widgets = {\n #'text': Textarea(attrs={'cols': 40, 'rows': 5}),\n #}\t \n \nclass ContactForm(Form):\n subject = CharField(label=_(\"Message subject\"),max_length=200,required=True)\n message = CharField(label=_(\"Your message\"),widget=Textarea(),required=True )\n sender = EmailField(label=_(\"Email address\"), required=True)\n name = CharField(label=_(\"Your name or company\"),max_length=200,required=True)\n #cc_myself = forms.BooleanField(required=False)\n \nclass UpdateTweetForm(Form):\n updatetweet = CharField(\n required=True, \n max_length=140, \n widget=Textarea(), \n initial=_(\"I saved a tweet that will be published when I die with http://foowill.com @foo_will\"))\n"
},
{
"alpha_fraction": 0.5881302356719971,
"alphanum_fraction": 0.5886264443397522,
"avg_line_length": 33.50342559814453,
"blob_id": "90185fd4bb29f6691777cf180bc9b1ad1a8b7f14",
"content_id": "8c1e650b8d1d9a52fcbda7ec206a5712eedc5d30",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 10076,
"license_type": "permissive",
"max_line_length": 119,
"num_lines": 292,
"path": "/app/views.py",
"repo_name": "eduherraiz/foowill",
"src_encoding": "UTF-8",
"text": " #-*- coding: UTF-8 -*-\n\nfrom django.http import HttpResponseRedirect\nfrom django.contrib.auth import logout as auth_logout\nfrom django.contrib.auth.decorators import login_required\nfrom django.template import RequestContext\nfrom django.shortcuts import render_to_response, redirect\nfrom django.contrib.messages.api import get_messages\nfrom django.conf import settings\nfrom django.utils.translation import get_language\nfrom django.utils.translation import ugettext as _\nfrom datetime import datetime,timedelta\n\nfrom tweepy.error import TweepError\n\nfrom social_auth import __version__ as version\nfrom social_auth.utils import setting\nfrom social_auth.models import UserSocialAuth\n\nfrom app.models import Tweet, CustomUser\nfrom app.utils import send_email_mandrill\nfrom app.forms import *\n\n#from pytz import country_timezones\n\ndef get_user(userg):\n try: \n instance = UserSocialAuth.objects.filter(provider='twitter',user=userg).get()\n except UserSocialAuth.DoesNotExist:\n return None\n try:\n user = CustomUser.objects.filter(user=instance).get()\n except: #Not user defined\n user = CustomUser.objects.create(user=instance, username=userg.username)\n user.update_date()\n user.update_twitter_photo()\n user.save()\n user.update_login_date()\n return user\n\ndef contact(request):\n if request.user.is_authenticated():\n user = get_user(request.user)\n if not user.configured:\n return HttpResponseRedirect('/config/') # Redirect after POST\n else:\n user = ()\n \n from_email = \"\"\n sended = False\n \n if request.method == 'POST': # If the form has been submitted...\n form = ContactForm(request.POST) # A form bound to the POST data\n if form.is_valid(): # All validation rules pass\n #Send the email\n subject = form.cleaned_data['subject']\n html_content = form.cleaned_data['message']\n from_email = form.cleaned_data['sender']\n from_name = form.cleaned_data['name']\n infomail = send_email_mandrill(subject,html_content, from_email, from_name, settings.ADMIN_EMAIL, 'Admin foowill')\n #infomail = info[0]\n sended = True\n else:\n infomail = False\n else:\n form = ContactForm() # An unbound form\n infomail = {}\n \n ctx = {\n 'form': form,\n 'tweetform': TweetForm(),\n 'user': user,\n 'from_email' : from_email,\n 'infomail' : infomail,\n 'sended' : sended,\n }\n \n return render_to_response('contact.html', ctx, RequestContext(request))\n \ndef home(request):\n \"\"\"Home view, displays login mechanism\"\"\"\n if request.user.is_authenticated():\n user = get_user(request.user)\n #Initial login have to configure the app -> redirecting\n if not user.configured:\n return HttpResponseRedirect('/config/') # Redirect after POST\n else:\n\tuser = ()\n\t\n ctx = {\n 'tweetform': TweetForm(),\n 'user': user,\n } \n return render_to_response('home.html', ctx, RequestContext(request))\n\ndef about(request):\n \"\"\"Home view, displays login mechanism\"\"\"\n if request.user.is_authenticated():\n user = get_user(request.user)\n #Initial login have to configure the app -> redirecting\n if not user.configured:\n return HttpResponseRedirect('/config/') # Redirect after POST\n else:\n user = ()\n \n ctx = {\n 'tweetform': TweetForm(),\n 'user': user,\n } \n return render_to_response('about.html', ctx, RequestContext(request))\n \n@login_required\ndef config(request):\n \"\"\"Login complete view, displays user data\"\"\"\n user = get_user(request.user)\n #ip = request.META.get('REMOTE_ADDR', None)\n #countrycode = get_possible_country_code(ip)\n #timezones = country_timezones(countrycode)\n \n 
saved = False\n \n if request.method == 'POST': # If the form has been submitted...\n form = ConfigForm(request.POST) # A form bound to the POST data\n if form.is_valid(): # All validation rules pass\n #Save the user config in the table\n user.username = request.user.username\n user.email = form.cleaned_data['email']\n user.publish_interval = form.cleaned_data['publish_interval']\n user.mail_interval = form.cleaned_data['mail_interval']\n \n if user.activity_interval <> form.cleaned_data['activity_interval']:\n force = True\n else:\n force = False\n \n user.activity_interval = form.cleaned_data['activity_interval']\n #user.timezone = form.cleaned_data['timezone']\n user.language = get_language()\n #user.countrycode = countrycode\n user.update_twitter_photo()\n user.save()\n user.update_date(force)\n\n ##USED FOR TESTING \n #user.posts_sended = user.posts\n #user.deliver_one_to_twitter()\n #user.send_email_halfdead()\n #user.send_email_still_alive()\n #user.send_email_hope_to_read()\n #user.deliver_all_to_twitter()\n \n if not user.configured:\n #user.update_date() #Not necessary yet, updated on first save\n user.configured = True\n user.save()\n saved = True\n else:\n form = ConfigForm(instance=user) # An unbound form\n\n \n ctx = {\n 'form': form,\n 'tweetform': TweetForm(),\n 'user': user,\n 'bigger': user.bigger_date(),\n 'bigger2': user.bigger_date() + timedelta(seconds=(user.activity_interval+user.mail_interval)),\n 'saved' : saved,\n #'timezones' : timezones,\n }\n \n return render_to_response('config.html', ctx, RequestContext(request))\n\n@login_required\ndef done(request):\n \"\"\"Login complete view, displays user data\"\"\"\n user = get_user(request.user)\n if not user.configured:\n return HttpResponseRedirect('/config/') # Redirect after POST\n\n new_posttweet = False\n if request.method == 'POST': # If the form has been submitted...\n tweetf = TweetForm(request.POST) # A form bound to the POST data\n if tweetf.is_valid(): # All validation rules pass\n #Save the tweet in the table\n text = tweetf.cleaned_data['text']\n pub_date = datetime.utcnow() \n \n t = Tweet(text=text, pub_date=pub_date, user=user)\n t.save()\n new_posttweet = user.show_modal_new_tweet()\n if user.alwaysupdate:\n tweet = _(\"I saved a tweet that will be published when I die with http://foowill.com @foo_will\")\n try:\n user.update_twitter_status(tweet)\n except TweepError:\n count = Tweet.objects.filter(user=user).count()\n user.update_twitter_status(\"%s (%d)\" % (tweet, count))\n except:\n pass\n user.posts = user.posts + 1\n user.save()\n else:\n tweetf = TweetForm()\n \n tweets = Tweet.objects.filter(user=user).order_by('-pub_date')\n updatetweetform = UpdateTweetForm()\n \n ctx = {\n 'tweetform': tweetf,\n 'tweets': tweets,\n 'user': user,\n 'updatetweetform': updatetweetform,\n 'new_posttweet': new_posttweet,\n }\n \n return render_to_response('done.html', ctx, RequestContext(request))\n\n \n@login_required\ndef delete_tweet(request, id_tweet):\n \"\"\"Delete tweet\"\"\"\n user = get_user(request.user)\n try:\n t = Tweet.objects.filter(pk=id_tweet, user=user).get()\n t.delete()\n user.posts = user.posts - 1\n user.save()\n except:\n pass\n\n \n return HttpResponseRedirect('/done/') # Redirect after POST\n\n@login_required\ndef update_status(request):\n \"\"\"Update user status in her twitter account\"\"\"\n user = get_user(request.user)\n sendupdate = False\n #Saving user option for future updates\n if request.method == 'POST': # If the form has been submitted...\n if 'never' in request.POST:\n 
user.neverupdate = True\n user.save()\n elif 'nottoday' in request.POST:\n user.nottodayupdate = datetime.utcnow()\n user.save()\n elif 'ever' in request.POST:\n user.alwaysupdate = True\n user.save()\n sendupdate = True\n elif 'now' in request.POST:\n sendupdate = True\n\n if sendupdate:\n form = UpdateTweetForm(request.POST) # A form bound to the POST data\n if form.is_valid(): # All validation rules pass\n #Send the tweet to twitter\n tweet = form.cleaned_data['updatetweet']\n \n try:\n user.update_twitter_status(tweet)\n except TweepError:\n try:\n count = Tweet.objects.filter(user=user).count()\n user.update_twitter_status(\"%s (%d)\" % (tweet, count))\n except:\n pass\n \n return HttpResponseRedirect('/done/') # Redirect after POST\n \n \ndef error(request):\n \"\"\"Error view\"\"\"\n messages = get_messages(request)\n return render_to_response('error.html', {'version': version,\n 'messages': messages},\n RequestContext(request))\n\n\ndef logout(request):\n \"\"\"Logs out user\"\"\"\n auth_logout(request)\n return HttpResponseRedirect('/')\n\n\ndef form(request):\n if request.method == 'POST' and request.POST.get('username'):\n name = setting('SOCIAL_AUTH_PARTIAL_PIPELINE_KEY', 'partial_pipeline')\n request.session['saved_username'] = request.POST['username']\n backend = request.session[name]['backend']\n return redirect('socialauth_complete', backend=backend)\n return render_to_response('form.html', {}, RequestContext(request))\n"
},
{
"alpha_fraction": 0.650692343711853,
"alphanum_fraction": 0.6553440093994141,
"avg_line_length": 35.97600173950195,
"blob_id": "635ae8f8beb55f294506c69bb4ab4f7dd94cc656",
"content_id": "7407c30a570610a642a8ee32c735517d4c922b5b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 9248,
"license_type": "permissive",
"max_line_length": 140,
"num_lines": 250,
"path": "/app/models.py",
"repo_name": "eduherraiz/foowill",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\nfrom django.db import models\nfrom django.conf import settings\nfrom social_auth.models import UserSocialAuth\nfrom datetime import datetime,timedelta\nfrom django_fields.tests import EncryptedCharField\nfrom django.core.mail import EmailMultiAlternatives\nfrom django.utils.translation import ugettext as _\nfrom django.shortcuts import render\nfrom app.utils import send_email_mandrill, connect_tweepy\nfrom django.template.loader import render_to_string\n\nimport twitter\nimport tweepy\n\n#from django.db.models.signals import post_save\n#from django.dispatch import receiver\n\n# Define a custom User class to work with django-social-auth\nclass CustomUserManager(models.Manager):\n def create_user(self, username, email):\n return self.model._default_manager.create(username=username)\n\n\nclass CustomUser(models.Model):\n configured = models.BooleanField(default=False)\n user = models.OneToOneField(UserSocialAuth)\n username = models.CharField(max_length=128)\n photo = models.URLField(blank=True, null=True)\n language = models.CharField(max_length=128, default='en', blank=True)\n #timezone = models.CharField(max_length=128, default='Europe/Madrid', blank=True)\n #countrycode = models.CharField(max_length=4, default='ES', blank=True)\n\n last_login = models.DateTimeField(blank=True, null=True)\n \n email = models.EmailField(blank=True)\n\n #intervalo de periodo de inactividad para considerar al usuario muerto/medio-muerto (segun si email ping o no)\n activity_interval = models.IntegerField(choices=settings.ACTIVITY_CHOICES, default=2419200, blank=False)\n \n #intervalo de tiempo entre emisiones de tweets una vez muerto\n publish_interval = models.IntegerField(choices=settings.PUBLISH_CHOICES, default=0, blank=False)\n\n #tiempo a esperar desde half-dead hasta dead\n mail_interval = models.IntegerField(choices=settings.ACTIVITY_CHOICES, default=1209600, blank=False)\n \n #fecha de la última publicación en twitter\n last_update = models.DateTimeField(blank=True, null=True)\n \n #fecha para la siguiente comprobación del estado de twitter\n next_check = models.DateTimeField(blank=True, null=True)\n \n #fecha para la siguiente envío de un post-tweet con interval_mail > 0\n next_check_mail = models.DateTimeField(blank=True, null=True)\n \n #if (now() - last_update) > activity_interval: \n\t#half_dead = True\n half_dead = models.BooleanField(default=False)\n dead = models.BooleanField(default=False)\n\n #if we are waiting for a mail ping\n wait_mail = models.BooleanField(default=False)\n \n new_posttweet = models.BooleanField(default=False)\n \n #For the ask in the modal\n neverupdate = models.BooleanField(default=False)\n alwaysupdate = models.BooleanField(default=False)\n nottodayupdate = models.DateTimeField(blank=True, null=True)\n\n posts = models.IntegerField(default=0, blank=True, null=True)\n posts_sended = models.IntegerField(default=0, blank=True, null=True)\n \n objects = CustomUserManager()\n \n #def save(self, *args, **kwargs):\n #if not self.pk:\n #if not self.last_update:\n #self.update_date()\n #if not self.photo:\n #self.update_twitter_photo()\n #super(CustomUser, self).save(*args, **kwargs)\n \n\n def update_date(self, force=False):\n\t\"Save the last update date in twitter for the user\"\n api = twitter.Api(settings.TWITTER_CONSUMER_KEY,settings.TWITTER_CONSUMER_SECRET,settings.ACCESS_TOKEN,settings.ACCESS_TOKEN_SECRET)\n statuses = api.GetUserTimeline(self.username, count=1)\n if len(statuses) > 0:\n new_date = 
datetime.utcfromtimestamp(statuses[0].created_at_in_seconds)\n else:\n new_date = datetime.utcnow()\n \n if not self.activity_interval:\n self.activity_interval = 3600\n next_check = new_date + timedelta(seconds=self.activity_interval) \n \n if not self.last_update or (self.last_update < new_date) or force:\n if force:\n self.last_update = datetime.utcnow()\n self.next_check = datetime.utcnow() + timedelta(seconds=self.activity_interval)\n else:\n self.last_update = new_date\n self.next_check = next_check\n self.save()\n return self.last_update\n \n def update_login_date(self):\n ##Checkin login in last day to update last_login, used in tasks\n if not self.last_login or self.last_login < (datetime.utcnow() - timedelta(seconds=settings.TIME_LOGIN)):\n self.last_login = datetime.utcnow()\n self.next_check = datetime.utcnow() + timedelta(seconds=self.activity_interval)\n self.save()\n\n def update_twitter_status(self, text):\n\tif text:\n api = connect_tweepy(self.user)\n\t api.update_status(text)\n\n def update_twitter_photo(self):\n api = connect_tweepy(self.user)\n user = api.get_user(self.username)\n self.photo = user.profile_image_url\n self.save()\n\t \n def get_twitter_friends(self):\n api = connect_tweepy(self.user)\n friends =[]\n for friend in tweepy.Cursor(api.friends).items():\n friends.append(friend.screen_name)\n return friends\n \n\n def show_modal_new_tweet(self):\n\tif self.alwaysupdate:\n\t return False\n\tif self.neverupdate:\n\t return False\n\tif self.nottodayupdate and (self.nottodayupdate > (datetime.utcnow() - timedelta(days=1))):\n\t return False\n\treturn True\n\n def bigger_date(self):\n if self.last_update < self.last_login:\n return self.last_login\n else:\n return self.last_update\n\t\n def send_email(self, subject, html_content):\n send_email_mandrill(subject, html_content,settings.EMAIL_PROJECT ,settings.NAME_PROJECT,self.email, self.username)\n\n def number_posts(self):\n return Tweet.objects.filter(user=self).count()\n\n def send_email_halfdead(self):\n\tsubject = _(\"Are you still alive?\")\n\t\n\thtml_content = render_to_string('half_dead.html', \n { \n 'userlanguage': self.language,\n 'username': self.username,\n 'time_without_update': self.bigger_date(),\n 'half_dead_time_mail_interval': self.bigger_date() + timedelta(seconds=(self.activity_interval+self.mail_interval)),\n 'number_posts': self.number_posts(),\n 'link_for_config': 'http://www.foowill.com/config'\n }\n\t)\n\n\tself.send_email(subject, html_content)\n\t\n def send_email_still_alive(self):\n subject = _(\"We are glad you are okay!\")\n \n html_content = render_to_string('still_alive.html', \n { \n 'username': self.username,\n 'userlanguage': self.language,\n }\n )\n\n self.send_email(subject, html_content)\n\n def send_email_hope_to_read(self):\n subject = _(\"The time interval waiting for your twitter status update is exceeded\")\n \n html_content = render_to_string('hope_to_read.html', \n { \n 'username': self.username,\n 'userlanguage': self.language,\n }\n )\n\n self.send_email(subject, html_content)\n \n def deliver_all_to_twitter(self):\n tweets = Tweet.objects.filter(user=self)\n for tweet in tweets:\n self.update_twitter_status(tweet.text)\n\n def deliver_one_to_twitter(self): \n if self.posts_sended > 0:\n self.next_check_mail = datetime.utcnow() + timedelta(seconds=self.mail_interval)\n ts = self.posts - self.posts_sended\n self.posts_sended = self.posts_sended - 1\n tweets = Tweet.objects.filter(user=self)\n self.update_twitter_status(tweets[ts].text)\n self.save()\n \n def 
admin_thumbnail(self):\n return u'<img src=\"%s\" />' % (self.photo)\n admin_thumbnail.short_description = 'Thumbnail'\n admin_thumbnail.allow_tags = True\n \n def admin_posts(self):\n return u'<a href=\"/admin/app/tweet/?q=1&user__id__exact=%d\">%d Post-tweets</a>' % (self.id, self.posts)\n admin_posts.short_description = 'Post-tweets'\n admin_posts.allow_tags = True\n \n def is_authenticated(self):\n return True\n \n def __unicode__(self):\n return self.username\n \n# Define Tweet \nclass Tweet(models.Model):\n text = EncryptedCharField(max_length=140, unique=True, blank=False)\n user = models.ForeignKey(CustomUser)\n pub_date = models.DateTimeField('date published')\n \n def __str__(self):\n return self.text\n\n#@receiver(post_save, sender=CustomUser)\n#def init_user(sender, instance, created, **kwargs):\n #\"\"\"Create a matching profile whenever a user object is created.\"\"\"\n #if created: \n #instance.update_twitter_photo()\n #instance.update_date()\n ##profile, new = UserProfile.objects.get_or_create(user=instance)\n\n \nfrom social_auth.signals import pre_update\nfrom social_auth.backends.facebook import FacebookBackend\n\n\ndef facebook_extra_values(sender, user, response, details, **kwargs):\n return False\n\npre_update.connect(facebook_extra_values, sender=FacebookBackend)\n"
}
] | 14 |
Bakhoj/lora_master
|
https://github.com/Bakhoj/lora_master
|
2cceb6a4e191631f35232672bc4dc2f021d676f9
|
c578dcf1a66c9076f76d6fb5c68f2c1246bce7cf
|
86657229b5265050348af16e3ae8700cab7aedd4
|
refs/heads/master
| 2020-03-12T00:12:43.706242 | 2018-05-27T13:21:51 | 2018-05-27T13:21:51 | 130,343,882 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.8292682766914368,
"alphanum_fraction": 0.8292682766914368,
"avg_line_length": 40,
"blob_id": "131212c20e4c0e808d80070ea74929833d0386d9",
"content_id": "52f95d706b1939fe7710712ea3374d40303bdde8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 82,
"license_type": "no_license",
"max_line_length": 44,
"num_lines": 2,
"path": "/README.md",
"repo_name": "Bakhoj/lora_master",
"src_encoding": "UTF-8",
"text": "# LoRa Master module Weather Station\nCode for master modules of weathers stations\n"
},
{
"alpha_fraction": 0.48455286026000977,
"alphanum_fraction": 0.5121951103210449,
"avg_line_length": 25.7391300201416,
"blob_id": "8a9a04588d5dc845ac1130684d670184702b238b",
"content_id": "c76f3c23d3056f286c99f2044f11fe739cb7a3be",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 615,
"license_type": "no_license",
"max_line_length": 68,
"num_lines": 23,
"path": "/simple_time.py",
"repo_name": "Bakhoj/lora_master",
"src_encoding": "UTF-8",
"text": "import time\n\nclass SimpleTime():\n\n def __init__(self, day = 0, hour = 0, minute = 0):\n self.day = day\n self.hour = hour\n self.minute = minute\n self.time = None\n\n def byte_to_time(self, first = 0, second = 0):\n self.day = ((first & 0xF8) >> 3)\n self.hour = (((first & 0x07) << 2) + ((second & 0xC0) >> 6))\n self.minute = (second & 0x3F)\n self.time = time.time()\n\n def print_time(self):\n print(\"Day: \\t\", self.day)\n print(\"Hour: \\t\", self.hour)\n print(\"Min: \\t\", self.minute)\n\n def to_float_time(self):\n return self.time\n"
},
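byte_to_time above unpacks two bytes into a 5-bit day, a 5-bit hour that straddles the byte boundary, and a 6-bit minute. The repository does not include the inverse; a minimal sketch of a hypothetical time_to_bytes helper, handy for generating test payloads:

    def time_to_bytes(day, hour, minute):
        # Pack day (5 bits) into the top of the first byte, hour (5 bits)
        # across the byte boundary, and minute (6 bits) into the second byte.
        first = ((day & 0x1F) << 3) | ((hour & 0x1F) >> 2)
        second = ((hour & 0x03) << 6) | (minute & 0x3F)
        return first, second

    # Round-trip against byte_to_time: 0xED, 0x8C (the time bytes used in
    # pc_test.py) decode to day 29, hour 22, minute 12, and pack back again:
    assert time_to_bytes(29, 22, 12) == (0xED, 0x8C)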
{
"alpha_fraction": 0.6090273261070251,
"alphanum_fraction": 0.6244966983795166,
"avg_line_length": 26.928993225097656,
"blob_id": "c8eaf5ca6d886a0b1f96ce43afc7fd001d3a93ab",
"content_id": "5735c0aedf1db85b94409270fce1a1918d15b8f2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4719,
"license_type": "no_license",
"max_line_length": 136,
"num_lines": 169,
"path": "/weather_master.py",
"repo_name": "Bakhoj/lora_master",
"src_encoding": "UTF-8",
"text": "from simple_time import SimpleTime\nfrom data_pack import DataPack\n\n\"\"\"\nTime to connect the Module to the DB http://www.fhilitski.com/2016/11/temperature-sensor-with-raspberry-pi-3-and-aws/ \n\"\"\"\n\n\nclass PackageReader():\n\tdef __init__(self):\n\t\tpass\n\n\tdef read_package(self, payload, verbose = False):\n\t\tif(type(payload) is not list):\n\t\t\tprint(\"Wrong payload type\")\n\t\t\treturn\n\t\tself.data_pack = DataPack()\n\t\tself.verbose = verbose\n\t\tself.index = 0\n\t\tself.payload = payload\n\t\tself.chex_sum = self.payload[self.inc()]\n\t\tself.cmd = self.payload[self.inc()]\n\t\tself.accepted_package = self.chex_sum == len(payload)\n\n\t\tif(self.verbose):\n\t\t\tprint(\"\\n==================================\")\n\t\t\tprint(\"chex_sum: \\t\", self.chex_sum)\n\t\t\tprint(\"command: \\t\", self.cmd)\n\n\t\tif(self.accepted_package == False):\n\t\t\tif(self.verbose):\n\t\t\t\tprint(\"Package not accepted\")\n\t\t\t\tprint(\"==================================\\n\")\n\t\t\treturn\n\t\t\n\t\tself.__cmd_lookup(self.cmd)\n\n\tdef read_package_async(self, payload):\n\t\tself.read_package(payload, verbose=False)\n\n\tdef is_accepted(self):\n\t\treturn self.accepted_package\n\n\t\n\tdef __cmd_lookup(self, x):\n\t\tif x == 0x01:\n\t\t\t#Local Station ID Request S -> M\n\t\t\tpass\n\t\telif x == 0x02:\n\t\t\t#Local Station ID Response M -> S\n\t\t\tpass\n\t\telif x == 0x03:\n\t\t\t#Local Time ID Request S -> M\n\t\t\tpass\n\t\telif x == 0x04:\n\t\t\t#Local Time ID Respons M -> S\n\t\t\tpass\n\t\telif x == 0x05:\n\t\t\t#Send measured station data\n\t\t\tself.__cmd_sensor_data()\n\t\telif x == 0x06:\n\t\t\t#Received measured station data\n\t\t\tpass\n\t\telif x == 0x07:\n\t\t\t#Send Undifined data?\n\t\t\tpass\n\t\telif x == 0x08:\n\t\t\t#Send Station Status\n\t\t\tpass\n\t\telif x == 0x09:\n\t\t\t#Received Station Status\n\t\t\tpass\n\t\telse:\n\t\t\tself.__invalid_command()\n\n\tdef __cmd_sensor_data(self):\n\t\tif(self.verbose):\n\t\t\tprint(\"\\n==================================\")\n\t\t\tprint(\"\\tSensor Data command\")\n\t\tself.__record_station_id()\n\t\tself.__record_time_id()\n\t\tself.__check_available_sensors()\n\t\tself.__record_sensor_data()\n\n\t\tif(self.verbose):\n\t\t\tprint(\"==================================\\n\")\n\n\tdef __record_station_id(self):\n\t\tself.data_pack.station_id = (self.payload[2] << 8)+ self.payload[3]\n\t\tif(self.verbose):\n\t\t\tprint(\"Local Station Identification: \\t\", self.data_pack.station_id)\n\n\tdef __record_time_id(self):\n\t\tself.data_pack.time.byte_to_time(self.payload[4], self.payload[5])\n\t\tif(self.verbose):\n\t\t\tprint(\"Local Times Identification: \\t{}:{}:{}\".format(self.data_pack.time.day, self.data_pack.time.hour, self.data_pack.time.minute))\n\n\tdef __check_available_sensors(self):\n\t\t\"\"\"\n\t\twill set the booleans for sensor data, \n\t\tand set the self.index to 8 and makes it ready for __record_sensor_data.\n\t\tself.verbose will print sensors availability.\n\t\t\"\"\"\n\t\tsensor_byte_one = self.payload[6]\n\t\tsensor_byte_two = self.payload[7]\n\t\tself.index = 8\n\n\t\tself.data_pack.has_bat_lvl = self.__bit_check(sensor_byte_one)(8)\n\n\t\tself.data_pack.has_air_temp = self.__bit_check(sensor_byte_one)(4)\n\t\tself.data_pack.has_air_hum = self.__bit_check(sensor_byte_one)(3)\n\t\tself.data_pack.has_air_pres = self.__bit_check(sensor_byte_one)(2)\n\t\t\n\t\tif(self.verbose):\n\t\t\tprint(\"Has battery level: \\t\", self.data_pack.has_bat_lvl)\n\t\t\tprint(\"Has air temperature: \\t\", 
self.data_pack.has_air_temp)\n\t\t\tprint(\"Has air humidity: \\t\", self.data_pack.has_air_hum)\n\t\t\tprint(\"Has air Pressure: \\t\", self.data_pack.has_air_pres)\n\n\tdef __record_sensor_data(self):\n\t\tverbose = self.verbose\n\t\t# 1. Battery level\n\t\tif self.data_pack.has_bat_lvl: \n\t\t\tself.data_pack.bat_lvl = self.payload[self.inc()]\n\t\t\tif verbose:\n\t\t\t\tprint(\"Battery level: \\t\\t\", self.data_pack.bat_lvl)\n\n\t\t# 5. Air Temperature\n\t\tif self.data_pack.has_air_temp:\n\t\t\tnum = self.payload[self.inc()]\n\t\t\tdecimal = self.payload[self.inc()] / 100\n\n\t\t\tself.data_pack.air_temp = num + decimal\n\t\t\tif verbose:\n\t\t\t\tprint(\"Air temperature: \\t\", self.data_pack.air_temp)\n\n\t\t# 6. Air Humidity\n\t\tif self.data_pack.has_air_hum:\n\t\t\tnum = self.payload[self.inc()]\n\t\t\tdecimal = self.payload[self.inc()] / 100\n\t\t\tself.data_pack.air_hum = num + decimal\n\t\t\tif verbose:\n\t\t\t\tprint(\"Air humidity: \\t\\t\", self.data_pack.air_hum)\n\n\t\tif self.data_pack.has_air_pres:\n\t\t\tnum1 = self.payload[self.inc()] << 16\n\t\t\tnum2 = self.payload[self.inc()] << 8\n\t\t\tnum3 = self.payload[self.inc()]\n\t\t\tself.data_pack.air_pres = num1 + num2 + num3\n\t\t\tif verbose:\n\t\t\t\tprint(\"Air pressure: \\t\\t\", self.data_pack.air_pres)\n\n\n\tdef __invalid_command(self):\n\t\tif(self.verbose):\n\t\t\tprint(\"\\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\")\n\t\t\tprint(\"\\tInvalid command\")\n\t\t\tprint(\"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\\n\\n\")\n\n\tdef __bit_check(self, x): \n\t\t\"\"\" \n\t\tx: Number to chekc\n\t\tn: bit to check from right most bit to left (1-8)\n\t\t\"\"\"\t\t\n\t\treturn lambda n: 0 < (x & (1 << (n - 1)))\n\t\n\tdef inc(self):\n\t\tself.index += 1\n\t\treturn self.index - 1"
},
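As read_package and __cmd_sensor_data above show, byte 0 of a payload must equal its total length, byte 1 carries the command (0x05 = measured station data), bytes 2-3 the station id, bytes 4-5 the packed time, bytes 6-7 the sensor-availability bits, and the sensor fields follow in a fixed order. A sketch of a builder for that layout; the function itself is an illustration, not part of the repository, but it reproduces the 16-byte example payload from pc_test.py:

    def build_sensor_payload(station_id, time_bytes, bat_lvl, temp, hum, pres):
        body = [
            0x05,                              # command: measured station data
            (station_id >> 8) & 0xFF,          # station id, high byte
            station_id & 0xFF,                 # station id, low byte
            time_bytes[0], time_bytes[1],      # packed day/hour/minute
            0x8E, 0x00,                        # availability: battery, temp, hum, pressure
            bat_lvl,                           # battery level
            int(temp), round(temp % 1 * 100),  # temperature: integer and 1/100 parts
            int(hum), round(hum % 1 * 100),    # humidity: integer and 1/100 parts
            (pres >> 16) & 0xFF,               # pressure, 24-bit big-endian
            (pres >> 8) & 0xFF,
            pres & 0xFF,
        ]
        return [len(body) + 1] + body          # byte 0: total length used as the check

    payload = build_sensor_payload(0x2E8F, (0xED, 0x8C), 0x64, 23.63, 44.35, 100676)
    assert payload == [0x10, 0x05, 0x2E, 0x8F, 0xED, 0x8C, 0x8E, 0x00,
                       0x64, 0x17, 0x3F, 0x2C, 0x23, 0x01, 0x89, 0x44]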
{
"alpha_fraction": 0.5514563322067261,
"alphanum_fraction": 0.7281553149223328,
"avg_line_length": 20.5,
"blob_id": "6417b4b4bf34ffbfab11add99e3cfe4862c6aa89",
"content_id": "4711bf66a877096c7befd782047666c0912144f5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 515,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 24,
"path": "/pc_test.py",
"repo_name": "Bakhoj/lora_master",
"src_encoding": "UTF-8",
"text": "from weather_master import PackageReader\nfrom aws import AWS\n\nprint(\"Start Master Module Simulator\")\n\nreader = PackageReader()\n\nprint(\"\\nRxDone\")\n\n#payload = [0x0A, 0x05, 0x2E, 0x95, 0xF3, 0x71, 0x84, 0x00, 0x62, 0x11]\npayload = [0x10, 0x05, 0x2E, 0x8F, 0xED, 0x8C, 0x8E, 0x00, 0x64, 0x17, 0x3F, 0x2C, 0x23, 0x01, 0x89, 0x44]\n\n#10052E8FED8C8E0064173F2C23018944\n\n\nprint(bytes(payload).hex())\n\nreader.read_package(payload, True)\n\ndb = AWS()\n\ndb.connect()\ndb.publish_sensor_data(reader.data_pack, True)\ndb.disconnect()"
},
{
"alpha_fraction": 0.664850115776062,
"alphanum_fraction": 0.6805933713912964,
"avg_line_length": 20.730262756347656,
"blob_id": "183e8ccebc46b5ac43b45a0c54c96ff29e9ec25d",
"content_id": "ee3f45764ee4205c9a04146fd6a6cba7d299d47d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3303,
"license_type": "no_license",
"max_line_length": 107,
"num_lines": 152,
"path": "/main.py",
"repo_name": "Bakhoj/lora_master",
"src_encoding": "UTF-8",
"text": "import time\nimport sys\nfrom SX127x.LoRa import *\nfrom SX127x.board_config import BOARD\nfrom weather_master import PackageReader\nfrom aws import AWS\n\nprint(\"Start Master Module\")\n\nBOARD.DIO0 = 4\nBOARD.DIO3 = 1\n#BOARD.DIO3 = 26\nBOARD.SWITCH = 21\nBOARD.setup()\n\nreader = PackageReader()\n\n\nclass LoRaMaster(LoRa):\n\tdef __init(self):\n\t\tsuper(LoRaRcvCont, self).__init__(verbose)\n\t\tself.set_mode(MODE.STDBY)\n\t\t#self.set_dio_mapping([0] * 6)\n\t\tself.db = AWS()\n\n\tdef on_rx_done(self):\n\t\tBOARD.led_on()\n\n\t\tself.clear_irq_flags(RxDone=1)\n\t\treader.read_package(self.read_payload(nocheck=True), True)\n\n\t\tself.set_mode(MODE.SLEEP)\n\n\t\tself.reset_ptr_rx()\n\t\tBOARD.led_off()\n\t\tself.set_mode(MODE.RXCONT)\n\n\t\tif(reader.is_accepted):\n\t\t\tself.db.connect()\n\t\t\tself.db.publish_sensor_data(reader.data_pack, True)\n\t\t\tself.db.disconnect()\n\n\n\tdef on_txdone(self):\n\t\tprint(\"\\nTxDone\")\n\t\tprint(self.get_irq_flags())\n\n\tdef on_cad_done(self):\n\t\tprint(\"\\non_CadDone\")\n\t\tprint(self.get_irq_flags())\n\n\tdef on_rx_timeout(self):\n\t\tprint(\"\\non_RxTimeout\")\n\t\tprint(self.get_irq_flags())\n\t\ttime.sleep(.5)\n\t\tself.set_mode(MODE.SLEEP)\n\t\tself.reset_ptr_rx()\n\t\tself.set_mode(MODE.RXCONT)\n\n\tdef on_valid_header(self):\n\t\tprint(\"\\non_ValidHeader\")\n\t\tprint(self.get_irq_flags())\n\n\tdef on_payload_crc_error(self):\n\t\tprint(\"\\non_PayloadCrcError\")\n\t\tprint(self.get_irq_flags())\n\n\tdef on_fhss_change_channel(self):\n\t\tprint(\"\\non_Fhss_changeChannel\")\n\t\tprint(self.get_irq_flags())\n\n\tdef print_payload(self, payload):\n\t\tchex_sum = payload[0]\n\t\tcmd = payload[1]\n\n\t\tprint(\"chex_sum: \\t{}\".format(chex_sum))\n\t\tprint(\"payload len: \\t{}\".format(len(payload)))\n\n\t\tif (chex_sum == len(payload)):\n\t\t\tprint(\"payload of correct length\")\n\t\telse:\n\t\t\tprint(\"payload of incorrect length\")\n\t\tprint(\"command: \\t{}\".format(cmd))\n\n\n\tdef start(self):\n\t\tself.db = AWS()\n\t\tself.reset_ptr_rx()\n\t\tself.set_mode(MODE.RXCONT)\n\t\t\n\t\twhile True:\n\t\t\ttime.sleep(.5)\n\t\t\trssi_value = self.get_rssi_value()\n\t\t\tsnr_value = self.get_pkt_snr_value()\n\t\t\tstatus = self.get_modem_status()\n\t\t\tsys.stdout.flush()\n\t\t\tsys.stdout.write(\"\\r%d %d %d %d\" % (rssi_value, snr_value, status['rx_ongoing'], status['modem_clear']))\n\nlora = LoRaMaster()\n\ntry:\n\t#lora = LoRa(verbose=False, do_calibration=False)\n\tlora.set_mode(MODE.STDBY)\n\n#\tlora.set_freq(868.0)\n\tlora.set_freq(868.25)\n\tlora.set_coding_rate(CODING_RATE.CR4_5)\n\tlora.set_bw(BW.BW125)\n\tlora.set_spreading_factor(9)\n\tlora.set_pa_config(output_power=5)\n\tlora.set_preamble(12)\n\tlora.set_rx_crc(0)\n\tlora.set_implicit_header_mode(0)\n\tlora.set_max_payload_length(250)\n\t#lora.set_invert_iq(0)\n\n\ttime.sleep(2)\nexcept:\n\tprint(\"Error in setup\")\n\tprint(\"Closing\")\n\tBOARD.teardown()\n\tprint(\"END\")\n\n#print(\"Version: \\t{}\".format(lora.get_version()))\n#print(\"Frequency: \\t{}MHz\".format(lora.get_freq()))\n#print(\"Modem Config 1: {}\".format(lora.get_modem_config_1()))\n#print(\"Modem Config 2: {}\".format(lora.get_modem_config_2()))\n#print(\"PA Config: \\t{}\".format(lora.get_pa_config()))\n\n#lora.set_mode(MODE.RXSINGLE)\n\n#print(lora)\n#assert(lora.get_agc_auto_on() == 1)\n\n# Start Listening\n\ntry:\n\tlora.start()\nexcept 
KeyboardInterrupt:\n\tsys.stdout.flush()\n\tprint(\"\")\n\tsys.stderr.write(\"KeyboardInterrupt\\n\")\nfinally:\n\tsys.stdout.flush()\n\tprint(\"\")\n\tlora.set_mode(MODE.SLEEP)\n\tprint(lora)\n\tBOARD.teardown()\n\tlora.db.disconnect()\n\n#BOARD.teardown()\n#print(\"END\")\n"
},
{
"alpha_fraction": 0.5483490824699402,
"alphanum_fraction": 0.5483490824699402,
"avg_line_length": 25.53125,
"blob_id": "f222b9ef61eff487cfb052da67c8a94b75a33605",
"content_id": "1d0380f6e5b3b62d3c84e547830dc5e9e821960c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 848,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 32,
"path": "/data_pack.py",
"repo_name": "Bakhoj/lora_master",
"src_encoding": "UTF-8",
"text": "from simple_time import SimpleTime\n\nclass DataPack():\n def __init__(self):\n self.reset()\n\n def reset(self):\n self.station_id = None\n self.time = SimpleTime()\n\n self.has_bat_lvl = False\n self.has_air_temp = False\n self.has_air_hum = False\n self.has_air_pres = False\n\n self.bat_lvl = None\n self.air_temp = None\n self.air_hum = None\n self.air_pres = None\n\n self.dict = None\n \n def buildDict(self):\n self.dict = dict()\n if self.has_bat_lvl:\n self.dict[\"batteryLevel\"] = self.bat_lvl\n if self.has_air_temp:\n self.dict[\"airTemperatur\"] = self.air_temp\n if self.has_air_hum:\n self.dict[\"airHumidity\"] = self.air_hum\n if self.has_air_pres:\n self.dict[\"airPressure\"] = self.air_pres"
},
{
"alpha_fraction": 0.6841530203819275,
"alphanum_fraction": 0.7054644823074341,
"avg_line_length": 31.105262756347656,
"blob_id": "883ed7c0c010d505da936274294b37cf3f807064",
"content_id": "1bbe8629a2208243242734a54636590b0e95f6e0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1830,
"license_type": "no_license",
"max_line_length": 167,
"num_lines": 57,
"path": "/aws.py",
"repo_name": "Bakhoj/lora_master",
"src_encoding": "UTF-8",
"text": "import time\nfrom AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTClient\nfrom random import randint\nfrom data_pack import DataPack\n#from simple_time import SimpleTime\n\n\n\nclass AWS():\n\t__host = \"a3867rpfz9hgy1.iot.eu-central-1.amazonaws.com\"\n\t__rootCAPath = \"deviceSDK/root_CA.pem\"\n\t__privateKeyPath = \"deviceSDK/private_key.pem.key\"\n\t__certificatePath = \"deviceSDK/certificate.pem.crt\"\n\n\tdef __init__(self):\n\t\tself.client = None\n\t\tself.master_id = '123456783245'\n\n\tdef connect(self, verbose = False):\n\t\tif verbose:\n\t\t\tprint(\"Connecting...\")\n\t\ttry:\n\t\t\tself.client = AWSIoTMQTTClient(\"RaspPiTest_01\")\n\t\t\tself.client.configureEndpoint(AWS.__host, 8883)\n\t\t\tself.client.configureCredentials(AWS.__rootCAPath, AWS.__privateKeyPath, AWS.__certificatePath)\n\t\t\t#self.client.configureAutoReconnectBackoffTime(1, 32, 20)\n\t\t\tself.client.configureOfflinePublishQueueing(-1)\n\t\t\tself.client.configureDrainingFrequency(2)\n\t\t\tself.client.configureConnectDisconnectTimeout(10)\n\t\t\tself.client.configureMQTTOperationTimeout(5)\n\t\t\tself.client.connect()\n\t\t\ttime.sleep(2)\n\t\texcept Exception:\n\t\t\tif verbose:\n\t\t\t\tprint(\"Connection failed\")\n\n\tdef publish_sensor_data(self, data_pack: DataPack, verbose = False):\n\t\t#timestamp = time.time()\n\n\t\ttopic = 'RaspPiTest_01/sensor_data'\n\t\tdataID = \"SD_{}_{}_{}\".format(data_pack.time.to_float_time(), self.master_id, data_pack.station_id)\n\n\t\tmsg = '\"dataID\": \"{:s}\", \"timestamp\": \"{}\", \"stationID\": \"{}\", \"masterID\": \"{}\"'.format(dataID, data_pack.time.to_float_time(), data_pack.station_id, self.master_id)\n\n\t\tdata_pack.buildDict()\n\t\tfor key, value in data_pack.dict.items():\n\t\t\tmsg += ', \"{:s}\": \"{}\"'.format(key, value)\n\t\tmsg = '{'+msg+'}'\n\n\t\tif verbose: print(\"Sending Data...\")\n\n\t\tself.client.publish(topic, msg, 1)\n\n\t\tif verbose: print(\"Data send\")\n\n\tdef disconnect(self, verbose = False):\n\t\tself.client.disconnect()\n"
}
] | 7 |
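publish_sensor_data above assembles the MQTT message as a hand-concatenated JSON string, which breaks silently if any value ever contains a quote or backslash. A sketch of the same payload built with the standard json module instead; the field names are taken from the code above, and the function is an illustrative alternative, not part of the repository:

    import json

    def build_message(data_pack, master_id):
        # Same fields publish_sensor_data() concatenates by hand, serialized
        # safely; json.dumps handles quoting and escaping for every value.
        ts = data_pack.time.to_float_time()
        msg = {
            "dataID": "SD_{}_{}_{}".format(ts, master_id, data_pack.station_id),
            "timestamp": str(ts),
            "stationID": str(data_pack.station_id),
            "masterID": master_id,
        }
        data_pack.buildDict()
        for key, value in data_pack.dict.items():
            msg[key] = str(value)
        return json.dumps(msg)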
AutumnPh3n0m3n0n/Election-Analysis
|
https://github.com/AutumnPh3n0m3n0n/Election-Analysis
|
ef2765c32ee8e60247381d840fe17528ed5d7989
|
83637bb84770931a8fc714bed994d91231bee9aa
|
f5996f3ec1edc23a8a4d9bdbb7f15b1e237e5947
|
refs/heads/main
| 2023-02-11T14:13:47.217067 | 2021-01-12T06:43:59 | 2021-01-12T06:43:59 | 328,891,021 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.49671053886413574,
"alphanum_fraction": 0.5418233275413513,
"avg_line_length": 25.986841201782227,
"blob_id": "30842b8ea0b67af2a2e06f7549cfe6eb2df18ed0",
"content_id": "524de1e176126e9a67f120f48b9da86b1960afa1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2128,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 76,
"path": "/PyRoll/pyroll.py",
"repo_name": "AutumnPh3n0m3n0n/Election-Analysis",
"src_encoding": "UTF-8",
"text": "import os\r\nimport csv\r\n\r\ntotal_votes = 0\r\nKhan = 0\r\nCorrey = 0\r\nLai = 0\r\nOTooley = 0\r\n\r\n\r\nelection_csv = os.path.join('', 'Resources', 'election_data.csv')\r\n\r\n#can is short for candidate\r\n#pol is short for politician\r\ndef gather_results(pol1, pol2, pol3, pol4, can1, can2, can3, can4, can_total):\r\n\tcandidate1 = (can1 / can_total) * 100\r\n\tcandidate2 = (can2 / can_total) * 100\r\n\tcandidate3 = (can3 / can_total) * 100\r\n\tcandidate4 = (can4 / can_total) * 100\r\n\t#round to nearest 1/1000th\r\n\tcandidate1 = round(candidate1, 3)\r\n\tcandidate2 = round(candidate2, 3)\r\n\tcandidate3 = round(candidate3, 3)\r\n\tcandidate4 = round(candidate4, 3)\r\n\t\r\n\t#have some blank space\r\n\tprint(f\"\")\r\n\tprint(f\"\")\r\n\t\r\n\tprint(f\"------------------------------------\")\r\n\tprint(f\"Election Polling Results\")\r\n\tprint(f\"------------------------------------\")\r\n\tprint(f\"Total Votes: {can_total}\")\r\n\tprint(f\"------------------------------------\")\r\n\tprint(f\"{pol1} : {candidate1} % ({can1})\")\r\n\tprint(f\"{pol2} : {candidate2} % ({can2})\")\r\n\tprint(f\"{pol3} : {candidate3} % ({can3})\")\r\n\tprint(f\"{pol4} : {candidate4} % ({can4})\")\r\n\tprint(f\"------------------------------------\")\r\n\t\r\n\t#can1 is Khan\r\n\t#can2 is Correy\r\n\t#can3 is Lai\r\n\t#Can4 is OTooley\r\n\tif ((can1 > can2) or (can1 > can3) or (can1 > can4)):\r\n\t\tprint(f\"WINNER: {pol1}\")\r\n\telif ((can2 > can1) or (can2 > can3) or (can2 > can4)):\r\n\t\tprint(f\"WINNER: {pol2}\")\r\n\telif ((can3 > can1) and (can3 > can2) and (can3 > can4)):\r\n\t\tprint(f\"WINNER: {pol3}\")\r\n\telse:\r\n\t\tprint(f\"WINNER: {pol4}\")\r\n\t\t\r\n\tprint(f\"------------------------------------\")\r\n\r\n\r\n# Read in the CSV file\r\nwith open(election_csv, 'r') as csvfile:\r\n\r\n # Split the data on commas\r\n election_reader = csv.reader(csvfile, delimiter=',')\r\n\r\n\t#skip a row for the header arguments\r\n header = next(election_reader)\r\n for row in election_reader:\r\n\t total_votes += 1\r\n\t if (row[2] == \"Khan\"):\r\n\t\t Khan += 1\r\n\t elif (row[2] == \"Correy\"):\r\n\t\t Correy += 1\r\n\t elif (row[2] == \"Li\"):\r\n\t\t Lai += 1\r\n\t else:\r\n\t\t OTooley += 1\r\n\t\t\t\r\n gather_results(\"Khan\", \"Correy\", \"Li\", \"OTooley\", Khan, Correy, Lai, OTooley, total_votes)\r\n\t"
},
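Chained pairwise comparisons like the winner check above are fragile: a single `or` where an `and` belongs silently crowns the wrong candidate, and ties fall through to the wrong branch. Keeping the tallies in a dict and letting max pick the largest value is both shorter and harder to get wrong; a minimal sketch (the tallies shown are made-up example numbers, not results from the data file):

    votes = {"Khan": 2218, "Correy": 704, "Li": 492, "OTooley": 105}
    winner = max(votes, key=votes.get)  # key with the largest tally
    print(f"WINNER: {winner}")          # -> WINNER: Khan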
{
"alpha_fraction": 0.6082848906517029,
"alphanum_fraction": 0.6213662624359131,
"avg_line_length": 24,
"blob_id": "6854a9138fb5c39c4185b5fd6e38a21391364330",
"content_id": "f66dc992a5762d576938c1279404c996ae328a14",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1376,
"license_type": "no_license",
"max_line_length": 95,
"num_lines": 53,
"path": "/PyBank/pybank.py",
"repo_name": "AutumnPh3n0m3n0n/Election-Analysis",
"src_encoding": "UTF-8",
"text": "import os\r\nimport csv\r\n\r\nnum_months = 0\r\ntotal_sum = 0\r\nmax_gain = 0\r\nmax_loss = 0\r\n#initializing these variables to null string values\r\ngain_month = None\r\nloss_month = None\r\n\r\nbudget_csv = os.path.join('', 'Resources', 'budget_data.csv')\r\n\r\n\r\ndef display_data(month_gain, month_loss, maximum_gain, maximum_loss, sum_total, num_of_months):\r\n\r\n\tmean_amount = (sum_total / num_of_months)\r\n\t#round to the nearest 1/1000th\r\n\tmean_amount = round(mean_amount, 3)\r\n\t\r\n\t#have some blank space\r\n\tprint(f\"\")\r\n\tprint(f\"\")\r\n\tprint(f\"Financial Analysis\")\r\n\tprint(f\"------------------------------------\")\r\n\tprint(f\"Total Months: {num_of_months}\")\r\n\tprint(f\"Total: {sum_total}\")\r\n\tprint(f\"Average Change: {mean_amount} \")\r\n\tprint(f\"Greatest Gain in Profits: {month_gain} (${maximum_gain})\")\r\n\tprint(f\"Greatest Loss in Profits: {month_loss} (${maximum_loss})\")\r\n\r\n\t\r\n\t\t\r\nwith open(budget_csv, 'r') as csvfile:\r\n\tbank_reader = csv.reader(csvfile, delimiter=',')\r\n\r\n\theader = next(bank_reader)\r\n\tfor row in bank_reader:\r\n\t\tnum_months += 1\r\n\t\ttotal_sum += int(row[1])\r\n\t\tif (max_loss > int(row[1])):\r\n\t\t\tmax_loss = int(row[1])\r\n\t\t\tloss_month = str(row[0])\r\n\r\n\t\telif (max_gain < int(row[1])):\r\n\t\t\tmax_gain = int(row[1])\r\n\t\t\tgain_month = str(row[0])\r\n\t\t\r\n\t\telse:\r\n\t\t\tmax_loss = max_loss\r\n\t\t\tmax_gain = max_gain\r\n\t\r\n\tdisplay_data(gain_month, loss_month, max_gain, max_loss, total_sum, num_months)"
}
] | 2 |
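One thing to double-check in pybank.py above: the figure printed as "Average Change" is total_sum / num_months, i.e. the mean monthly value. If the intent is the mean month-over-month change, the loop has to remember the previous row; a minimal sketch (the values are made-up examples standing in for the Profit/Losses column):

    values = [867884, 984655, 322013, -69417]
    changes = [b - a for a, b in zip(values, values[1:])]  # month-over-month deltas
    average_change = round(sum(changes) / len(changes), 3)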
rasielmoon/project15
|
https://github.com/rasielmoon/project15
|
9cb659c70633cf86ec6b4d4420872170eaf5aa89
|
3cea110908f9a9a372e807bce4254c6a1837189a
|
d8cf84e1b95e63bf2dfc946a2a2ba6d1ee7f1ba9
|
refs/heads/master
| 2021-01-10T05:00:19.991279 | 2015-12-01T04:52:46 | 2015-12-01T04:52:46 | 45,082,256 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.601995587348938,
"alphanum_fraction": 0.623059868812561,
"avg_line_length": 22.710525512695312,
"blob_id": "60bebb67dfeb69b2a0e5470bd6a31c433dfd2f14",
"content_id": "de449298217645cca2fd6aba85611367455a5418",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 902,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 38,
"path": "/src/enter.py",
"repo_name": "rasielmoon/project15",
"src_encoding": "UTF-8",
"text": "\n#:coding: utf-8\n#tmax -shell script/tmax_tdf_atpg_loc.tcl 669\n#tmax -shell script/tmax_fsim.tcl 709\n#-40\n\n\n\nimport os\nimport re\nimport random\nimport argparse\n\n\ndef rep(file1,file2):\n\tf = open(file2,'w')\n\n\told = \"1\"\n\tfor line in open(file1, 'r'):\n\t\tx = re.search(r'; \"',line)\n\t\tif x :\n\t\t\tline = line.replace('; \"',';\\n \"')\n\n\t\tf.write(line)\n\nif __name__ == '__main__':\n\tparser = argparse.ArgumentParser(description = 'clk or clock serch')\n\tparser.add_argument('-i','--i',dest = 'file_name',type=str,default='')\n\tparser.add_argument('-c','--c',dest = 'chain_count',type=str,default='')\n\n\targs = parser.parse_args()\n\tInput_filename = args.file_name\n\tchain = args.chain_count\n\t\n\thoge = re.split(r'/|\\.', Input_filename)\n\tz = hoge[-2]\n\tfilename1 = \"./tmax_output/\" + z +\"_scan\" + chain + \"_tdf_loc.stil\"\n\tfilename2 = \"./tmax_output/\" + z +\"_scan\" + chain + \"_tdf_loc_en.stil\"\n\trep(filename1,filename2)\n"
},
{
"alpha_fraction": 0.5784615278244019,
"alphanum_fraction": 0.5923076868057251,
"avg_line_length": 23.074073791503906,
"blob_id": "e863b82c912d3086b89e96298bbc251f19545718",
"content_id": "979df08cd96d53b2f2225fec8b17ed4abd75e643",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1300,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 54,
"path": "/src/name.py",
"repo_name": "rasielmoon/project15",
"src_encoding": "UTF-8",
"text": "#:coding: utf-8\n\nimport os\nimport re\nimport argparse\n\ndef find_out(file1,file2,chain,period):\n\tflg = 0\n\tfor line in open(file1,'r'):\n\t\tif flg == 0:\n\t\t\tx = re.search(r'process',line)\n\t\t\tif x:\n\t\t\t\tif \"clock\" in line :\n\t\t\t\t\tclk = \"clock\"\n\t\t\t\telse :\n\t\t\t\t\tclk = \"CLK\"\n\n\t\t\t\tif \"reset\" in line :\n\t\t\t\t\trst = \"reset\"\n\t\t\t\telse :\n\t\t\t\t\trst = \"RESET\"\n\n\t\t\t\tflg = 1\n\n\t\n#\tz = file1[4:-4]\t\n\thoge = re.split(r'/|\\.', file1)\n\tz = hoge[-2]\n#\tprint (hoge[-2])\n\t\n\tf_write = open(file2,'w')\n\tf_write.write('set RTL \"' + file1 + '\"\\n')\t\n\tf_write.write('set TOP \"' + z + '\"\\n')\t\t\n\n\tf_write.write('set CLOCK_PORT \"' + clk + '\"\\n')\n\tf_write.write('set CLOCK_PERIOD \"' + period + '\"\\n')\n\tf_write.write('set RESET_PORT \"' + rst + '\"\\n')\n\tf_write.write('set CHAIN_COUNT \"' + chain + '\"\\n')\n\tf_write.close()\n\nif __name__ == '__main__':\n\tparser = argparse.ArgumentParser(description = 'clk or clock serch')\n\tparser.add_argument('-i','--i',dest = 'file_name',type=str,default='')\n\tparser.add_argument('-c','--c',dest = 'chain_count',type=str,default='2')\n\tparser.add_argument('-p','--p',dest = 'c_period',type=str,default='10')\n\targs = parser.parse_args()\n\t\n\tInput_filename = args.file_name\n\tchain = args.chain_count\n\tperiod = args.c_period\n\n\tOutput_filename = \"tcl/name.txt\"\n\n\tfind_out(Input_filename,Output_filename,chain,period)\n"
},
{
"alpha_fraction": 0.49452027678489685,
"alphanum_fraction": 0.5147086977958679,
"avg_line_length": 26.80748748779297,
"blob_id": "2e69fd01fd4d98e65d4ac36667fa6117aa7a21e0",
"content_id": "75b6d5d0941b352fffd0136c785ee7031222b99b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5293,
"license_type": "no_license",
"max_line_length": 164,
"num_lines": 187,
"path": "/src/count.py",
"repo_name": "rasielmoon/project15",
"src_encoding": "UTF-8",
"text": "\n#:coding: utf-8\n#tmax -shell script/tmax_tdf_atpg_loc.tcl 669\n#tmax -shell script/tmax_fsim.tcl 709\n#-40\n\n\n\nimport os\nimport re\nimport random\nimport argparse\n\n\ndef rep(f_area,f_timing,f_tmax,chain,filex,file0,file1,filer,filemy,output):\n\tfo = open(output,'w')\n\tcount = 0\n\tnum = 0\n\tlength=0\n\told = \"0\"\n\tnumx =[]\n\tnum0 =[]\n\tnum1 =[]\n\tnumr =[]\n\tnummy =[]\n\n\tfo.write(\"########################Synthesis Result############################\\n\")\n\tfo.write(\"-------------------------------------------------------------------------------- \\n\")\n\tfor line in open(f_area,'r'):\n\t\tx = re.search(r'Number of sequential cells:',line)\n\t\tv = re.search(r'Total cell area:',line)\n\t\tif x:\n\t\t\tff = line.split(':')[1]\n\t\tif v:\n\t\t\tgate = line.split(':')[1]\n\n\tfo.write(\"Gates \" + gate)\n\tfo.write(\"FFs \" + ff)\n\n\tfor line in open(f_timing,'r'):\n\t\tx = re.search(r'data arrival time',line)\n\t\tif x:\n\t\t\ttime = line.split('time')[1]\n\t\t\t\n\tfo.write(\"Arrival time\" + time + \"\\n\")\n\n\n\n\n\n\n\tfo.write(\"##########################ATPG Result##############################\\n\")\n\tfo.write(\"-------------------------------------------------------------------------------- \\n\")\n\tfor line in open(f_tmax,'r'):\n\t\tx = re.search(r'total faults',line)\n\t\tv = re.search(r'internal patterns',line)\n\t\tw = re.search(r'fault coverage',line)\n\t\tz = re.search(r'Total session CPU time',line)\n\t\tif x:\n\t\t\tfault = line.split('faults')[1]\n\t\tif v:\n\t\t\tpattern = line.split('patterns')[1]\n\t\tif w:\n\t\t\tcoverage = line.split('coverage')[1]\n\t\tif z:\n\t\t\ttime = line.split('time')[1]\n\t\t\t\n\tfo.write(\"Faults \" + fault)\n\tfo.write(\"Patterns \" + pattern)\n\tfo.write(\"Fault_Coverage\" + coverage)\n\tfo.write(\"CPU_Runtime \" + time+\"\\n\")\n\n\n\n\n\n###################xの割合の計算#########################\n\tfor line in open(filex, 'r'):\n\t\tcount = count + 1\n\t\tnum = num + line.count(\"N\")\n\t\tlength = length + len(line)-1\n\t\tif count == chain :\n\t\t\tcount = 0\n#\t\t\tfo_x.write(str(float(num)/length*100) + \"\\n\")\n\t\t\tnumx.append(str(float(num)/length*100))\n\t\t\tnum = 0\n\t\t\tlength = 0\n\n#############0-fill遷移確率の割合の計算##################\n\tfor line in open(file0, 'r'):\n\t\tcount = count + 1\n\t\ttmp = list(line[:-1])\n\t\tfor hoge in range(1,len(tmp)+1) :\n\t\t\tif old != tmp[len(tmp)-hoge] :\n\t\t\t\tnum = num + len(tmp)-hoge+1\n\t\t\told = tmp[len(tmp)-hoge] \n\t\tlength = length + len(line)-1\n\n\t\tif count == chain :\n\t\t\tcount = 0\n#\t\t\tfo_0.write(str(num) + \"\\n\")\n\t\t\tnum0.append(str(num))\n\t\t\tnum = 0\n\t\t\tlength = 0\n#############1-fill遷移確率の割合の計算##################\n\told = \"1\"\n\tfor line in open(file1, 'r'):\n\t\tcount = count + 1\n\t\ttmp = list(line[:-1])\n\t\tfor hoge in range(1,len(tmp)+1) :\n\t\t\tif old != tmp[len(tmp)-hoge] :\n\t\t\t\tnum = num + len(tmp)-hoge+1\n\t\t\told = tmp[len(tmp)-hoge] \n\t\tlength = length + len(line)-1\n\n\t\tif count == chain :\n\t\t\tcount = 0\n#\t\t\tfo_1.write(str(num) + \"\\n\")\n\t\t\tnum1.append(str(num))\n\t\t\tnum = 0\n\t\t\tlength = 0\n#############r-fill遷移確率の割合の計算##################\n\tfor line in open(filer, 'r'):\n\t\tcount = count + 1\n\t\ttmp = list(line[:-1])\n\t\tfor hoge in range(1,len(tmp)+1) :\n\t\t\tif old != tmp[len(tmp)-hoge] :\n\t\t\t\tnum = num + len(tmp)-hoge+1\n\t\t\told = tmp[len(tmp)-hoge] \n\t\tlength = length + len(line)-1\n\n\t\tif count == chain :\n\t\t\tcount = 0\n#\t\t\tfo_r.write(str(num) + 
\"\\n\")\n\t\t\tnumr.append(str(num))\n\t\t\tnum = 0\n\t\t\tlength = 0\n\n#############my-fill遷移確率の割合の計算##################\n\tfor line in open(filemy, 'r'):\n\t\tcount = count + 1\n\t\ttmp = list(line[:-1])\n\t\tfor hoge in range(1,len(tmp)+1) :\n\t\t\tif old != tmp[len(tmp)-hoge] :\n\t\t\t\tnum = num + len(tmp)-hoge+1\n\t\t\told = tmp[len(tmp)-hoge] \n\t\tlength = length + len(line)-1\n\n\t\tif count == chain :\n\t\t\tcount = 0\n#\t\t\tfo_my.write(str(num) + \"\\n\")\n\t\t\tnummy.append(str(num))\n\t\t\tnum = 0\n\t\t\tlength = 0\n\n\tfo.write(\"#########################X-Fill Result##############################\\n\")\n\tfo.write(\"-------------------------------------------------------------------------------- \\n\")\n\tfo.write(\"PID X-ratio 0-fill 1-fill r-fill my-fill \\n\")\n\tfor hoge in range(0,len(numx)) :\n\t\ttmp = str(numx[hoge])\n\t\tfo.write(\" %3s %6s %6s %6s %6s %6s \\n\" %(str(hoge),tmp[:5],str(num0[hoge]),str(num1[hoge]),str(numr[hoge]),str(nummy[hoge])))\n \n\n\nif __name__ == '__main__':\n\tparser = argparse.ArgumentParser(description = 'clk or clock serch')\n\tparser.add_argument('-i','--i',dest = 'file_name',type=str,default='')\n\tparser.add_argument('-c','--c',dest = 'chain_count',type=str,default='')\n\n\targs = parser.parse_args()\n\tInput_filename = args.file_name\n\tchain = args.chain_count\n\t\n\thoge = re.split(r'/|\\.', Input_filename)\n\tz = hoge[-2]\n\n\tfilename1 = \"./probability/\" + z +\"_scan\" + chain + \".txt\"\n\tfilename2 = \"./probability/\" + z +\"_scan\" + chain + \"_0-fill.txt\"\n\tfilename3 = \"./probability/\" + z +\"_scan\" + chain + \"_1-fill.txt\"\n\tfilename4 = \"./probability/\" + z +\"_scan\" + chain + \"_r-fill.txt\"\n\tfilename5 = \"./probability/\" + z +\"_scan\" + chain + \"_myfill.txt\"\n\toutput = \"./probability/\" + z +\"_scan\" + chain + \"_report.txt\"\n\t\n\tfile_area = \"./report/\" + z +\"_area.txt\"\n\tfile_timing = \"./report/\" + z +\"_timing.txt\"\n\tfile_tmax = \"./report/\"+\"tmax_summaries.txt\"\n\n\trep(file_area,file_timing,file_tmax,int(chain),filename1,filename2,filename3,filename4,filename5,output)\n"
},
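The inner loops of count.py above all compute the same thing per scan vector: scanning from the right and comparing each bit with the previously shifted one, a flip between positions i and i+1 (0-based) adds i+1 to the total. That is a position-weighted transition count of the kind used to estimate scan-shift power, since earlier flips stay in the chain for more shift cycles. An equivalent, more direct sketch:

    def weighted_transition_count(bits, seed="1"):
        # seed: the value assumed to sit in the chain before the vector
        # shifts in (count.py carries `old` across patterns the same way).
        total = 0
        prev = seed
        for i in range(len(bits) - 1, -1, -1):
            if bits[i] != prev:
                total += i + 1
            prev = bits[i]
        return total

    print(weighted_transition_count("0110"))  # 4 + 3 + 1 = 8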
{
"alpha_fraction": 0.5273818373680115,
"alphanum_fraction": 0.5706426501274109,
"avg_line_length": 28.61481475830078,
"blob_id": "b5521ea02b5af7f8cd106ce015973204778cb5b3",
"content_id": "78f709ef65d12e5733d81db4a56049cf8c4fcb7f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4011,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 135,
"path": "/src/X_fill.py",
"repo_name": "rasielmoon/project15",
"src_encoding": "UTF-8",
"text": "\n#:coding: utf-8\n#tmax -shell script/tmax_tdf_atpg_loc.tcl 669\n#tmax -shell script/tmax_fsim.tcl 709\n#-40\n\n\n\nimport os\nimport re\nimport random\nimport argparse\n\n\ndef rep(file1,file2,file3,file4,file5,file6,file7,file8,file9,file10):\n\tflg = 1\n\tf2 = open(file2,'w')\n\tf3 = open(file3,'w')\n\tf4 = open(file4,'w')\n\tf5 = open(file5,'w')\n\tf6 = open(file6,'w')\n\tf7 = open(file7,'w')\n\tf8 = open(file8,'w')\n\tf9 = open(file9,'w')\n\tf10 = open(file10,'w')\n\tline2 = \"0\"\n\tline3 = \"0\"\n\tline4 = \"0\"\n\tline5 = \"0\"\n\n\told = \"1\"\n\tfor line in open(file1, 'r'):\n\t\tline2 = line\n\t\tline3 = line\n\t\tline4 = line\n\t\tline5 = line\n\n\t\tif flg == 0 :\n########################piの設定#######################\n\t\t\tx = re.search(r'_pi\"=(\\d|N|P)+',line) \n\t\t\tif x :\n\t\t\t\thoge = x.group(0)[5:]\n\t\t\t\tchar_hoge2 = list(hoge)\n\n\t\t\t\tfor num in range(0,len(char_hoge2)):\n\t\t\t\t\tif char_hoge2[num] == \"N\":\n\t\t\t\t\t\tchar_hoge2[num] = \"1\"\n\n\t\t\t\twrite_data = '_pi\"='\n\t\t\t\twrite_data = write_data + \"\".join(char_hoge2)\n\t\t\t\tline2 = line[0:x.start()]+ write_data +line[x.end():len(line)]\n\t\t\t\tline3 = line[0:x.start()]+ write_data +line[x.end():len(line)]\n\t\t\t\tline4 = line[0:x.start()]+ write_data +line[x.end():len(line)]\n\t\t\t\tline5 = line[0:x.start()]+ write_data +line[x.end():len(line)]\n\n###########################test_siの設定###############\n\t\t\ta = re.search(r'test_si\\d*\"=(\\d|N)+',line)\n\t\t\tif a :\n\t\t\t\tn_count = 0\n\t\t\t\tnum_count = 0\n\t\t\t\ttest = line.split('\"')[1]\n\t\t\t\thoge = a.group(0)[9:]\n\t\t\t\tchar_hoge2 = list(hoge)\n\t\t\t\tchar_hoge3 = list(hoge)\n\t\t\t\tchar_hoge4 = list(hoge)\n\t\t\t\tchar_hoge5 = list(hoge)\n\n\t\t\t\tfor num in range(0,len(char_hoge2)):\n\t\t\t\t\tnum = len(char_hoge2) - num - 1\n\n\t\t\t\t\tif char_hoge2[num] == \"N\":\n\t\t\t\t\t\trand = random.randint(0,1)\n\n\t\t\t\t\t\tchar_hoge2[num] = str(old)\n\t\t\t\t\t\tchar_hoge3[num] = \"0\"\n\t\t\t\t\t\tchar_hoge4[num] = \"1\"\n\t\t\t\t\t\tchar_hoge5[num] = str(rand)\n\n\t\t\t\t\telse:\n\t\t\t\t\t\tif char_hoge2[num] == \"1\" or char_hoge2[num] == \"0\":\n\t\t\t\t\t\t\told = char_hoge2[num]\n\n\n\t\t\t\twrite_data = test\n\t\t\t\twrite_data2 = write_data + \"\".join(char_hoge2)\n\t\t\t\twrite_data3 = write_data + \"\".join(char_hoge3)\n\t\t\t\twrite_data4 = write_data + \"\".join(char_hoge4)\n\t\t\t\twrite_data5 = write_data + \"\".join(char_hoge5)\n\n\t\t\t\tline2 = line[0:a.start()]+ write_data2 +line[a.end():len(line)]\n\t\t\t\tline3 = line[0:a.start()]+ write_data3 +line[a.end():len(line)]\n\t\t\t\tline4 = line[0:a.start()]+ write_data4 +line[a.end():len(line)]\n\t\t\t\tline5 = line[0:a.start()]+ write_data5 +line[a.end():len(line)]\n\t\t\t\n\t\t\t\tf6.write(\"\".join(list(hoge))[1:] + \"\\n\")\n\t\t\t\tf7.write(\"\".join(char_hoge2)[1:] + \"\\n\")\n\t\t\t\tf8.write(\"\".join(char_hoge3)[1:] + \"\\n\")\n\t\t\t\tf9.write(\"\".join(char_hoge4)[1:] + \"\\n\")\n\t\t\t\tf10.write(\"\".join(char_hoge5)[1:] + \"\\n\")\n\n\n\t\telse :\n\t\t\titemList = line[:-1].split(' ')\n\t\t\tif itemList[0] == \"Pattern\" :\n\t\t\t\tflg = 0\n\n\t\tf2.write(line2)\n\t\tf3.write(line3)\n\t\tf4.write(line4)\n\t\tf5.write(line5)\n\n\nif __name__ == '__main__':\n\tparser = argparse.ArgumentParser(description = 'clk or clock serch')\n\tparser.add_argument('-i','--i',dest = 'file_name',type=str,default='')\n\tparser.add_argument('-c','--c',dest = 'chain_count',type=str,default='')\n\n\targs = parser.parse_args()\n\tInput_filename = 
args.file_name\n\tchain = args.chain_count\n\t\n\thoge = re.split(r'/|\\.', Input_filename)\n\tz = hoge[-2]\n\n\tfilename1 = \"./tmax_output/\" + z +\"_scan\" + chain + \"_tdf_loc_en.stil\"\n\tfilename2 = \"./tmax_output/\" + z +\"_scan\" + chain + \"_tdf_loc_myfill.stil\"\n\tfilename3 = \"./tmax_output/\" + z +\"_scan\" + chain + \"_tdf_loc_0-fill.stil\"\n\tfilename4 = \"./tmax_output/\" + z +\"_scan\" + chain + \"_tdf_loc_1-fill.stil\"\n\tfilename5 = \"./tmax_output/\" + z +\"_scan\" + chain + \"_tdf_loc_r-fill.stil\"\n\tfilename6 = \"./probability/\" + z +\"_scan\" + chain + \".txt\"\n\tfilename7 = \"./probability/\" + z +\"_scan\" + chain + \"_myfill.txt\"\n\tfilename8 = \"./probability/\" + z +\"_scan\" + chain + \"_0-fill.txt\"\n\tfilename9 = \"./probability/\" + z +\"_scan\" + chain + \"_1-fill.txt\"\n\tfilename10 = \"./probability/\" + z +\"_scan\" + chain +\"_r-fill.txt\"\n\n\trep(filename1,filename2,filename3,filename4,filename5,filename6,filename7,filename8,filename9,filename10)\n"
},
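X_fill.py above produces four filled variants of every scan load at once: the repo's "my fill" (which appears to be an adjacent fill, repeating the nearest specified bit while scanning from the end), 0-fill, 1-fill, and random fill. A self-contained sketch of those four policies on a single vector; the function name and sample pattern are mine, not the repo's:

```python
import random


def x_fill(pattern, strategy="adjacent", seed=None):
    """Fill the don't-care bits (written 'N' in the STIL patterns).

    'adjacent' repeats the most recently seen specified bit while
    scanning from the end, like X_fill.py's char_hoge2 branch;
    '0' and '1' force constants; 'random' draws each bit independently.
    """
    rng = random.Random(seed)
    bits = list(pattern)
    old = "1"  # X_fill.py also seeds its adjacent fill with old = "1"
    for i in range(len(bits) - 1, -1, -1):
        if bits[i] == "N":
            if strategy == "adjacent":
                bits[i] = old
            elif strategy in ("0", "1"):
                bits[i] = strategy
            else:  # 'random'
                bits[i] = str(rng.randint(0, 1))
        elif bits[i] in ("0", "1"):
            old = bits[i]
    return "".join(bits)


for s in ("adjacent", "0", "1", "random"):
    print(s.rjust(8), x_fill("1NN0N1N", strategy=s, seed=42))
```

Adjacent fill is the usual low-power choice here, since repeating the last specified bit minimizes shift transitions, which is exactly what count.py then measures.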
{
"alpha_fraction": 0.8125,
"alphanum_fraction": 0.8327205777168274,
"avg_line_length": 22.565217971801758,
"blob_id": "e94db6c4c385abb1abd71990e005df5fd860db82",
"content_id": "86649aa005a476212cc6dbee6c95ccabd457ede9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1156,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 23,
"path": "/README.md",
"repo_name": "rasielmoon/project15",
"src_encoding": "UTF-8",
"text": "project15\nhoge.shを実行するとitc99の回路を自動でスキャン設計し、テストパターンを生成、さらにfillまで行い、各fillでの遷移数などを計算するスクリプトです。\n\n実行前にフォルダを4つ、作っておく必要がある(空で良い)\nフォルダ名は以下の通り\n\nprobability (遷移数計算に使うフォルダ)\nfile_output (デザインコンパイラのファイル出力先)\nreport (デザインコンパイラのレポート出力先)\ntmax_output (tmaxのファイル出力先)\n\n\nコマンド\nコマンド実行場所はhoge.shの置いてあるディレクトリである必要がある\nsh hoge.sh [rtlファイルパス] [chain数] [クロックピリオド]\n例\nsh hoge.sh rtl/b04.vhd 2 10\n\nsrcフォルダ内にpythonのスクリプトを入れており、各ファイルの動作は下記の通り\nx_fill.py・・・4種類のx_fillの実行\ncount.py・・・・fill後のファイルにおける遷移確率のカウント\nenter.py・・・・fillしやすいようにファイルに改行を加えていく\nname.py・・・・・common.tclの内容の書き換え、生成\n\n\n"
},
{
"alpha_fraction": 0.6136783957481384,
"alphanum_fraction": 0.6303142309188843,
"avg_line_length": 18.962963104248047,
"blob_id": "d0454989f30a3be31cf07d9cfe7e91d997155a44",
"content_id": "aac987cc16fef07794df50eb64fdff85aaaf2abe",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 653,
"license_type": "no_license",
"max_line_length": 67,
"num_lines": 27,
"path": "/hoge.sh",
"repo_name": "rasielmoon/project15",
"src_encoding": "UTF-8",
"text": "if [ $# != 3 ]; then\n\techo \"Error. Need [file name] , [chain_num] , [clk_period]\"\n\texit 0\nelse\n\tfilepass=$1\n\tchain=$2\n\tC_period=$3\nfi\n\n\n#common.tcl の作成\npython3 src/name.py -i \"${filepass}\" -c \"${chain}\" -p \"${C_period}\"\n\n#スキャン設計(論理合成)\ndc_shell -f ./tcl/dc_scan.tcl\n\n#テスト生成(遷移故障,LOC)\ntmax -shell ./tcl/tmax_tdf_atpg_loc.tcl\n\n#出てきたファイルを改行していくだけ\npython3 src/enter.py -i \"${filepass}\" -c \"${chain}\"\n\n#x_fillファイル生成 my_fill\npython3 src/X_fill.py -i \"${filepass}\" -c \"${chain}\"\n\n#遷移確率のカウント等\npython3 src/count.py -i \"${filepass}\" -c \"${chain}\"\n\n\n"
},
{
"alpha_fraction": 0.5990629196166992,
"alphanum_fraction": 0.6271753907203674,
"avg_line_length": 23.883333206176758,
"blob_id": "16f3d42f4b4f77dc2878a588cd5a7abd40d941b0",
"content_id": "9d440c8bf591390d03be2ba530459f8c02a713d9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1628,
"license_type": "no_license",
"max_line_length": 76,
"num_lines": 60,
"path": "/src/probability_count.py",
"repo_name": "rasielmoon/project15",
"src_encoding": "UTF-8",
"text": "\n#:coding: utf-8\n#tmax -shell script/tmax_tdf_atpg_loc.tcl 669\n#tmax -shell script/tmax_fsim.tcl 709\n#-40\n\n\n\nimport os\nimport re\nimport random\nimport argparse\n\n\ndef rep(c_num,file1,file2,file3,file4,file5,file6):\n\tflg = 1\n\told = \"1\"\n\tline_count = 0\n\tn_num = 0\n\tlen_num = 0\n\n\tf = open(file6,'w')\n\n\tfor line in open(file1, 'r'):\n\t\tn_num = n_num + line.count('N')\n\t\tlen_num = len_num + len(line)\n\t\tline_count = line_count + 1\n\n\t\tif(line_count == c_num):\n\t\t\tline_count = 0\n\t\t\tprint(n_num)\n\t\t\tprint(len_num-1)\n\t\t\tn_num = 0\n\t\t\tlen_num = 0\n\n######現在、スキャンチェーンの本数分ずつカウントしてる。全体の入力数とNの数は完了\n\n######次回、Nの割合の計算と、各ファイルの遷移確率の計算を行って終了############\n\n\n\nif __name__ == '__main__':\n\tparser = argparse.ArgumentParser(description = 'clk or clock serch')\n\tparser.add_argument('-i','--i',dest = 'file_name',type=str,default='')\n\tparser.add_argument('-c','--c',dest = 'chain_count',type=str,default='')\n\n\targs = parser.parse_args()\n\tInput_filename = args.file_name\n\tchain = args.chain_count\n\t\n\thoge = re.split(r'/|\\.', Input_filename)\n\tz = hoge[-2]\n\n\tfilename1 = \"./probability/\" + z +\"_scan\" + chain + \".txt\"\n\tfilename2 = \"./probability/\" + z +\"_scan\" + chain + \"_myfill.txt\"\n\tfilename3 = \"./probability/\" + z +\"_scan\" + chain + \"_0-fill.txt\"\n\tfilename4 = \"./probability/\" + z +\"_scan\" + chain + \"_1-fill.txt\"\n\tfilename5 = \"./probability/\" + z +\"_scan\" + chain +\"_r-fill.txt\"\n\tfilename6 = \"./probability/\" + z +\"_scan\" + chain +\"_result.txt\"\n\n\trep(int(chain),filename1,filename2,filename3,filename4,filename5,filename6)\n"
}
] | 7 |
davy1ex/myshop
|
https://github.com/davy1ex/myshop
|
2aed2e717e8180dbce1bb55ff1891d1904365c2b
|
d247d2ec4bc3bdc53a19242b3af2e786e42ad3b7
|
e87876072abb561f497082d7c335c0a9da032e58
|
refs/heads/master
| 2022-12-11T17:48:07.768247 | 2019-11-19T19:07:51 | 2019-11-19T19:07:51 | 215,226,524 | 3 | 0 | null | 2019-10-15T06:45:09 | 2019-12-06T19:10:18 | 2022-12-08T06:50:06 |
Python
|
[
{
"alpha_fraction": 0.706256628036499,
"alphanum_fraction": 0.707317054271698,
"avg_line_length": 28.4375,
"blob_id": "1e8c57bfe7bcbb1d7fb74211deffbb1fea9821ee",
"content_id": "f2c2e591d4c333636273995f46173246e6803ea8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 943,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 32,
"path": "/myshop/myshop/views.py",
"repo_name": "davy1ex/myshop",
"src_encoding": "UTF-8",
"text": "# from django.shortcuts import render\nfrom django.views.generic import TemplateView\nfrom django.views import generic\nfrom django.shortcuts import redirect, HttpResponse\n\nfrom product.models import Product\n# db with login user from social\nfrom django.contrib.auth import logout\n\nfrom django.shortcuts import render\nfrom social_django.models import UserSocialAuth\n\n\n# class IndexView(TemplateView): \n# template_name = 'main/index.html'\n# context_object_name = \"user\"\n\n# def get_queryset(self):\n# # user = UserSocialAuth.objects.all()\n# meta = request.session[0]\n\ndef index(request):\n # print(\"\\n\\n\", request.user.is_authenticated)\n if request.user.is_authenticated:\n user = UserSocialAuth.objects.get(id=request.session['_auth_user_id'])\n else:\n user = ''\n return render(request, \"main/index.html\", {\"user\": user})\n\ndef logout_view(request):\n logout(request)\n return redirect('/')\n\n"
},
{
"alpha_fraction": 0.7402933835983276,
"alphanum_fraction": 0.7402933835983276,
"avg_line_length": 43.57692337036133,
"blob_id": "f2d2bd20de2e6c7489165c5c0deeacd4ad3aa419",
"content_id": "2b573c60c10b39f75acabc38e129195e83c48572",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1159,
"license_type": "no_license",
"max_line_length": 142,
"num_lines": 26,
"path": "/myshop/myshop/urls.py",
"repo_name": "davy1ex/myshop",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\nfrom django.urls import path, include\n\nfrom django.conf.urls import url\n\nfrom django.conf import settings\nfrom django.conf.urls.static import static\n\n# from django.views.generic import TemplateView\n# from myshop.views import IndexView\nimport myshop.views \nfrom product.views import ProductsView\nimport shopingCart.views\n\nSOCIAL_AUTH_URL_NAMESPACE = 'social'\nurlpatterns = [\n path('admin/', admin.site.urls),\n # path('', IndexView.as_view(template_name=\"main/index.html\")),\n path('', myshop.views.index, name='main'),\n path('products/', ProductsView.as_view(template_name=\"products/products.html\"), name=\"products\"),\n url('', include('social_django.urls', namespace='social')),\n url('logout/', myshop.views.logout_view, name='logout'),\n url('shoping_cart/main/', shopingCart.views.ShopingCartView.as_view(template_name=\"shoping_cart/shoping_cart.html\"), name=\"shoping_cart\"),\n path('shoping_cart/add_<int:product_id>/', shopingCart.views.add_product),\n path('shoping_cart/remove_<int:product_id>/', shopingCart.views.remove_product),\n] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n"
},
{
"alpha_fraction": 0.8148148059844971,
"alphanum_fraction": 0.8148148059844971,
"avg_line_length": 54,
"blob_id": "34258ef9f1425e9d0558c0553ea6f93941c21306",
"content_id": "9f7ed3de3c0c7353fd5e1ac2c9661d9fb5306e98",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 54,
"license_type": "no_license",
"max_line_length": 54,
"num_lines": 1,
"path": "/run.sh",
"repo_name": "davy1ex/myshop",
"src_encoding": "UTF-8",
"text": "source venv/bin/activate && myshop/manage.py runserver"
},
{
"alpha_fraction": 0.8074533939361572,
"alphanum_fraction": 0.8074533939361572,
"avg_line_length": 19.125,
"blob_id": "74ca5741e08570823b37a4901b73247757893ff0",
"content_id": "01ddc0f99f16c139d673d05493ea317ada6cf830",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 161,
"license_type": "no_license",
"max_line_length": 42,
"num_lines": 8,
"path": "/myshop/product/admin.py",
"repo_name": "davy1ex/myshop",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\nfrom product.models import Product\n\n\nclass ProductAdmin(admin.ModelAdmin):\n pass\n\nadmin.site.register(Product, ProductAdmin)\n"
},
{
"alpha_fraction": 0.665043830871582,
"alphanum_fraction": 0.6679649353027344,
"avg_line_length": 39.880001068115234,
"blob_id": "ca3c2ccdf46f60f431eb8bdb738744f5377be0d3",
"content_id": "58cb6a69dcaf90028fa1d9303ced9475bb7f135b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1170,
"license_type": "no_license",
"max_line_length": 133,
"num_lines": 25,
"path": "/myshop/shopingCart/models.py",
"repo_name": "davy1ex/myshop",
"src_encoding": "UTF-8",
"text": "import json\nfrom django.db import models\n\n# from product.models import Product\n\n\nclass ShopingCart(models.Model):\n json_product_ids = models.CharField(max_length=500, default='{\"buyed_product_ids\": []}') # список предполагаемых ИДшников покупок\n\n def get_list_product_ids(self):\n ''' извлекает жсон список ИДшников продуктов '''\n return json.loads(self.json_product_ids)['buyed_product_ids']\n\n def add_product_id(self, product_id):\n ''' добавляет новыйй ИДишник и упаковывает в жсон, сохраняет в БД. Всё лежит в первой записи '''\n list_product_ids = self.get_list_product_ids()\n list_product_ids.append(product_id)\n self.json_product_ids = json.dumps({\"buyed_product_ids\": list_product_ids})\n super().save()\n \n def delete_product_id(self, product_id):\n list_product_ids = self.get_list_product_ids()\n list_product_ids.remove(product_id)\n self.json_product_ids = json.dumps({\"buyed_product_ids\": list_product_ids})\n super().save()\n\n\n\n\n\n"
},
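The ShopingCart model above stores the cart as a JSON-encoded id list inside a CharField instead of a ManyToMany relation. A plain-Python sketch of the same encode/append/remove round-trip, usable for unit-testing the serialization without a database (the class name CartCodec is mine):

```python
import json


class CartCodec:
    """Mimics ShopingCart's JSON-in-a-CharField storage scheme."""

    def __init__(self, raw='{"buyed_product_ids": []}'):
        self.raw = raw  # what the CharField would hold

    def ids(self):
        return json.loads(self.raw)["buyed_product_ids"]

    def add(self, product_id):
        ids = self.ids()
        ids.append(product_id)
        self.raw = json.dumps({"buyed_product_ids": ids})

    def remove(self, product_id):
        ids = self.ids()
        ids.remove(product_id)  # raises ValueError if absent, like the model
        self.raw = json.dumps({"buyed_product_ids": ids})


cart = CartCodec()
cart.add(3)
cart.add(7)
cart.remove(3)
print(cart.raw)  # {"buyed_product_ids": [7]}
```

Note that max_length=500 caps how many ids the cart can hold; a TextField, or a proper join table, avoids that ceiling.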
{
"alpha_fraction": 0.7582089304924011,
"alphanum_fraction": 0.7582089304924011,
"avg_line_length": 24.846153259277344,
"blob_id": "347477d77a2712cc7fddf4f29214c74fd1021ac8",
"content_id": "c6805cd6f28b2eec10bb3386c1b16c2cdd640b66",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 335,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 13,
"path": "/myshop/product/views.py",
"repo_name": "davy1ex/myshop",
"src_encoding": "UTF-8",
"text": "# from django.shortcuts import render\nfrom django.views.generic import TemplateView\nfrom django.views import generic\n\nfrom product.models import Product\n\n\nclass ProductsView(generic.ListView):\n template_name = 'products.html'\n context_object_name = 'product_list'\n\n def get_queryset(self):\n return Product.objects.all()"
},
{
"alpha_fraction": 0.7129411697387695,
"alphanum_fraction": 0.7388235330581665,
"avg_line_length": 31.69230842590332,
"blob_id": "e179c96df009006cff61ecba4d0282a0a8a411cc",
"content_id": "936f827d4dfd4f1df4e48cc56742f8a1100f37a1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 425,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 13,
"path": "/myshop/product/models.py",
"repo_name": "davy1ex/myshop",
"src_encoding": "UTF-8",
"text": "import os\n\nfrom django.db import models\nfrom myshop.settings import STATIC_URL\n\n\nclass Product(models.Model):\n name = models.CharField(max_length=30)\n price = models.IntegerField(default=0)\n currency = models.CharField(max_length=10, default=\"USD\")\n desctiption = models.CharField(max_length=100)\n type_product = models.CharField(max_length=100)\n photo = models.ImageField(upload_to= \"product\", blank=True)\n"
},
{
"alpha_fraction": 0.6800000071525574,
"alphanum_fraction": 0.6800000071525574,
"avg_line_length": 24.16666603088379,
"blob_id": "12235f608f4e4a06947c7b72e93a9f3c15b24a98",
"content_id": "9d6859c6a09029dde6485006b8489503d4aa7e12",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "JavaScript",
"length_bytes": 150,
"license_type": "no_license",
"max_line_length": 55,
"num_lines": 6,
"path": "/myshop/static/js/shoping_cart.js",
"repo_name": "davy1ex/myshop",
"src_encoding": "UTF-8",
"text": "function remove_items(product_id) {\n // shoping_cart/remove_<int:product_id>\n location.href = \"/shoping_cart/remove_\"+product_id;\n}\n\n// onclick."
},
{
"alpha_fraction": 0.3730715215206146,
"alphanum_fraction": 0.37634408473968506,
"avg_line_length": 31.923076629638672,
"blob_id": "8b808604175033e2bdfadafcb1a5b8272e0defe8",
"content_id": "2252cd5e25da844486436a16c10c7ed81bc9df2f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "HTML",
"length_bytes": 2156,
"license_type": "no_license",
"max_line_length": 149,
"num_lines": 65,
"path": "/myshop/templates/base.html",
"repo_name": "davy1ex/myshop",
"src_encoding": "UTF-8",
"text": "<!DOCTYPE html>\n<html lang=\"en\">\n<head>\n <meta charset=\"UTF-8\">\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">\n \n <!-- если есть титульник -->\n {% if title %} $ \n <title>{{ title }}</title>\n {% else %}\n <title>myshop</title>\n {% endif %}\n\n {% load static %}\n <link rel=\"stylesheet\" href=\"https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css\" integrity=\"sha384-ggOyR0iXCbMQv3Xipma34MD+dH/1fQ784/j6cY/iJTQUOhcWr7x9JvoRxT2MZw1T\" crossorigin=\"anonymous\">\n <link rel=\"stylesheet\" type=\"text/css\" href=\"{% static 'css/base.css' %}\">\n \n {% block head_css %}\n {% endblock %}\n\n</head>\n<body>\n <header>\n <div class=\"wrapper\">\n <div class=\"mainmenu\">\n <ul> \n <li><a href=\"/\">Home</a></li>\n <li><a href=\"/products\">Products</a></li>\n <li><a href=\"/admin/product/product\">Products</a></li>\n <li><a href=\"/admin/product/product/add\">Add Products</a></li>\n <li><a href=\"/shoping_cart/main\">Shoping cart</a></li>\n </ul>\n </div>\n \n \n <div class=\"usermenu\">\n <ul>\n {% if user %}\n <li><strong>{{ user }}</strong></li>\n <li><a href=\"\">Setting</a></li>\n <li><a href=\"{% url 'logout' %}\">LogOut</a></li>\n {% else %}\n <li><a href=\"\">LogIn</a></li>\n <li>\n <div class=\"vk-logo\">\n <a href=\"{% url \"social:begin\" \"vk-oauth2\" %}\">\n <img src=\"{% static 'images/vk-logo.png'%}\" alt=\"\">\n </a>\n </div>\n </li>\n \n {% endif %} \n </ul>\n </div>\n \n \n </div>\n </header>\n <div class=\"body_wrapper\">\n\n {% block body %}\n {% endblock %}\n </div>\n</body>\n</html>"
},
{
"alpha_fraction": 0.49732619524002075,
"alphanum_fraction": 0.5802139043807983,
"avg_line_length": 19.77777862548828,
"blob_id": "5402d20a1008d8fa059e6b188d14be6b90d4731b",
"content_id": "3afe643781e444c815da5938b691af177919c41c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 374,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 18,
"path": "/myshop/product/migrations/0004_auto_20191110_1914.py",
"repo_name": "davy1ex/myshop",
"src_encoding": "UTF-8",
"text": "# Generated by Django 2.2.6 on 2019-11-10 19:14\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('product', '0003_auto_20191106_0656'),\n ]\n\n operations = [\n migrations.RenameField(\n model_name='product',\n old_name='desctiption',\n new_name='description',\n ),\n ]\n"
},
{
"alpha_fraction": 0.5237020254135132,
"alphanum_fraction": 0.6004514694213867,
"avg_line_length": 23.61111068725586,
"blob_id": "fe23071f84baa6fdab360bb2b37046f5bd6f53bc",
"content_id": "a5b7110f7418c1ae9d16187df240de4ba06a79f7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 443,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 18,
"path": "/myshop/shopingCart/migrations/0005_auto_20191110_1914.py",
"repo_name": "davy1ex/myshop",
"src_encoding": "UTF-8",
"text": "# Generated by Django 2.2.6 on 2019-11-10 19:14\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('shopingCart', '0004_auto_20191107_0820'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='shopingcart',\n name='json_product_ids',\n field=models.CharField(default='{\"buyed_product_ids\": []}', max_length=500),\n ),\n ]\n"
},
{
"alpha_fraction": 0.5069060921669006,
"alphanum_fraction": 0.5538673996925354,
"avg_line_length": 24.85714340209961,
"blob_id": "3628c2080c381b0bbd8585130b2f0a24bb82bf98",
"content_id": "954c9bc72154872f90c2a1150d80160cf8e14dd8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 724,
"license_type": "no_license",
"max_line_length": 69,
"num_lines": 28,
"path": "/myshop/product/migrations/0003_auto_20191106_0656.py",
"repo_name": "davy1ex/myshop",
"src_encoding": "UTF-8",
"text": "# Generated by Django 2.2.6 on 2019-11-06 06:56\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('product', '0002_auto_20191008_0933'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='product',\n name='currency',\n field=models.CharField(default='USD', max_length=10),\n ),\n migrations.AddField(\n model_name='product',\n name='price',\n field=models.IntegerField(default=0),\n ),\n migrations.AlterField(\n model_name='product',\n name='photo',\n field=models.ImageField(blank=True, upload_to='product'),\n ),\n ]\n"
},
{
"alpha_fraction": 0.7078916430473328,
"alphanum_fraction": 0.7114251852035522,
"avg_line_length": 30.44444465637207,
"blob_id": "badc8523da6a3e71102f80d8ae06c6d477898b88",
"content_id": "5eaa7c6033b693eb9f7dbc68a9e6440aea77abf9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 849,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 27,
"path": "/myshop/shopingCart/views.py",
"repo_name": "davy1ex/myshop",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import redirect, HttpResponse\nfrom django.views import generic\nfrom .models import ShopingCart\nfrom product.models import Product\n\n\nclass ShopingCartView(generic.ListView):\n template_name = 'shoping_cart.html'\n context_object_name = 'list_buyed_product'\n\n def get_queryset(self):\n list_product_ids = ShopingCart.objects.get(id=1).get_list_product_ids()\n product_list = []\n for product_id in list_product_ids:\n product_list.append(Product.objects.get(id=product_id))\n \n return product_list\n\n\ndef add_product(request, product_id):\n ShopingCart.objects.get(id=1).add_product_id(product_id)\n return redirect('/products')\n\n\ndef remove_product(request, product_id):\n ShopingCart.objects.get(id=1).delete_product_id(product_id)\n return redirect(\"/shoping_cart/main\")\n"
},
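Every view in the record above does ShopingCart.objects.get(id=1) and therefore raises ShopingCart.DoesNotExist on a fresh database. A hedged variant (my suggestion, not the repo's code) that lazily creates the singleton cart with Django's get_or_create:

```python
from django.shortcuts import redirect

from .models import ShopingCart


def _the_cart():
    # get_or_create returns (object, created); only the object matters here.
    cart, _ = ShopingCart.objects.get_or_create(id=1)
    return cart


def add_product(request, product_id):
    _the_cart().add_product_id(product_id)
    return redirect('/products')


def remove_product(request, product_id):
    _the_cart().delete_product_id(product_id)
    return redirect("/shoping_cart/main")
```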
{
"alpha_fraction": 0.8299999833106995,
"alphanum_fraction": 0.8299999833106995,
"avg_line_length": 19,
"blob_id": "379f6f193d8e19dd5f63320ce6f62f67f8145446",
"content_id": "b619f92e706ce28d63f6b34bdf43f8d3f48b5041",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 100,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 5,
"path": "/myshop/shopingCart/admin.py",
"repo_name": "davy1ex/myshop",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\nfrom .models import ShopingCart\n\n\nadmin.site.register(ShopingCart)\n"
},
{
"alpha_fraction": 0.5169946551322937,
"alphanum_fraction": 0.5778175592422485,
"avg_line_length": 24.409090042114258,
"blob_id": "b0f61093e514d1ac8b2c109ad09fbafd8053b5b5",
"content_id": "e05552bf888097660c3ceefbf7679a6f460c937b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 559,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 22,
"path": "/myshop/shopingCart/migrations/0003_auto_20191107_0814.py",
"repo_name": "davy1ex/myshop",
"src_encoding": "UTF-8",
"text": "# Generated by Django 2.2.6 on 2019-11-07 08:14\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('shopingCart', '0002_auto_20191107_0802'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='shopingcart',\n name='json_products_id',\n ),\n migrations.AddField(\n model_name='shopingcart',\n name='json_product_ids',\n field=models.CharField(default=\"{'buyed_product_ids': []}\", max_length=500),\n ),\n ]\n"
}
] | 15 |
fausst/satchel-devops-challenge
|
https://github.com/fausst/satchel-devops-challenge
|
d8bc7e2a098a8f03bee40ed33e57d1e35fb839a8
|
445ef19bcf6e0227690582a45a54979fb84fddad
|
080f631d726f111d36980adb0523d640f7f453b6
|
refs/heads/master
| 2020-11-24T06:36:52.982385 | 2019-12-24T01:40:56 | 2019-12-24T01:40:56 | 228,011,831 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.713567852973938,
"alphanum_fraction": 0.713567852973938,
"avg_line_length": 13.214285850524902,
"blob_id": "9e22a43b0ebdf3d166b3923cb15a1ada4de08893",
"content_id": "c24e003f757b115b9548c4e7366b333f283a2da8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 199,
"license_type": "no_license",
"max_line_length": 49,
"num_lines": 14,
"path": "/01-docker/etcd/Dockerfile",
"repo_name": "fausst/satchel-devops-challenge",
"src_encoding": "UTF-8",
"text": "FROM debian:latest\n\nRUN apt-get update\nRUN apt-get install -y git golang make\n\nRUN git clone https://github.com/etcd-io/etcd.git\n\nWORKDIR /etcd\n\nENV PATH=\"/etcd/bin:${PATH}\"\n\nRUN make \n\nCMD [\"etcd\"]\n"
},
{
"alpha_fraction": 0.7704347968101501,
"alphanum_fraction": 0.7791304588317871,
"avg_line_length": 43.230770111083984,
"blob_id": "662a8ff9ac2beec774c372e266e459eae11c0b76",
"content_id": "74ce5fcc2e676dd484096ba7514ab8daa213e4c1",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 575,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 13,
"path": "/01-docker/compose/wordpress/README.md",
"repo_name": "fausst/satchel-devops-challenge",
"src_encoding": "UTF-8",
"text": "# Wordpress with MariaDB stack\nThis compose file have 2 services:\n - db --> mariadb\n - wordpress\n\nThe goal of this excercise has been acomplished by:\nPersistence: In this case I used db_data for volume persistence in the mariadb database. Any changes do\nne by wordpress will be permanent saved in this volume.\nNetworking: I have created a isolated newtork for this project called \"isolated_nw\" and it is shared by\n both db and wordpress services, using the default \"brigde\" driver.\n\nThe solution has been started and tested with the following URL:\n- http://localhost:8000\n"
},
{
"alpha_fraction": 0.5723491311073303,
"alphanum_fraction": 0.5858230590820312,
"avg_line_length": 20.59493637084961,
"blob_id": "acbdedaf1acf070f2987e392fd6793179dc15122",
"content_id": "7c3f389cb34f3dc7d1b059d9f82f0bea87f81d8d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "YAML",
"length_bytes": 1707,
"license_type": "no_license",
"max_line_length": 48,
"num_lines": 79,
"path": "/01-docker/compose/wp-joomla/docker-compose.yml",
"repo_name": "fausst/satchel-devops-challenge",
"src_encoding": "UTF-8",
"text": "version: '2'\n\nservices:\n nginx-proxy:\n image: jwilder/nginx-proxy\n ports:\n - \"80:80\"\n volumes:\n - /var/run/docker.sock:/tmp/docker.sock:ro\n networks:\n - joomla_nw\n - wordpress_nw\n db_wordpress:\n image: mariadb:10.2\n restart: always\n environment:\n MYSQL_ROOT_PASSWORD: somewordpress\n MYSQL_DATABASE: wordpress\n MYSQL_USER: wordpress\n MYSQL_PASSWORD: wordpress\n volumes:\n - vm_wordpress:/var/lib/mysql\n networks:\n - wordpress_nw\n\n wordpress:\n depends_on:\n - db_wordpress\n - nginx-proxy\n image: wordpress:latest\n restart: always\n expose:\n - 80\n environment:\n WORDPRESS_DB_HOST: db_wordpress:3306\n WORDPRESS_DB_USER: wordpress\n WORDPRESS_DB_PASSWORD: wordpress\n WORDPRESS_DB_NAME: wordpress\n VIRTUAL_HOST: mywordpress.local \n networks:\n - wordpress_nw\n\n joomla:\n depends_on:\n - db_joomla\n - nginx-proxy\n image: joomla:latest\n restart: always\n expose:\n - 80\n environment:\n JOOMLA_DB_HOST: db_joomla:3306\n JOOMLA_DB_USER: joomla \n JOOMLA_DB_PASSWORD: joomla \n JOOMLA_DB_NAME: joomla \n VIRTUAL_HOST: myjoomla.local \n networks:\n - joomla_nw\n\n db_joomla:\n image: mariadb:10.2\n restart: always\n environment:\n MYSQL_ROOT_PASSWORD: somejoomla\n MYSQL_DATABASE: joomla \n MYSQL_USER: joomla \n MYSQL_PASSWORD: joomla \n volumes:\n - vm_joomla:/var/lib/mysql\n networks:\n - joomla_nw\nnetworks:\n joomla_nw:\n driver: bridge\n wordpress_nw:\n driver: bridge\nvolumes:\n vm_wordpress: {}\n vm_joomla: {}\n\n"
},
{
"alpha_fraction": 0.6969696879386902,
"alphanum_fraction": 0.7631416320800781,
"avg_line_length": 31.959182739257812,
"blob_id": "17456563530041e28537f6ef3f6bb8769b09dae4",
"content_id": "76b1241971ca35eee0a173c800d297c08fad529e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1617,
"license_type": "no_license",
"max_line_length": 175,
"num_lines": 49,
"path": "/03-jenkins/README.md",
"repo_name": "fausst/satchel-devops-challenge",
"src_encoding": "UTF-8",
"text": "# Jenkinsfile\nIn this task i have used a virtual machine, due to lack of resources in the ec2.micro jenkins server. The pipeline job in this server was failing with a \"Out of memory\" error:\n\nfatal error: runtime: out of memory\n\nSo, in order to test the pipeline job, i have launched a jenkins installation in my Ubuntu 14.04 virtual machine (4gb memory) and is has worked.\n\nFor this pipeline job, i have configured the following:\n\n- docker-hub jenkins credential. Needed for push the image in docker-hub\n- docker-hub new repository: fausst/etcd\n- In the jenkins task:\n - Configure a connection with my github repo ( It will do a initial code download)\n - TAG text var. This var will be asigned in the Jenkinsfile for tagging the image.\n\n\nIn the Jenkinsfile, there are 4 steps:\n\n1.- Clone github repo\n...\n > git checkout -f 2e0083b10e3d16bbaceaca15503ab6e9198b5cac # timeout=10\nCommit message: \"Jenkinsfile prueba\"\n\n2.- Build etcd with Dockerfile, and tag it\n...\nSuccessfully built 9b3e2fad326f\nSuccessfully tagged fausst/etcd:my_tag\n\n3.- Testing the build launching \"etcdctl version\":\n...\n[Pipeline] {\n[Pipeline] sh (hide)\n+ etcdctl version\netcdctl version: 3.5.0-pre\nAPI version: 3.5\n[Pipeline] }\n\n4.- Push image in docker-hub\n...\n+ docker push registry.hub.docker.com/fausst/etcd:my_tag\nThe push refers to repository [registry.hub.docker.com/fausst/etcd]\n...\nmy_tag: digest: sha256:23cf615d70389f4c02285d0f2bd10047db367d3a37825ecd3f6b0179caacc790 size: 1379\n...\nFinished: SUCCESS\n\n\nThe jenkins ec2 public IP: http://13.59.12.231:8080/ \nDocker-hub image: https://hub.docker.com/repository/docker/fausst/etcd \n\n"
},
{
"alpha_fraction": 0.6369760632514954,
"alphanum_fraction": 0.6377245783805847,
"avg_line_length": 40.75,
"blob_id": "808316fa55394ff9eedd281b66bc7965196ccbaa",
"content_id": "1b197b0f6b30a3a4bca0a08d46a84f2bdb1f8e0c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 1336,
"license_type": "no_license",
"max_line_length": 106,
"num_lines": 32,
"path": "/02-aws/ansible_test/config/templates/gunicorn_start",
"repo_name": "fausst/satchel-devops-challenge",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\nchdir={{ django_dir }}\nNAME=\"{{ django_project }}\" # Name of the application\nDJANGODIR={{ django_dir }} # Django project directory\nSOCKFILE={{ django_dir }}/{{ django_project }}/run/gunicorn.sock # socket\nUSER=root # the user to run as\nGROUP=root # the group to run as\nNUM_WORKERS=3 # how many worker processes should Gunicorn spawn\nDJANGO_SETTINGS_MODULE={{ django_project }}.settings.production\nDJANGO_WSGI_MODULE={{ django_project }}.wsgi # WSGI module name\nDATABASE_URL=postgres://{{ dbuser }}:{{ dbpassword }}@localhost/{{ dbname }}\nSTATIC_ROOT={{ static_dir }}\n\nexport DJANGO_SETTINGS_MODULE=$DJANGO_SETTINGS_MODULE\nexport PYTHONPATH=$DJANGODIR:$PYTHONPATH\nexport STATIC_ROOT=$STATIC_ROOT\nexport DATABASE_URL=$DATABASE_URL\n\n# Create the run directory if it doesn't exist\nRUNDIR=$(dirname $SOCKFILE)\ntest -d $RUNDIR || mkdir -p $RUNDIR\n\n# Start your Django Unicorn\n# Programs meant to be run under supervisor should not daemonize themselves (do not use --daemon)\nexec /usr/bin/gunicorn ${DJANGO_WSGI_MODULE}:application \\\n--name $NAME \\\n--workers $NUM_WORKERS \\\n--user=$USER --group=$GROUP \\\n--bind=unix:$SOCKFILE \\\n--log-level=debug \\\n--log-file=-\n"
},
{
"alpha_fraction": 0.7884615659713745,
"alphanum_fraction": 0.7937062978744507,
"avg_line_length": 94.33333587646484,
"blob_id": "bf3990ef47af21b9e4fbc21b2b245cbe5605deb0",
"content_id": "b65ec9d14b296139fb83956a3f7abf5802e8c53e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 572,
"license_type": "no_license",
"max_line_length": 191,
"num_lines": 6,
"path": "/01-docker/compose/backup/README.md",
"repo_name": "fausst/satchel-devops-challenge",
"src_encoding": "UTF-8",
"text": "# MariaDB backup job\nIn order to resolve this exercise, i have created another service for the backup to work.\nIn this service, installed with mariadb:10.2, the backup is launched with mysqldump tool and connecting to the remote database. In addition, a new directory is created to generate the backup.\nTo copy the backup to my local machine path, i have used a bind mount to a \"./backup\" directory from the reciently created in the container machine.\n\nFinally, i have launched a \"docker-compose up\" command and proved the backup is generated correctly in the local path.\n"
},
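The backup sidecar above relies on a fixed `sleep 10` before invoking mysqldump, which races against MariaDB's startup. A hedged Python sketch of a more patient trigger that polls the port first; the host, credentials, and output path mirror the compose file, while the polling logic and the assumption that a mysqldump binary is on PATH are mine:

```python
import socket
import subprocess
import time


def wait_for_port(host, port, timeout=60.0):
    """Poll until a TCP connect succeeds instead of sleeping blindly."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            with socket.create_connection((host, port), timeout=2):
                return
        except OSError:
            time.sleep(1)
    raise RuntimeError("database never came up at %s:%d" % (host, port))


if __name__ == "__main__":
    wait_for_port("db", 3306)  # 'db' is the service name on isolated_nw
    with open("/backups/wordpress.sql", "wb") as out:
        subprocess.check_call(
            ["mysqldump", "--host", "db", "--user=wordpress",
             "-pwordpress", "wordpress"],
            stdout=out,
        )
```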
{
"alpha_fraction": 0.6112777590751648,
"alphanum_fraction": 0.6670665740966797,
"avg_line_length": 82.3499984741211,
"blob_id": "54686f63ad9aea21bbb1346bd527f5876c7daf7b",
"content_id": "0c3424eb20b1e69d67d71f99f6e1b429e5089a24",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1677,
"license_type": "no_license",
"max_line_length": 168,
"num_lines": 20,
"path": "/01-docker/compose/wp-joomla/README.md",
"repo_name": "fausst/satchel-devops-challenge",
"src_encoding": "UTF-8",
"text": "# WP-Joomla\nTo achieve this i have used a nginx-proxy as a proxy container: jwilder/nginx-proxy\nWith it, i have added 2 services, joomla and wordpress, and declared the environment var VIRTUAL_HOST pointing to the names configured in /etc/hosts.\nIn this way i have now 2 containers added to a proxy server serving on port 80.\n2 more services has been declared in compose-file as databases for joomla and wordpress, and has been isolated in a specific network and persisted in a specific volume.\nFor the proxy to work, both networks has been added in the compose-file too.\nAt the end, i have the following containers:\n\n$ docker ps\nCONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nf0e8a4efdb39 joomla:latest \"/entrypoint.sh apac…\" 11 minutes ago Up 11 minutes 80/tcp composejoomla_joomla_1\n4e255b0d9132 wordpress:latest \"docker-entrypoint.s…\" 11 minutes ago Up 11 minutes 80/tcp composejoomla_wordpress_1\n04e2445d767d mariadb:10.2 \"docker-entrypoint.s…\" 11 minutes ago Up 11 minutes 3306/tcp composejoomla_db_joomla_1\n41ad28ebd149 mariadb:10.2 \"docker-entrypoint.s…\" 11 minutes ago Up 11 minutes 3306/tcp composejoomla_db_wordpress_1\n3c046f73cca4 jwilder/nginx-proxy \"/app/docker-entrypo…\" 11 minutes ago Up 11 minutes 0.0.0.0:80->80/tcp composejoomla_nginx-proxy_1\n\n\nAnd the services are accesibles in the following URLs:\njoomla --> http://myjoomla.local/\nwordpress --> http://mywordpress.local/\n"
},
{
"alpha_fraction": 0.7077283263206482,
"alphanum_fraction": 0.7306791543960571,
"avg_line_length": 26.727272033691406,
"blob_id": "a67243bf57ef2ddc5c568195866d65ce82bf7211",
"content_id": "754e504de7ca246694835ec74110de5a4bdfe10f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2135,
"license_type": "no_license",
"max_line_length": 135,
"num_lines": 77,
"path": "/02-aws/ansible_test/README.md",
"repo_name": "fausst/satchel-devops-challenge",
"src_encoding": "UTF-8",
"text": "# ansible_test\n\nThe following structure has been created to resolve the execrise:\nansible_test:\n- site.yml\n- ansible.cfg\n- group_vars:\n - all\n- addkey:\n - public_keys:\n \t- ubuntu.pub\n - tasks:\n - main.yml\n- config:\n - templates:\n \t- gunicorn_start\n\t- nginx.conf\n\t- production.j2\n\t- supervisor.conf\n - tasks:\n - main.yml\n- packages:\n - tasks:\n - main.yml\n- postgresql:\n - tasks:\n - main.yml\n- django:\n - tasks:\n - main.yml\n\n\nIn the site.yml (main playbook yaml), ansible do the following:\n\n- Create a security group for the ec2 instances. It will receive request in ports 22,80 and 443, and will send to anywhere (all)\n\n- Create 2 ec2 Ubuntu 14.04 TLS instances\n\n- Install required packages for the deploy to work\n\n- Generate and add our local host public key and adding to authorized_keys file on remote servers\n\n- Pull the django application code from the public github repo fausst/django_test to the servers\n\n- Install django requirements with pip\n\n- Create postgresql database and user for the django app\n\n- Deploy django app (collect stats, migrate database)\n\n- Generate a new AWS load balancer in a default security group listening 80 port and add both web servers to it.\n\n- Copy/substitute neccessary config files for Nginx,Supervisor,Gunicorn,Django\n\n- Restart supervisor and nginx\n\n\nSome considerations to keep in mind:\n\n- A dynamic inventory has been used (ec2.py,ec2.ini)\n\n- Application written in Django\n\n- Launch command: ansible-playbook site.yml --private-key /home/ubuntu/my_key_pair.pem\n\n- Result: a welcome messagge will be displayed if ELB DNS Name is requested (http://awselbdemo-455991089.us-east-2.elb.amazonaws.com): \n\nHello world FROM SERVER 3.135.185.189!\nHello world FROM SERVER 3.133.154.43!\n\nDepending on which server the ELB are pointing one or the other will be printed (F5 many times to see it changing).\n\nThis playbook has been tested in a empty execution (dropping ec2 instances) and it works.\n\n#update_playbook.yml\n\nIt will make a pull of code in the servers, will modify the required django files, and will restart supervisor and nginx.\n"
},
{
"alpha_fraction": 0.752293586730957,
"alphanum_fraction": 0.752293586730957,
"avg_line_length": 61.28571319580078,
"blob_id": "9c74094a4f445fd1dfd408d5f35d67fa244ee93f",
"content_id": "f26d71954d6368a792479b56922dbf70ca9a5143",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 872,
"license_type": "no_license",
"max_line_length": 361,
"num_lines": 14,
"path": "/01-docker/etcd/README.md",
"repo_name": "fausst/satchel-devops-challenge",
"src_encoding": "UTF-8",
"text": "# etcd Dockerfile\nI have used debian:latest as base image to this build.\nFirstly, we need to deploy the following:\n - git --> In order to clone the etcd source code from github\n - golang --> It is neccessary for etcd to work\n - make --> we will install etcd using make Makefile\n\nnext, we do a git clone from github etcd repo, and deploy it on /etcd\n\nFor the install, we change the current path to the deployed path (/etcd) and launch \"make\"\n\nFinally, to achieve a default entrypoint to start the etcd server, we have to define it as CMD and not as ENTRYPOINT, because we wont be able to launch a \"etcdctl\" command later (A entrypoint cannot be overrided by a command and will be attached). Using CMD, we have a \"etcd\" as entrypoint, and we can later launch a \"etcdctl\" command in a run and it will work.\n\n*Dont forget to add the etcdctl path in the ENV environment var\n"
},
{
"alpha_fraction": 0.5656836628913879,
"alphanum_fraction": 0.5907059907913208,
"avg_line_length": 22.787233352661133,
"blob_id": "0e0baac4535b1e58bba3cf00774fdc70b565b729",
"content_id": "3d243ce6772ec059a930ab31303382df1e3a641f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "YAML",
"length_bytes": 1119,
"license_type": "no_license",
"max_line_length": 139,
"num_lines": 47,
"path": "/01-docker/compose/backup/docker-compose.yml",
"repo_name": "fausst/satchel-devops-challenge",
"src_encoding": "UTF-8",
"text": "version: '3.3'\n\nservices:\n db:\n image: mariadb:10.2\n restart: always\n environment:\n MYSQL_ROOT_PASSWORD: somewordpress\n MYSQL_DATABASE: wordpress\n MYSQL_USER: wordpress\n MYSQL_PASSWORD: wordpress\n ports:\n - 3306\n - 3307\n networks:\n - isolated_nw\n db-backup:\n image: mariadb:10.2\n environment:\n MYSQL_ROOT_PASSWORD: somewordpress\n MYSQL_DATABASE: wordpress\n MYSQL_USER: wordpress\n MYSQL_PASSWORD: wordpress\n MYSQL_HOST: db \n command: bash -c \"sleep 10 && mkdir -p /backups && mysqldump --host db --user=wordpress -pwordpress wordpress >/backups/wordpress.sql\"\n volumes:\n - \"./backup:/backups\"\n networks:\n - isolated_nw\n\n wordpress:\n depends_on:\n - db\n image: wordpress:latest\n ports:\n - \"8000:80\"\n restart: always\n environment:\n WORDPRESS_DB_HOST: db:3306\n WORDPRESS_DB_USER: wordpress\n WORDPRESS_DB_PASSWORD: wordpress\n WORDPRESS_DB_NAME: wordpress\n networks:\n - isolated_nw\nnetworks:\n isolated_nw:\n driver: bridge\n\n"
},
{
"alpha_fraction": 0.7629629373550415,
"alphanum_fraction": 0.7629629373550415,
"avg_line_length": 32.5,
"blob_id": "f91cec7b5948768156113d604531bdd1782e2949",
"content_id": "446f6c3e374ee5b467e41f7c8a0232e6e98c545e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 135,
"license_type": "no_license",
"max_line_length": 75,
"num_lines": 4,
"path": "/02-aws/python_app/django/yourproject/views.py",
"repo_name": "fausst/satchel-devops-challenge",
"src_encoding": "UTF-8",
"text": "from django.http import HttpResponse\n\ndef index(request):\n return HttpResponse(\"Hello world FROM SERVER %PUSH_SERVER_PUBLIC_IP%!\")\n\n"
}
] | 11 |
ACTCollaboration/sync-nersc-scinet
|
https://github.com/ACTCollaboration/sync-nersc-scinet
|
f2421c3353fb54b3545366a8f512c2703f9bdddd
|
b881e8ba376695ca0df01a4aa18b70b5c41864c0
|
c7c8be7e15e42f9e991423c4b90034badfec41a6
|
refs/heads/master
| 2021-01-11T19:44:39.989288 | 2018-06-22T00:26:46 | 2018-06-22T00:26:46 | 79,385,695 | 0 | 1 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6647727489471436,
"alphanum_fraction": 0.6772727370262146,
"avg_line_length": 35.58333206176758,
"blob_id": "51836bafacac242cb93d0a5827aae970da730000",
"content_id": "8bb5c572ef4fa24fd47aa298e6f1b6b72edd0bf2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 880,
"license_type": "no_license",
"max_line_length": 204,
"num_lines": 24,
"path": "/syncScript.sh",
"repo_name": "ACTCollaboration/sync-nersc-scinet",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\n# Highly recommended that the following DEL_FLAG flag is kept commented. Only enable it if you are sure\n# that the destination directories on Cori are correct and that no unrelated files will be\n# removed.\n#DEL_FLAG=\"--delete\"\n\ncd \"$(dirname \"$0\")\"\nsource sync_common.sh\nSCI_SYNC_USER=\"$(trimWhite $(cat config/scinetUserName))\"\n\n\nwhile IFS=, read INSRC_DIR INDEST_DIR\ndo\n\n SRC_DIR=\"$(trimWhite $INSRC_DIR)\"\n DEST_DIR=\"$(trimWhite $INDEST_DIR)\"\n case \"$SRC_DIR\" in \\#*) continue ;; esac # ignore comments\n if [ -z \"$SRC_DIR\" ]; then continue ; fi # ignore empty\n if [ -z \"$DEST_DIR\" ]; then continue ; fi # ignore empty\n nohup rsync -azPvL -e \"ssh -A -o IdentityFile=~/.ssh/id_astro_compute_040418.pub [email protected] ssh -A \" nia-datamover1:$SRC_DIR/ $DEST_DIR $DEL_FLAG > rsyncCommand.log 2>&1 &\n wait\n \ndone <$1\n\n\n"
},
{
"alpha_fraction": 0.7055016160011292,
"alphanum_fraction": 0.708737850189209,
"avg_line_length": 29.899999618530273,
"blob_id": "063b4c8684d1c5390e2b9a0fd03f9ce93a9d0bb7",
"content_id": "9f0c1cb7d1753bae9dfc4646bffbc4605141a12d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 309,
"license_type": "no_license",
"max_line_length": 73,
"num_lines": 10,
"path": "/localScript.sh",
"repo_name": "ACTCollaboration/sync-nersc-scinet",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n\ncd \"$(dirname \"$0\")\"\nsource sync_common.sh\n\nNERSC_SYNC_USER=\"$(trimWhite $(cat config/nerscUserName))\"\nNERSC_DIR=\"$(trimWhite $(cat config/nerscPath))\"\n\nssh -A [email protected] \"$NERSC_DIR/syncScript.sh $NERSC_DIR/listForSync.txt\"\n#ssh -A [email protected] \"$NERSC_DIR/syncScript.sh $NERSC_DIR/listForSync.txt\"\n"
},
{
"alpha_fraction": 0.5829564929008484,
"alphanum_fraction": 0.5871304273605347,
"avg_line_length": 30.25,
"blob_id": "7f592ab6959ce7e7d515dc3dae28d24c2d61ac2e",
"content_id": "6a84742baa3be2ac93f8b15fe1d437325b53fccc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2875,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 92,
"path": "/actsync.py",
"repo_name": "ACTCollaboration/sync-nersc-scinet",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/python2\nfrom __future__ import print_function\nimport time, sys, yaml, os\nfrom daemon import runner\nimport datetime as dt\nimport pytz\nfrom dateutil import parser\nimport getpass\nimport calendar\nimport numpy as np\nimport traceback\nimport subprocess\n\n\n\ndef check_if_time(frequency,trigger_day,time_zone_string,trigger_time,tolerance):\n today = dt.date.today()\n if frequency=='weekly':\n assert trigger_day in [calendar.day_name[x] for x in xrange(7)], \"Unrecognized day name.\"\n isDay = (calendar.day_name[today.weekday()]==trigger_day)\n if not(isDay): return False\n elif frequency=='nightly' or frequency=='daily':\n pass\n else:\n raise NotImplementedError\n\n timezone=pytz.timezone(time_zone_string)\n time_now = dt.datetime.now(tz=timezone)\n\n datestring = dt.datetime.strftime(dt.datetime.today().date(),format='%b %d %Y')\n\n passtime = trigger_time\n dtin = parser.parse(passtime)\n dtin_aware = timezone.localize(dtin)\n if abs((dtin_aware-time_now).total_seconds())<tolerance:\n return True\n else:\n return False\n \n \n\nclass App():\n def __init__(self,daemon_command,yaml_file,time_interval_sec=60,tolerance_seconds=240):\n self.dir = os.path.dirname(os.path.abspath(__file__))\n self.stdin_path = '/dev/null'\n self.stdout_path = self.dir+'/syncact_out_'+str(time.time())+\".log\"\n self.stderr_path = self.dir+'/syncact_err_'+str(time.time())+\".log\"\n self.pidfile_path = '/tmp/syncact_daemon.pid'\n self.pidfile_timeout = 5\n self.interval = time_interval_sec\n self.tolerance = tolerance_seconds\n assert self.tolerance>self.interval\n\n self.last_day = -1\n\n if daemon_command!=\"stop\":\n with open('settings.yaml') as f:\n self.settings = yaml.safe_load(f)\n print(\"syncact: Daemon is running.\")\n \n\n def run(self):\n \n while True:\n\n now_day = dt.datetime.today().day\n if check_if_time(self.settings['frequency'],\n None,\n self.settings['time_zone'],\n self.settings['trigger_time'],\n tolerance=self.tolerance) and (now_day!=self.last_day):\n print(\"Starting sync...\")\n self.last_day = dt.datetime.today().day\n subprocess.call(\".\"+self.dir+\"/localScript.sh\") \n \n time.sleep(self.interval)\n\n\n\ndef main(argv):\n try:\n yamlFile = sys.argv[2]\n except:\n assert sys.argv[1]==\"stop\", \"No settings yaml file specified.\"\n yamlFile = None\n\n app = App(sys.argv[1],yamlFile)\n daemon_runner = runner.DaemonRunner(app)\n daemon_runner.do_action()\n \nif (__name__ == \"__main__\"):\n main(sys.argv)\n"
},
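check_if_time above fires when the wall clock falls within `tolerance` seconds of the configured trigger time (after an optional weekday gate for weekly mode). A quick standalone check of that window logic, assuming actsync.py is importable with its dependencies (python-daemon, pytz, python-dateutil, numpy) installed and run under Python 2 per the shebang; the settings values are illustrative, not from the repo's settings.yaml:

```python
from actsync import check_if_time

hit = check_if_time(
    frequency="daily",          # 'nightly'/'daily' skip the weekday check
    trigger_day=None,           # only consulted when frequency == 'weekly'
    time_zone_string="US/Eastern",
    trigger_time="03:00",       # parsed by dateutil against today's date
    tolerance=240,              # fire within +/- 240 s of 03:00 local time
)
print("within the trigger window:", hit)
```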
{
"alpha_fraction": 0.7488471269607544,
"alphanum_fraction": 0.7502660751342773,
"avg_line_length": 40.39706039428711,
"blob_id": "ca2a31f134d03360a040255d1ace162136c6dee1",
"content_id": "93cb6db0b87c1e11d34670e5117c8f370e4f7d0b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 2819,
"license_type": "no_license",
"max_line_length": 101,
"num_lines": 68,
"path": "/README.md",
"repo_name": "ACTCollaboration/sync-nersc-scinet",
"src_encoding": "UTF-8",
"text": "Scinet->NERSC nightly sync\n==========================\n\nThis set of scripts allows us to:\n\n1. ssh into cori.nersc.gov\n2. rsync a list of directories from scinet to cori\n\nIt is intended as a one-way stream to update the\nNERSC ACT project directories with the latest\nproducts (primarily from the mapmakers) from Scinet.\n\n\nYou can open an issue on the github page to add\na directory on Scinet that you would like synced\nto NERSC.\n\n\nRequirements\n------------\n\n- localMachine must have keys for Scinet and Cori with SSH Agent Forwarding enabled\n- If password-protected (recommended), keys must be in the keyring so no password is requested\n- clone of this repo on localMachine\n- localMachine must have a cron daemon running, and localScript.sh should be added to nightly\n- clone of this repo in user space on Cori\n\nProcedure\n---------\n\nTo use this yourself, you only need to clone this repo on your local machine and on Cori,\nand modify the following files:\n- scinetUserName: replace msyriac with your username on Scinet\n- nerscUserName: replace msyriac with your username on NERSC/Cori\n- nerscPath: path to the directory of this repo on NERSC/Cori, no slash at end\n\nAnd then add localScript.sh to your nightly (or as desired) cron jobs.\n\nThis sets up the following procedure:\n- cron on localMachine calls localScript.sh\n- localScript.sh schematically does\n `ssh {nerscUserName}@cori.nersc.gov '{nerscPath}/syncScript.sh {nerscPath}/syncList.txt'`\n- syncScript.sh on cori reads from syncList.txt line by line for {SRC_DIR} and {DEST_DIR}\n- syncScript.sh issues schematically `rsync {scinetUserName}@login.scinet.utoronto.ca:{SRC_DIR} {DEST_DIR}`\n for each line in syncList.txt\n\n\nManual sync\n-----------\n\nTo manually sync, log in to cori and run `{nerscPath}/syncScript.sh {nerscPath}/syncList.txt`.\n\n\nWarnings and caveats\n--------------------\n\n1. If files are being written on Scinet while the rsync is running, the destination files on\n NERSC may get corrupted. If this was the case, you just need to manually sync as described\n above and rsync will take care of the rest.\n \n2. By default, the $DEL_FLAG=\"--delete\" is disabled in syncScript.sh. Enabled, this flag deletes\n files on Cori that are not in the corresponding Scinet directory. However, if you mess up the\n destination directory on Cori, this could potentially delete parent directories with unrelated\n project files. That's bad. So I've disabled it. This means that old files that are no longer in\n the Scinet directories will be retained in the Cori directories. For now, this might be preferable\n to risking total data loss. TODO: provide a script to clean up Cori directories with files older\n than a specified date. TODO: Another way to avoid this is to always check that the destination\n directory is a child of the ACT project directory with no \"..\" in the path.\n\n\n\n\n"
}
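The README's second caveat leaves a TODO for a cleanup script with a safety check that the destination is a child of the ACT project directory. A hedged sketch of that TODO; the project-root path, function name, and age policy are all my assumptions, not the repo's:

```python
#!/usr/bin/env python
"""Delete files under DEST older than MAX_AGE_DAYS, refusing to act
unless DEST resolves to a path inside the project root (so no '..'
escapes), per the safety check suggested in the README."""
import os
import sys
import time

PROJECT_ROOT = "/project/projectdirs/act"  # assumed ACT project dir on Cori


def cleanup(dest, max_age_days, dry_run=True):
    dest = os.path.realpath(dest)
    root = os.path.realpath(PROJECT_ROOT)
    if not dest.startswith(root + os.sep):
        raise ValueError("refusing to clean outside the project dir: " + dest)
    cutoff = time.time() - max_age_days * 86400
    for folder, _, files in os.walk(dest):
        for name in files:
            path = os.path.join(folder, name)
            if os.path.getmtime(path) < cutoff:
                print(("would remove " if dry_run else "removing ") + path)
                if not dry_run:
                    os.remove(path)


if __name__ == "__main__":
    cleanup(sys.argv[1], float(sys.argv[2]))
```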
] | 4 |
simbas2000/DjangoTest1
|
https://github.com/simbas2000/DjangoTest1
|
57c2b13da5072d178af8b178bfdf80a2830c5ea5
|
b63ad1c2f8978d80bfae3d2d12ad7e3ca5c05a62
|
f5c8a7675bdb78a4963fab7e373d944f1d35e766
|
refs/heads/master
| 2022-05-02T03:38:48.182466 | 2019-09-27T18:29:03 | 2019-09-27T18:29:03 | 199,300,611 | 1 | 0 | null | 2019-07-28T14:55:16 | 2019-12-01T14:40:26 | 2022-04-22T22:02:34 |
JavaScript
|
[
{
"alpha_fraction": 0.6206195950508118,
"alphanum_fraction": 0.6353479027748108,
"avg_line_length": 36.132076263427734,
"blob_id": "b0b7de72ea5eab95e6b586df3509e8b2ad408806",
"content_id": "8df96109f89ba7daf4962bab252b2315a47c586c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1969,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 53,
"path": "/functional_tests/test_simple_list_creation.py",
"repo_name": "simbas2000/DjangoTest1",
"src_encoding": "UTF-8",
"text": "from .base import FunctionalTest\nimport time\nfrom selenium import webdriver\nfrom selenium.webdriver.common.keys import Keys\n\n\nclass NewVisitorTest(FunctionalTest):\n\n def test_can_start_a_list_and_retrive_it_later(self):\n # User visits homepage\n self.browser.get(self.server_url)\n\n # User notices title and header\n self.assertIn('To-Do', self.browser.title)\n header_text = self.browser.find_element_by_tag_name('h1').text\n self.assertIn('To-Do', header_text)\n time.sleep(0.02) # seems needed to avoid bug with selenium (?)\n\n # User enter items in list using inputbox\n inputbox = self.get_item_input_box()\n self.assertEqual(inputbox.get_attribute('placeholder'), 'Enter a to-do item')\n inputbox.send_keys('Item 1')\n inputbox.send_keys(Keys.ENTER)\n time.sleep(0.2)\n user_list_url = self.browser.current_url\n self.assertRegex(user_list_url, '/lists/.+')\n self.check_for_row_in_list_table('1: Item 1')\n inputbox = self.get_item_input_box()\n inputbox.send_keys('Item 2')\n inputbox.send_keys(Keys.ENTER)\n time.sleep(0.2)\n self.check_for_row_in_list_table('2: Item 2')\n\n self.browser.quit()\n self.browser = webdriver.Firefox()\n\n self.browser.get(self.server_url)\n page_text = self.browser.find_element_by_tag_name('body').text\n self.assertNotIn('1: Item 1', page_text)\n self.assertNotIn('2: Item 2', page_text)\n\n inputbox = self.get_item_input_box()\n inputbox.send_keys('l2_i1')\n inputbox.send_keys(Keys.ENTER)\n time.sleep(0.4)\n\n user2_list_url = self.browser.current_url\n self.assertRegex(user2_list_url, '/lists/.+')\n self.assertNotEqual(user_list_url, user2_list_url)\n\n page_text = self.browser.find_element_by_tag_name('body').text\n self.assertNotIn('1: Item 1', page_text)\n self.assertIn('l2_i1', page_text)\n\n"
},
{
"alpha_fraction": 0.6210851073265076,
"alphanum_fraction": 0.6312307119369507,
"avg_line_length": 36.78333282470703,
"blob_id": "674894613107d472c9d7fcea33bf472aaaad16fa",
"content_id": "8488e7b2c6f6f6a8d4ae2019ee047a0a86f41543",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2267,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 60,
"path": "/functional_tests/test_list_item_validation.py",
"repo_name": "simbas2000/DjangoTest1",
"src_encoding": "UTF-8",
"text": "from .base import FunctionalTest\nimport time\nfrom selenium.webdriver.common.keys import Keys\n\n\nclass ItemValidationTest(FunctionalTest):\n\n def get_error_element(self):\n if 'is-invalid' in self.browser.page_source:\n error = self.browser.find_element_by_class_name(\"invalid-feedback\")\n return error.text\n else:\n return None\n\n def test_cannot_add_empty_list_items(self):\n self.browser.get(self.server_url)\n self.get_item_input_box().send_keys('\\n')\n self.get_item_input_box().send_keys(Keys.ENTER)\n\n time.sleep(.2)\n # The home page refreshes, and there is an error message saying\n # that list items cannot be blank\n self.assertEqual(self.get_error_element(), \"You can't have an empty list item\")\n\n self.get_item_input_box().send_keys('Item 1' + Keys.ENTER)\n self.check_for_row_in_list_table('1: Item 1')\n\n self.get_item_input_box().send_keys('')\n self.get_item_input_box().send_keys(Keys.ENTER)\n\n time.sleep(.2)\n self.check_for_row_in_list_table('1: Item 1')\n self.assertEqual(self.get_error_element(), \"You can't have an empty list item\")\n\n self.get_item_input_box().send_keys('Item 2' + Keys.ENTER)\n time.sleep(.2)\n self.check_for_row_in_list_table('1: Item 1')\n self.check_for_row_in_list_table('2: Item 2')\n\n def test_cannot_add_duplicate_items(self):\n self.browser.get(self.server_url)\n self.get_item_input_box().send_keys('itm1' + Keys.ENTER)\n time.sleep(.2)\n self.check_for_row_in_list_table('1: itm1')\n # User accidentally tries to enter a duplicate item\n self.get_item_input_box().send_keys('itm1' + Keys.ENTER)\n time.sleep(.2)\n self.check_for_row_in_list_table('1: itm1')\n self.assertEqual(self.get_error_element(), \"You've already got this in your list\")\n\n def test_error_messages_are_cleared_on_input(self):\n self.browser.get(self.server_url)\n self.get_item_input_box().send_keys(Keys.ENTER)\n time.sleep(.2)\n self.assertIsNotNone(self.get_error_element())\n\n self.get_item_input_box().send_keys('a')\n\n time.sleep(.2)\n self.assertIsNone(self.get_error_element())\n"
},
{
"alpha_fraction": 0.5,
"alphanum_fraction": 0.72826087474823,
"avg_line_length": 12.285714149475098,
"blob_id": "a2265b08446127bd0acc7293c4006e57746a9bdd",
"content_id": "81af8d545483fce23366b4811ba36cd18d87a688",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 92,
"license_type": "no_license",
"max_line_length": 17,
"num_lines": 7,
"path": "/requirements.txt",
"repo_name": "simbas2000/DjangoTest1",
"src_encoding": "UTF-8",
"text": "Django==2.2.3\npytz==2019.1\nselenium==3.141.0\nsqlparse==0.3.0\nurllib3==1.25.3\ngunicorn\nfabric"
},
{
"alpha_fraction": 0.6434652805328369,
"alphanum_fraction": 0.6489917635917664,
"avg_line_length": 40.3271598815918,
"blob_id": "96a1b8a20f65e5f418d9f719ff78be77fd4271bc",
"content_id": "67d66bcebc8acf06c4f160a1fc46576fcabaf3a9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6695,
"license_type": "no_license",
"max_line_length": 100,
"num_lines": 162,
"path": "/lists/tests/test_views.py",
"repo_name": "simbas2000/DjangoTest1",
"src_encoding": "UTF-8",
"text": "from django.urls import resolve\nfrom django.test import TestCase\nfrom django.http import HttpRequest\nfrom lists.views import home_page\nfrom django.shortcuts import render\nfrom django.utils.html import escape\nfrom lists.models import Item, List\nfrom lists.forms import ItemForm, EMPTY_LIST_ERROR, ExistingListItemForm, DUPLICATE_ITEM_ERROR\nimport re\n\n\ndef remove_csrf_tag(text):\n return re.sub(r'<[^>]*csrfmiddlewaretoken[^>]*>', '', text)\n\n\nclass NewListTest(TestCase):\n\n def test_saving_a_POST_request(self):\n # Setup\n self.client.post(\n '/lists/new',\n data={'text': 'A new list item'}\n )\n # Assert\n self.assertEqual(Item.objects.count(), 1)\n new_item = Item.objects.first()\n self.assertEqual(new_item.text, 'A new list item')\n\n def test_redirects_after_POST(self):\n # Setup\n response = self.client.post(\n '/lists/new',\n data={'text': 'A new list item'}\n )\n # Assert\n list_ = List.objects.first()\n self.assertEqual(response.status_code, 302)\n self.assertEqual(response['location'], '/lists/{}/'.format(list_.id))\n\n def test_validation_errors_are_sent_back_to_home_page_template(self): # ?\n response = self.client.post('/lists/new', data={'text': ''})\n self.assertEqual(response.status_code, 200)\n self.assertTemplateUsed(response, 'home.html')\n expected_error = escape(\"You can't have an empty list item\")\n self.assertContains(response, expected_error)\n\n def test_invalid_list_items_arent_saved(self):\n self.client.post('/lists/new', data={'text': ''})\n self.assertEqual(List.objects.count(), 0)\n self.assertEqual(Item.objects.count(), 0)\n\n def test_for_invalid_input_renders_home_template(self):\n response = self.client.post('/lists/new', data={'text': ''})\n self.assertEqual(response.status_code, 200)\n self.assertTemplateUsed(response, 'home.html')\n\n def test_validation_errors_are_shown_on_home_page(self):\n response = self.client.post('/lists/new', data={'text': ''})\n self.assertContains(response, escape(EMPTY_LIST_ERROR))\n\n def test_for_invalid_input_passes_form_to_template(self):\n response = self.client.post('/lists/new', data={'text': ''})\n self.assertIsInstance(response.context['form'], ItemForm)\n\n\nclass ListViewTest(TestCase):\n\n def test_uses_list_template(self):\n list_ = List.objects.create()\n response = self.client.get('/lists/{}/'.format(list_.id))\n self.assertTemplateUsed(response, 'list.html')\n\n def test_passes_correct_list_to_template(self):\n other_list = List.objects.create()\n correct_list = List.objects.create()\n response = self.client.get('/lists/{}/'.format(correct_list.id))\n self.assertEqual(response.context['list'], correct_list)\n\n def test_displays_only_items_for_that_list(self):\n correct_list = List.objects.create()\n Item.objects.create(text='item 1', list=correct_list)\n Item.objects.create(text='item 2', list=correct_list)\n other_list = List.objects.create()\n Item.objects.create(text='l2_:item 1', list=other_list)\n Item.objects.create(text='l2_:item 2', list=other_list)\n\n response = self.client.get('/lists/%d/' % (correct_list.id))\n\n self.assertContains(response, 'item 1')\n self.assertContains(response, 'item 2')\n self.assertNotContains(response, 'l2_:item 1')\n self.assertNotContains(response, 'l2_:item 2')\n\n def test_can_save_a_POST_request_to_an_existing_list(self):\n other_list = List.objects.create()\n correct_list = List.objects.create()\n self.client.post('/lists/{}/'.format(correct_list.id), data={'text': 'New item'})\n self.assertEqual(Item.objects.count(), 1)\n new_item = 
Item.objects.first()\n self.assertEqual(new_item.text, 'New item')\n self.assertEqual(new_item.list, correct_list)\n\n def test_POST_redirects_to_list_view(self):\n other_list = List.objects.create()\n correct_list = List.objects.create()\n response = self.client.post('/lists/{}/'.format(correct_list.id), data={'text': 'New item'})\n self.assertRedirects(response, '/lists/{}/'.format(correct_list.id))\n\n def test_validation_errors_end_up_on_lists_page(self): # ?\n list_ = List.objects.create()\n response = self.client.post('/lists/{}/'.format(list_.id), data={'text': ''})\n self.assertEqual(response.status_code, 200)\n self.assertTemplateUsed(response, 'list.html')\n expected_error = escape(\"You can't have an empty list item\")\n self.assertContains(response, expected_error)\n\n def post_invalid_input(self):\n list_ = List.objects.create()\n return self.client.post('/lists/{}/'.format(list_.id), data={'text': ''})\n\n def test_for_invalid_input_nothing_saved_to_db(self):\n self.post_invalid_input()\n self.assertEqual(Item.objects.count(), 0)\n\n def test_for_invalid_input_renders_list_template(self):\n response = self.post_invalid_input()\n self.assertEqual(response.status_code, 200)\n self.assertTemplateUsed(response, 'list.html')\n\n def test_for_invalid_input_passes_form_to_template(self):\n response = self.post_invalid_input()\n self.assertIsInstance(response.context['form'], ExistingListItemForm)\n\n def test_displays_item_form(self):\n list_ = List.objects.create()\n response = self.client.get('/lists/{}/'.format(list_.id))\n self.assertIsInstance(response.context['form'], ExistingListItemForm)\n self.assertContains(response, 'name=\"text\"')\n\n def test_for_invalid_input_shows_error_on_page(self):\n response = self.post_invalid_input()\n self.assertContains(response, escape(EMPTY_LIST_ERROR))\n\n def test_duplicate_item_validation_errors_end_up_on_lists_page(self):\n list1 = List.objects.create()\n item1 = Item.objects.create(list=list1, text='txt')\n response = self.client.post('/lists/{}/'.format(list1.id), data={'text': 'txt'})\n expected_error = escape(DUPLICATE_ITEM_ERROR)\n self.assertContains(response, expected_error)\n self.assertTemplateUsed(response, 'list.html')\n self.assertEqual(Item.objects.all().count(), 1)\n\n\nclass HomePageTest(TestCase):\n\n def test_home_page_renders_home_template(self):\n response = self.client.get('/')\n self.assertTemplateUsed(response, 'home.html') #\n\n def test_home_page_uses_item_form(self):\n response = self.client.get('/')\n self.assertIsInstance(response.context['form'], ItemForm)\n"
},
{
"alpha_fraction": 0.6267432570457458,
"alphanum_fraction": 0.6333059668540955,
"avg_line_length": 43.06024169921875,
"blob_id": "f3d5042f09ea71f8895754d9053ca9fdddc0e9ff",
"content_id": "d5967c940176033f38f024c9e100ffed8ba26f96",
"detected_licenses": [],
"is_generated": false,
"is_vendor": true,
"language": "Python",
"length_bytes": 3657,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 83,
"path": "/deploy_tools/fabfile.py",
"repo_name": "simbas2000/DjangoTest1",
"src_encoding": "UTF-8",
"text": "from fabric import task, Connection\nfrom invoke import run as local_run\nimport shutil\nimport os\nimport re\nimport random\n\nREPO_URL = 'https://github.com/simbas2000/DjangoTest1.git'\n\n\ndef _exists(c: Connection, path):\n result = c.run('test -a {} && echo 1 || echo 0'.format(path))\n return result.stdout.strip() == \"1\"\n\n\ndef _create_directory_structure_if_necessary(c: Connection, site_folder):\n for subfolder in ('database', 'static', 'virtualenv', 'source'):\n c.run('mkdir -p {}/{}'.format(site_folder, subfolder))\n\n\ndef _get_latest_source(c: Connection, source_folder):\n if _exists(c, '{}/.git'.format(source_folder)):\n c.run('cd {} && git fetch'.format(source_folder))\n else:\n c.run('git clone {} {}'.format(REPO_URL, source_folder))\n current_commit = local_run('git log -n 1 --format=%H') # LAUNCH FAB FROM LOCAL REPO DIR AFTER PUSH\n c.run('cd {} && git reset --hard {}'.format(source_folder, current_commit.stdout.strip()))\n\n\ndef _update_settings(c: Connection, source_folder, sitename):\n settings_path = source_folder + '/superlists/settings.py'\n secret_key_path = source_folder + '/superlists/secret_key.py'\n loc_tmp_dir = 'tmp_remote_settings'\n loc_new_settings_path = loc_tmp_dir + '/settings.py'\n loc_old_settings_path = loc_tmp_dir + '/settings_old.py'\n loc_secret_key_path = loc_tmp_dir + '/secret_key.py'\n os.mkdir(loc_tmp_dir)\n try:\n c.get(settings_path, local=os.getcwd()+'/'+loc_tmp_dir+'/settings.py')\n os.rename(loc_tmp_dir+'/settings.py', loc_old_settings_path)\n with open(loc_old_settings_path, 'r') as f:\n content = f.read()\n new_content = re.sub(\"DEBUG = True\", \"DEBUG = False\", content)\n new_content = re.sub(r'ALLOWED_HOSTS = \\[.*\\]', 'ALLOWED_HOSTS = [\"{}\"]'.format(sitename), new_content)\n new_content = re.sub(r\"SECRET_KEY = '.*'\", 'from .secret_key import SECRET_KEY', new_content)\n with open(loc_new_settings_path, 'w') as nf:\n nf.write(new_content)\n if not _exists(c, secret_key_path):\n chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'\n key = ''.join(random.SystemRandom().choice(chars) for _ in range(50))\n with open(loc_secret_key_path, 'w') as nkey:\n nkey.write('SECRET_KEY = \"{}\"'.format(key))\n c.put(os.getcwd()+'/'+loc_secret_key_path, remote=source_folder+'/superlists/')\n c.put(os.getcwd()+'/'+loc_new_settings_path, remote=source_folder+'/superlists/')\n finally:\n shutil.rmtree(loc_tmp_dir)\n\n\ndef _update_virtualenv(c: Connection, source_folder):\n virtualenv_folder = source_folder + '/../virtualenv'\n if not _exists(c, virtualenv_folder + '/bin/pip'):\n c.run('virtualenv --python=python3 {}'.format(virtualenv_folder))\n c.run('{}/bin/pip install -r {}/requirements.txt'.format(virtualenv_folder, source_folder))\n\n\ndef _update_static_files(c: Connection, source_folder):\n c.run('cd {} && ../virtualenv/bin/python3 manage.py collectstatic --noinput'.format(source_folder))\n\n\ndef _update_database(c: Connection, source_folder):\n c.run('cd {} && ../virtualenv/bin/python3 manage.py migrate --noinput'.format(source_folder))\n\n\n@task\ndef deploy(c):\n site_folder = '/home/%s/sites/%s' % (c.user, c.host)\n source_folder = site_folder + '/source'\n _create_directory_structure_if_necessary(c, site_folder)\n _get_latest_source(c, source_folder)\n _update_settings(c, source_folder, c.host)\n _update_virtualenv(c, source_folder)\n _update_static_files(c, source_folder)\n _update_database(c, source_folder)\n"
}
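The `_update_settings` task above generates a Django secret key once per server and stores it in `superlists/secret_key.py`. Extracted as a standalone sketch, with the same character set and key length as the fabfile:

```python
import random

CHARS = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'

def make_secret_key(length=50):
    # SystemRandom draws from the OS entropy source, so the key cannot be
    # reproduced by reseeding Python's default Mersenne Twister.
    return ''.join(random.SystemRandom().choice(CHARS) for _ in range(length))

print(make_secret_key())
```

Because the key file is only written when it does not already exist on the server, sessions and signed cookies survive redeployments.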
] | 5 |
rubycon75/2DV515-A3
|
https://github.com/rubycon75/2DV515-A3
|
972cd4d4190e2863b0bcc115775c1ab69f6af0bd
|
8adad0db928290f06a4b9ad329d9486d2cd75058
|
2a56327d41903efee30415cdaa06d817aa391524
|
refs/heads/master
| 2020-09-21T04:11:21.483902 | 2019-12-04T19:01:51 | 2019-12-04T19:01:51 | 224,674,474 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7037037014961243,
"alphanum_fraction": 0.7407407164573669,
"avg_line_length": 32.230770111083984,
"blob_id": "b13cbcb98262685df419a6bb719b66c859ec1294",
"content_id": "bc1c48b647c73bda9469daeadd9b2724333510c6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 432,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 13,
"path": "/README.md",
"repo_name": "rubycon75/2DV515-A3",
"src_encoding": "UTF-8",
"text": "# 2DV515-A3\nAssignment 3 in 2DV515 Web Intelligence. \nSearch engine for wikipedia articles. \nRequirements: Python 3.7 and modules *flask*, *flask-cors* \n\n## How to run\nRun app.py to start the server at localhost:5000, open client.html in a browser.\n\n## API routes\n\n### GET /{query}\n{query} must be a string of one or more words.\nReturns an object including info about duration and amount of hits, as well as an array of results.\n"
},
{
"alpha_fraction": 0.6536458134651184,
"alphanum_fraction": 0.6770833134651184,
"avg_line_length": 16.454545974731445,
"blob_id": "c6f4cb65d8ee083cb210858558670dd7872e0b9f",
"content_id": "3fb4eee8f9d0a96cce817ad7320316de0ec09a81",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 384,
"license_type": "no_license",
"max_line_length": 45,
"num_lines": 22,
"path": "/app.py",
"repo_name": "rubycon75/2DV515-A3",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n\"\"\"\n2DV515 Web Intelligence\nA3 - Search engine\nby David Johansson (dj222dq)\n\"\"\"\n\nfrom flask import Flask, jsonify\nfrom flask_cors import CORS\nfrom pagedb import PageDB\n\nAPP = Flask(__name__)\nPAGEDB = PageDB()\n\nCORS(APP)\n\[email protected]('/<string:search_term>')\ndef search(search_term):\n return jsonify(PAGEDB.query(search_term))\n\nif __name__ == \"__main__\":\n APP.run()\n"
}
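client.html is not included in this snapshot, so as a quick way to exercise the route directly, here is a sketch using the third-party `requests` package (an extra dependency beyond *flask* and *flask-cors*), with the server running locally:

```python
import requests

# One or more words; spaces must be URL-encoded inside the path segment.
response = requests.get('http://localhost:5000/java%20programming')
response.raise_for_status()
data = response.json()
print(data)  # object with the duration, the number of hits, and an array of results
```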
] | 2 |
Axelwickm/HunterGatherers
|
https://github.com/Axelwickm/HunterGatherers
|
41326c4c831e369ac77e579ac365173ea4e9ef0a
|
7996b645dfbb6658fdf63bd4859ebded194b69f2
|
74479658c0362320af13126f27c2d22ea1b808e0
|
refs/heads/master
| 2021-06-05T22:42:37.798322 | 2021-05-13T21:16:03 | 2021-05-13T21:16:03 | 158,754,677 | 1 | 0 |
MIT
| 2018-11-22T22:15:26 | 2019-01-04T14:55:10 | 2019-01-04T20:41:18 |
C++
|
[
{
"alpha_fraction": 0.5122037529945374,
"alphanum_fraction": 0.5263530015945435,
"avg_line_length": 24.690908432006836,
"blob_id": "1f1acc68f4e69a97262191ae7912b4168dad5e3d",
"content_id": "90fdf149d31ad88c008fc888cc74e368a14bd2f9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2827,
"license_type": "permissive",
"max_line_length": 94,
"num_lines": 110,
"path": "/MarkovNames.cpp",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2019-01-08.\n//\n\n#include <fstream>\n#include <iostream>\n#include <random>\n#include \"MarkovNames.h\"\n#include \"Config.h\"\n\nbool MarkovNames::loaded = false;\nnlohmann::json MarkovNames::chain;\n\n\nvoid MarkovNames::loadResources() {\n if (!loaded){\n printf(\"Loading NamesMarkov.json\\n\");\n loaded = true;\n std::ifstream file(\"resources/NamesMarkov.json\");\n printf(\"Parsing NamesMarkov to json object\\n\");\n file >> chain;\n file.close();\n printf(\"Parsing done.\\n\");\n }\n}\n\nMarkovNames::MarkovNames(const bool random, unsigned long seed) : random(random) {\n randomEngine = std::mt19937(seed);\n}\n\nstd::string MarkovNames::generate(const std::vector<double> genome) {\n loadResources();\n std::string name = \"\";\n std::string last = \"START\";\n\n const int decisions = 8;\n const float k = 10;\n const double commonBias = 1.15;\n\n std::vector<double> reducedGenome;\n reducedGenome.reserve(decisions);\n for (std::size_t i = 0; i < decisions; i++){\n reducedGenome.push_back(0);\n for (std::size_t j = 0; j < decisions; j++){\n reducedGenome.at(i) += genome.at((decisions*i+j) % (genome.size()-1)) / decisions;\n }\n }\n\n\n for (double &itr : reducedGenome) {\n // Logistic function to make values more uniform\n itr = pow(itr, commonBias);\n itr = 1.f / (1.f + expf(float(-k * (itr - 0.5f))));\n }\n\n int lookback = 4;\n int i = 0;\n auto itr = reducedGenome.begin();\n int maxLength = 25;\n\n while (true){\n std::vector<float> weights;\n weights.reserve(chain[last].size());\n\n if (chain.count(last) == 0){\n last = last.substr(1, last.size()-2);\n if (last.empty()){\n name = name.append(\"\\n\");\n break;\n }\n continue;\n }\n\n for (auto &b : chain[last]){\n weights.push_back(b[1]);\n }\n\n i++;\n auto dist = std::uniform_int_distribution<std::size_t>{0, weights.size()-1};\n std::size_t index = 0;\n if (random){\n index = dist(randomEngine);\n }\n else {\n index = static_cast<size_t>(floor((*itr) * weights.size()));\n if (index == chain[last].size()){\n index--;\n }\n itr++;\n if (itr == reducedGenome.end()){\n itr = reducedGenome.begin();\n }\n };\n\n auto newLast = chain[last][index][0].get<std::string>();\n if (newLast == \"\\n\"){\n break;\n }\n name = name.append(newLast);\n if (name.size() >= maxLength){\n break;\n }\n\n last = name.substr(fmaxf((float) name.size() - lookback, 0), std::string::npos);\n }\n\n return name;\n}\n\nnlohmann::json chain;\n\n"
},
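`MarkovNames::generate` above first averages the genome down to a few decision values and then reshapes each one before using it to index the transition table. A Python transcription of that reshaping step, with the constants from the C++ source (`k = 10`, `commonBias = 1.15`):

```python
import math

def reshape(value, k=10.0, common_bias=1.15):
    # The power step biases values downward, favoring the more common
    # transitions (which are sorted first in the chain); the logistic then
    # pushes values toward 0 or 1 so the picks are more decisive.
    value = value ** common_bias
    return 1.0 / (1.0 + math.exp(-k * (value - 0.5)))

print(reshape(0.5))  # ~0.38: the bias shifts the midpoint below 0.5
```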
{
"alpha_fraction": 0.6979768872261047,
"alphanum_fraction": 0.7182080745697021,
"avg_line_length": 19.969696044921875,
"blob_id": "b1439f7f5b40b3f64c8f622ffb3d4ba6b2e2bbb5",
"content_id": "73d09aa287abc2373a149d99c7ac0584fbf1b732",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 692,
"license_type": "permissive",
"max_line_length": 79,
"num_lines": 33,
"path": "/Mushroom.h",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by axelw on 2018-12-28.\n//\n\n#ifndef HUNTERGATHERERS_MUSHROOM_H\n#define HUNTERGATHERERS_MUSHROOM_H\n\n#include <random>\n\n#include \"WorldObject.h\"\n#include \"Config.h\"\n\nclass Mushroom : public WorldObject {\npublic:\n Mushroom(World *world, const sf::Vector2f &position, const Config &config);\n static void loadResources();\n\n void update(float deltaTime) override;\n void draw(sf::RenderWindow *window, float deltaTime) override;\n\nprivate:\n sf::Sprite sprite;\n\n static bool loaded;\n static sf::Texture texture;\n\n const Config &config;\n std::uniform_real_distribution<float> dist;\n static std::mt19937 randomEngine;\n};\n\n\n#endif //HUNTERGATHERERS_MUSHROOM_H\n"
},
{
"alpha_fraction": 0.518207311630249,
"alphanum_fraction": 0.5350140333175659,
"avg_line_length": 27.377483367919922,
"blob_id": "902a71c732af3c511c6517fc8eea05141b9e692c",
"content_id": "bb5dcea0a6614d79ef9f89292d8fbee3fd1a682f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4284,
"license_type": "permissive",
"max_line_length": 157,
"num_lines": 151,
"path": "/utils.cpp",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2018-11-23.\n//\n\n#ifndef FAMILYISEVERYTHING_UTILS_CPP\n#define FAMILYISEVERYTHING_UTILS_CPP\n\n#include <SFML/Graphics.hpp>\n\n#define EPSILON 1e-6f\n\ntemplate<class T>\ninline bool boxesIntersect(const sf::Rect<T> a, const sf::Rect<T> b){\n return !(b.left > a.left + a.width\n || b.left + b.width < a.left\n || b.top > a.top + a.height\n || b.top + b.width < a.top);\n}\n\ntemplate <class T>\ninline bool pointInBox(const sf::Vector2<T> p, const sf::Rect<T> a){\n return a.left < p.x && a.top < p.y\n && p.x <= a.left + a.width && p.y <= a.top + a.height;\n}\n\ntemplate<class T>\ninline bool clipT(const T &n, const T &d, sf::Vector2<T> *c) {\n sf::Vector2<T> c2(*c);\n if (abs(d) < EPSILON){\n return n < 0;\n }\n T t = n / d;\n\n if (0 < d) {\n if (t > c2.y) return false;\n if (t > c2.x) (*c).x = t;\n } else {\n if (t < c2.x) return false;\n if (t < c2.y) (*c).y = t;\n }\n return true;\n}\n\ntemplate<class T>\ninline bool lineIntersectWithBox(const sf::Vector2<T> &lineStart, const sf::Vector2<T> &lineEnd, const sf::Vector2<T> &boxpos, const sf::Vector2<T> &boxdim){\n T box[4] = {boxpos.x, boxpos.y, boxpos.x + boxdim.x, boxpos.y + boxdim.y};\n T dx = lineEnd.x - lineStart.x;\n T dy = lineEnd.y - lineStart.y;\n\n if (dx < EPSILON && dy < EPSILON &&\n lineStart.x >= box[0] && lineStart.x <= box[2] &&\n lineStart.y >= box[1] && lineStart.y <= box[3]){\n return true;\n }\n\n sf::Vector2<T> c = {0, 1};\n return clipT(box[0] - lineStart.x, dx, &c) &&\n clipT(lineStart.x - box[2], -dx, &c) &&\n clipT(box[1] - lineStart.y, dy, &c) &&\n clipT(lineStart.y - box[3], -dy, &c);\n\n\n}\n\ninline sf::Color colorFromGenome(const std::vector<double>& genome){\n std::vector<double> reducedGenome(3);\n for (std::size_t i = 0; i < 3; i++){\n reducedGenome.push_back(0);\n for (std::size_t j = 0; j < 3; j++){\n reducedGenome.at(i) += genome.at((3*i+j) % (genome.size()-1)) / 3;\n }\n }\n return sf::Color(reducedGenome[0]*250+5, reducedGenome[1]*250+5, reducedGenome[2]*250+5);\n}\n\ntemplate <class T>\nclass Contiguous2dVector {\npublic:\n Contiguous2dVector(std::size_t n, std::size_t m, const T fillValue)\n : fillValue(fillValue), n(n), m(m) {\n data = std::vector<T>(n*m, fillValue);\n }\n\n explicit Contiguous2dVector(const T fillValue)\n : fillValue(fillValue), n(0), m(0) {}\n\n Contiguous2dVector<T>& operator=(const Contiguous2dVector& other){\n n = other.n; m = other.m;\n data = other.data;\n return *this;\n }\n\n void push_back_row(std::vector<T> item){\n if (m < item.size()){\n // Insert fill value after every row\n const std::size_t nm = item.size();\n data.reserve(n*nm);\n for (std::size_t i = 0; i < n; i++){\n // FIXME: +1 v?\n data.insert(std::begin(data)+m+i*nm, nm-m, fillValue);\n }\n m = nm;\n }\n\n // Add new data\n data.insert(std::end(data), std::begin(item), std::end(item));\n n++;\n }\n\n void clear(){\n data.clear();\n n = 0; m = 0;\n }\n\n std::size_t getN() const {\n return n;\n }\n\n std::size_t getM() const {\n return m;\n }\n\n T& at(std::size_t x, std::size_t y){\n if (n <= x) // For some reason this is false positive if using size_t\n throw std::out_of_range(\"x (\"+std::to_string(x)+\") too big for n (\"+std::to_string(n)+\")\");\n if (m <= y)\n throw std::out_of_range(\"y (\"+std::to_string(x)+\") too big for m (\"+std::to_string(m)+\")\");\n\n return data.at(x*m+y);\n }\n\n std::pair<typename std::vector<T>::iterator, typename std::vector<T>::iterator> at(std::size_t x){\n if (n <= x)\n throw std::out_of_range(\"x (\"+std::to_string(x)+\") too big for 
n (\"+std::to_string(n)+\")\");\n\n return std::make_pair(std::begin(data)+x*m, std::begin(data)+x*m+m);\n }\n\n const T getFillValue() const {\n return fillValue;\n }\n\nprivate:\n std::size_t n, m;\n const T fillValue;\n std::vector<T> data;\n};\n\ntemplate class Contiguous2dVector<sf::Color>;\n\n#endif //FAMILYISEVERYTHING_UTILS_CPP"
},
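`colorFromGenome` above intends to average three genome entries per RGB channel and scale the result into 5..255. Note that the C++ version constructs `reducedGenome` with size 3 and then `push_back`s onto it, so the reads at indices 0..2 actually hit the zero-initialized slots; the sketch below implements the apparent intent:

```python
def color_from_genome(genome):
    # Average three genome entries per channel, then scale 0..1 into 5..255.
    channels = []
    for i in range(3):
        acc = sum(genome[(3 * i + j) % (len(genome) - 1)] for j in range(3)) / 3
        channels.append(int(acc * 250 + 5))
    return tuple(channels)

print(color_from_genome([0.2, 0.8, 0.5, 0.1, 0.9, 0.4, 0.6]))  # -> (130, 121, 130)
```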
{
"alpha_fraction": 0.6369341015815735,
"alphanum_fraction": 0.6481398344039917,
"avg_line_length": 29.93055534362793,
"blob_id": "4a1efcd53609d51a280eeefcd2423ce5add3e959",
"content_id": "5eed88844029217c15f53898d9e6072daac56d99",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2231,
"license_type": "permissive",
"max_line_length": 131,
"num_lines": 72,
"path": "/Camera.cpp",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2018-11-23.\n//\n\n#include \"Camera.h\"\n\nCamera::Camera(Config &config, sf::RenderWindow *window, sf::Vector2f worldSize) : config(config) {\n this->window = window;\n this->view = window->getDefaultView();\n this->worldSize = worldSize;\n view.zoom(1.f);\n window->setView(view);\n followingAgent = false;\n}\n\nvoid Camera::move(sf::Vector2f offset) {\n followingAgent = false;\n offset *= view.getSize().x/window->getSize().x;\n view.move(offset.x, offset.y);\n\n view.setCenter((float) fmin(view.getCenter().x, worldSize.x), (float) fmin(view.getCenter().y, worldSize.y));\n view.setCenter((float) fmax(view.getCenter().x, 0), (float) fmax(view.getCenter().y, 0));\n\n window->setView(view);\n}\n\nvoid Camera::update(float deltaT) {\n if (followingAgent){\n if (agentFollow.expired()){\n followingAgent = false;\n }\n else {\n view.setCenter(agentFollow.lock()->getPosition());\n window->setView(view);\n }\n }\n}\n\n\nvoid Camera::zoomTo(float mouseWheelDelta, sf::Vector2<int> mousePosition) {\n sf::Vector2f c1 = (sf::Vector2f) window->mapPixelToCoords(sf::Mouse::getPosition(*window));\n view.zoom(1.f-mouseWheelDelta * config.controls.scrollFactor);\n view.setSize((float) fmin(view.getSize().x, worldSize.x), (float) fmin(view.getSize().y, worldSize.y));\n auto c2 = (sf::Vector2f) window->mapPixelToCoords(mousePosition, view);\n view.move(c1-c2);\n\n //view.setCenter((float) fmin(view.getCenter().x, window->getSize().x), (float) fmin(view.getCenter().y, window->getSize().y));\n //view.setCenter((float) fmax(view.getCenter().x, 0), (float) fmax(view.getCenter().y, 0));\n\n window->setView(view);\n}\n\nvoid Camera::resizeWindow(sf::Event::SizeEvent size) {\n float aspectRatio = (float) size.width / size.height;\n sf::Vector2f v = view.getSize();\n view.setSize(v.x, v.x/aspectRatio);\n window->setView(view);\n}\n\nsf::View Camera::getView() {\n return view;\n}\n\nvoid Camera::setView(sf::View view) {\n this->view = view;\n window->setView(view);\n}\n\nvoid Camera::followAgent(Agent *agent) {\n followingAgent = true;\n agentFollow = std::dynamic_pointer_cast<Agent>(agent->getSharedPtr());\n}\n\n\n\n\n"
},
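`Camera::zoomTo` keeps the world point under the cursor fixed by mapping the cursor to world coordinates before the zoom, zooming, mapping again, and moving the view by the difference. The same idea at the coordinate level, with plain tuples standing in for SFML views:

```python
def zoom_to(center, size, cursor_frac, factor):
    # cursor_frac: cursor position as fractions of the view, in [-0.5, 0.5].
    before = (center[0] + cursor_frac[0] * size[0],
              center[1] + cursor_frac[1] * size[1])
    size = (size[0] * factor, size[1] * factor)
    after = (center[0] + cursor_frac[0] * size[0],
             center[1] + cursor_frac[1] * size[1])
    # Shift so the point that was under the cursor stays under the cursor.
    center = (center[0] + before[0] - after[0],
              center[1] + before[1] - after[1])
    return center, size
```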
{
"alpha_fraction": 0.6904761791229248,
"alphanum_fraction": 0.7083333134651184,
"avg_line_length": 17,
"blob_id": "a4966378b34021f5ba5a158a169fb0857d678607",
"content_id": "22b3bfa5fbf4ad052a8a559bf33a09f63e827dc7",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 504,
"license_type": "permissive",
"max_line_length": 66,
"num_lines": 28,
"path": "/Skull.h",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2019-01-15.\n//\n\n#ifndef HUNTERGATHERERS_SKULL_H\n#define HUNTERGATHERERS_SKULL_H\n\n\n#include \"WorldObject.h\"\n\nclass Skull : public WorldObject {\npublic:\n Skull(World *world, const sf::Vector2f &position);\n static void loadResources();\n\n void update(float deltaTime) override;\n void draw(sf::RenderWindow *window, float deltaTime) override;\n\nprivate:\n sf::Sprite sprite;\n\n static bool loaded;\n static sf::Texture texture;\n\n};\n\n\n#endif //HUNTERGATHERERS_SKULL_H\n"
},
{
"alpha_fraction": 0.6190853118896484,
"alphanum_fraction": 0.6390813589096069,
"avg_line_length": 25.72486686706543,
"blob_id": "d63d35f9a4c44e96d681d3faa90094cf3eaadb2b",
"content_id": "d89f493965abb64362e19c734fb6d5f348ce0f19",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 5051,
"license_type": "permissive",
"max_line_length": 94,
"num_lines": 189,
"path": "/WorldObject.cpp",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2018-11-21.\n//\n\n#include \"WorldObject.h\"\n#include \"Quadtree.h\"\n#include \"World.h\"\n\nWorldObject::WorldObject(std::string type, World *world, sf::Vector2f position, bool collider)\n: type(std::move(type)), collider(collider) {\n this->position = position;\n this->quadtree = nullptr;\n this->world = world;\n age = 0;\n velocity = sf::Vector2f(0, 0);\n mass = 1.f;\n friction = 0.f;\n bounds = sf::IntRect(0, 0, 0, 0);\n}\n\nWorldObject::WorldObject(const WorldObject &other)\n: type(other.type), collider(other.collider) {\n this->position = other.position;\n this->quadtree = other.quadtree;\n this->world = other.world;\n age = 0;\n velocity = other.velocity;\n mass = other.mass;\n friction = other.friction;\n bounds = other.bounds;\n}\n\nstd::shared_ptr<WorldObject> WorldObject::getSharedPtr() {\n return std::shared_ptr<WorldObject>(me);\n}\n\nvoid WorldObject::setQuadtree(Quadtree<float> *quadtree, std::weak_ptr<WorldObject> object) {\n this->quadtree = quadtree;\n this->me = object;\n}\n\nQuadtree<float> *WorldObject::getQuadtree() {\n return quadtree;\n}\n\nvoid WorldObject::update(float deltaTime) {\n sf::Vector2f old = position;\n update(deltaTime, old);\n}\n\nvoid WorldObject::update(float deltaTime, sf::Vector2f oldPosition) {\n age += deltaTime;\n velocity *= powf(friction, deltaTime);\n position += velocity * deltaTime;\n colliding = false;\n if (world->getDimensions().x-1 <= position.x){\n position.x = world->getDimensions().x-1;\n colliding = true;\n }\n if (world->getDimensions().y-1 <= position.y){\n position.y = world->getDimensions().y-1;\n colliding = true;\n }\n if (position.x <= 1){\n position.x = 1;\n colliding = true;\n }\n if (position.y <= 1){\n position.y = 1;\n colliding = true;\n }\n if (quadtree != nullptr) {\n quadtree->move(oldPosition, this);\n }\n\n}\n\nvoid WorldObject::applyForce(float deltaTime, const sf::Vector2f force) {\n velocity += force/mass*deltaTime;\n}\n\nvoid WorldObject::draw(sf::RenderWindow *window, float deltaTime) {\n if (world->getConfig().render.showWorldObjectBounds){\n sf::VertexArray rect(sf::LineStrip, 5);\n sf::IntRect b = getBounds();\n rect[0].position = sf::Vector2f(getPosition() + sf::Vector2f(b.left, b.top));\n rect[1].position = sf::Vector2f(getPosition() + sf::Vector2f(b.width, b.top));\n rect[2].position = sf::Vector2f(getPosition() + sf::Vector2f(b.width, b.height));\n rect[3].position = sf::Vector2f(getPosition() + sf::Vector2f(b.left, b.height));\n rect[4].position = sf::Vector2f(getPosition() + sf::Vector2f(b.left, b.top));\n\n rect[0].color = sf::Color(200, 200, 200);\n rect[1].color = sf::Color(200, 200, 200);\n rect[2].color = sf::Color(200, 200, 200);\n rect[3].color = sf::Color(200, 200, 200);\n rect[4].color = sf::Color(200, 200, 200);\n window->draw(rect);\n }\n}\n\nconst sf::Vector2f &WorldObject::getPosition() const {\n return position;\n}\n\nvoid WorldObject::setPosition(const sf::Vector2f &position) {\n WorldObject::position = position;\n}\n\n\nconst sf::Vector2f &WorldObject::getVelocity() const {\n return velocity;\n}\n\nvoid WorldObject::setVelocity(const sf::Vector2f &velocity) {\n WorldObject::velocity = velocity;\n}\n\nfloat WorldObject::getSpeed() const {\n return sqrtf(velocity.x*velocity.x + velocity.y+velocity.y);\n}\n\nfloat WorldObject::getMass() const {\n return mass;\n}\n\nvoid WorldObject::setMass(float accelerationFactor) {\n WorldObject::mass = accelerationFactor;\n}\n\nconst sf::IntRect &WorldObject::getBounds() const {\n return bounds;\n}\n\nconst 
sf::IntRect WorldObject::getWorldBounds() const {\n return sf::IntRect(getPosition().x + getBounds().left,\n getPosition().y + getBounds().top,\n getBounds().width - getBounds().left,\n getBounds().height - getBounds().top);\n}\n\n\n\nconst sf::FloatRect WorldObject::getWorldBoundsf() const {\n return sf::FloatRect(getPosition().x + getBounds().left,\n getPosition().y + getBounds().top,\n getBounds().width - getBounds().left,\n getBounds().height - getBounds().top);\n}\n\nvoid WorldObject::setBounds(const sf::IntRect &bounds) {\n WorldObject::bounds = bounds;\n}\n\nconst bool WorldObject::isCollider() const {\n return collider;\n}\n\nfloat WorldObject::getFriction() const {\n return friction;\n}\n\n\nvoid WorldObject::setFriction(float friction) {\n WorldObject::friction = friction;\n}\n\nfloat WorldObject::getAge() const {\n return age;\n}\n\nvoid WorldObject::setColor(const sf::Color &color) {\n WorldObject::color = color;\n}\n\nconst sf::Color &WorldObject::getColor() const {\n return color;\n}\n\nvoid WorldObject::setAge(float age) {\n WorldObject::age = age;\n}\n\nbool WorldObject::isColliding() const {\n return colliding;\n}\n\nvoid WorldObject::setColliding(bool colliding) {\n WorldObject::colliding = colliding;\n}\n"
},
{
"alpha_fraction": 0.765625,
"alphanum_fraction": 0.78125,
"avg_line_length": 37.20000076293945,
"blob_id": "8d05a4218b5705534524b549f17b81e8c2f5a94c",
"content_id": "c6a9242ee0dfce613110d2adc55052ed9efc801e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 192,
"license_type": "permissive",
"max_line_length": 74,
"num_lines": 5,
"path": "/README.md",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "# HunterGatherers\nArtificial life simulation of agents in a 2d-world. Made for an AI-course.\nWritten in C++17, with graphics done in SFML.\n\nReport: [HunterGatherers.pdf](HunterGatherers.pdf)\n\n"
},
{
"alpha_fraction": 0.5688311457633972,
"alphanum_fraction": 0.5798701047897339,
"avg_line_length": 23.460317611694336,
"blob_id": "09831e7c509fe6203e57c9b94a7f6cae0d1f9882",
"content_id": "5176726a368af4cac830b260cf6593feabf5bb3d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1540,
"license_type": "permissive",
"max_line_length": 97,
"num_lines": 63,
"path": "/resources/CreateSurnameMarkov.py",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "import random\nfrom numpy.random import choice\nimport json\n\nmarkovChain = {}\nlookback = 4\n\ndef addToChain(a, b):\n #print(repr(a+\" -> \"+b))\n if a not in markovChain:\n markovChain[a] = {}\n if b not in markovChain[a]:\n markovChain[a][b] = 0\n markovChain[a][b] += 1\n\n\ndef processName(name):\n addToChain(\"START\", name[0])\n for x in range(1, len(name)):\n addToChain(name[max(x-lookback, 0):x], name[x])\n\ndef generate():\n last = \"START\"\n name = \"\"\n while True:\n try:\n last = choice(list(markovChain[last].keys()), 1, list(markovChain[last].values()))[0]\n except KeyError:\n last = last[1:]\n if last == \"\":\n name += \"\\n\"\n break\n continue\n name += last\n last = name[max(len(name)-lookback, 0): len(name)]\n if name[len(name)-1:len(name)] == \"\\n\":\n break\n\n\n return name\n\n\nf = open(\"surnames.txt\", encoding=\"utf8\")\nfor line in f:\n if line.rstrip() != \"\":\n processName(line.split(\",\")[0]+\"\\n\")\n\n\nmarkovChainConverted = {}\n\nfor a, bList in markovChain.items():\n markovChainConverted[a] = list(bList.items())\n markovChainConverted[a].sort(key=lambda x: x[1], reverse = True)\n\nwith open('NamesMarkov.json', 'w') as outfile:\n json.dump(markovChainConverted, outfile, indent=0)\n # This works, but file is 3 times larger than it has to be.\n # Therefore the generated repo file has been minified.\n\nprint(\"File saved.\")\n\nfor i in range(1):\n print(repr(generate()))"
},
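As a quick sanity check of the weighted sampling in `generate` (transition counts normalized to probabilities before calling numpy's `choice`), with made-up counts:

```python
from collections import Counter
from numpy.random import choice

successors = {'a': 3, 'b': 1}  # hypothetical transition counts
keys = list(successors.keys())
probs = [v / sum(successors.values()) for v in successors.values()]

draws = Counter(choice(keys, p=probs) for _ in range(10000))
print(draws)  # 'a' should win roughly 3:1
```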
{
"alpha_fraction": 0.6139534711837769,
"alphanum_fraction": 0.6372092962265015,
"avg_line_length": 21.578947067260742,
"blob_id": "ef17503adf6c7a4538947f43a7f58058531ca63c",
"content_id": "01e87d8714f721ae48389fd0c66bc7c768525ad9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 860,
"license_type": "permissive",
"max_line_length": 61,
"num_lines": 38,
"path": "/Skull.cpp",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2019-01-15.\n//\n\n#include \"Skull.h\"\n#include \"World.h\"\n\nbool Skull::loaded = false;\nsf::Texture Skull::texture;\n\nvoid Skull::loadResources() {\n if (!loaded){\n loaded = true;\n texture.loadFromFile(\"resources/Skull.png\");\n }\n}\n\nSkull::Skull(World *world, const sf::Vector2f &position)\n : WorldObject(\"Skull\", world, position, false) {\n loadResources();\n sprite = sf::Sprite(texture);\n sprite.setOrigin(25, 47);\n setMass(0.5);\n setVelocity(sf::Vector2f(0, -40));\n}\n\nvoid Skull::update(float deltaTime) {\n WorldObject::update(deltaTime);\n if (2 < getAge()){\n world->removeObject(getSharedPtr(), false);\n }\n}\n\nvoid Skull::draw(sf::RenderWindow *window, float deltaTime) {\n sprite.setPosition(getPosition());\n window->draw(sprite);\n WorldObject::draw(window, deltaTime);\n}\n\n\n"
},
{
"alpha_fraction": 0.6277519464492798,
"alphanum_fraction": 0.6328217387199402,
"avg_line_length": 37.59785461425781,
"blob_id": "ea98b4c4d7fde9b7896200d2fb9f4007dad3a30e",
"content_id": "d6f339d9914b4afcf3237170da90dddb0cab0759",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 14399,
"license_type": "permissive",
"max_line_length": 131,
"num_lines": 373,
"path": "/OpenCL_Wrapper.cpp",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2018-11-27.\n//\n\n#include <fstream>\n#include <sstream>\n#include <functional>\n\n#include \"OpenCL_Wrapper.h\"\n\n\nOpenCL_Wrapper::OpenCL_Wrapper(std::string deviceToUse) {\n // Find the right device\n int err;\n size_t valueSize;\n cl_uint platformCount;\n cl_platform_id *platforms;\n cl_uint deviceCount;\n cl_device_id *devices;\n cl_uint maxMaxComputeUnits = 0;\n\n device_id = nullptr;\n\n // Get platforms\n clGetPlatformIDs(0, nullptr, &platformCount);\n platforms = (cl_platform_id *) malloc(sizeof(cl_platform_id) * platformCount);\n clGetPlatformIDs(platformCount, platforms, nullptr);\n\n for (int i = 0; i < platformCount; i++) {\n // Get devices in this platform\n clGetDeviceIDs(platforms[i], CL_DEVICE_TYPE_ALL, 0, nullptr, &deviceCount);\n devices = (cl_device_id*) malloc(sizeof(cl_device_id) * deviceCount);\n clGetDeviceIDs(platforms[i], CL_DEVICE_TYPE_ALL, deviceCount, devices, nullptr);\n\n\n for (int j = 0; j < deviceCount; j++) {\n\n char *value;\n clGetDeviceInfo(devices[j], CL_DEVICE_NAME, 0, nullptr, &valueSize);\n value = (char*) malloc(valueSize);\n clGetDeviceInfo(devices[j], CL_DEVICE_NAME, valueSize, value, nullptr);\n printf(\"%d. Device: %s\\n\", j+1, value);\n free(value);\n\n // print hardware device version\n\n clGetDeviceInfo(devices[j], CL_DEVICE_VERSION, 0, nullptr, &valueSize);\n value = (char*) malloc(valueSize);\n clGetDeviceInfo(devices[j], CL_DEVICE_VERSION, valueSize, value, nullptr);\n printf(\" %d.%d Hardware version: %s\\n\", j+1, 1, value);\n free(value);\n\n // print software driver version\n clGetDeviceInfo(devices[j], CL_DRIVER_VERSION, 0, nullptr, &valueSize);\n value = (char*) malloc(valueSize);\n clGetDeviceInfo(devices[j], CL_DRIVER_VERSION, valueSize, value, nullptr);\n printf(\" %d.%d Software version: %s\\n\\n\", j+1, 2, value);\n free(value);\n\n clGetDeviceInfo(devices[j], CL_DEVICE_MAX_COMPUTE_UNITS, sizeof(maxComputeUnits), &maxComputeUnits, nullptr);\n\n // If a device has been specified in argument options.\n if (!deviceToUse.empty()){\n // Get device name and see if it matches\n clGetDeviceInfo(devices[j], CL_DEVICE_NAME, 0, nullptr, &valueSize);\n auto name = (char *) malloc(valueSize);\n clGetDeviceInfo(devices[j], CL_DEVICE_NAME, valueSize, name, nullptr);\n\n if (std::string(name) == deviceToUse){\n device_id = devices[j];\n free(name);\n break;\n }\n\n free(name);\n }\n else {\n // Use the device with the most compute units\n if (maxMaxComputeUnits < maxComputeUnits){\n maxMaxComputeUnits = maxComputeUnits;\n device_id = devices[j];\n }\n }\n\n }\n free(devices);\n\n }\n\n free(platforms);\n\n // Check if found\n if (device_id == nullptr && deviceToUse.empty()){\n throw std::runtime_error(\"No device found.\");\n }\n else if (device_id == nullptr){\n throw std::runtime_error(\"Specified device not found.\");\n }\n\n // Print selectedText device name\n clGetDeviceInfo(device_id, CL_DEVICE_NAME, 0, nullptr, &valueSize);\n auto name = (char *) malloc(valueSize);\n clGetDeviceInfo(device_id, CL_DEVICE_NAME, valueSize, name, nullptr);\n printf(\"Using OpenCL on device: %s\\n\", name);\n printf(\"OpenCL device max compute units: %d\\n\", maxComputeUnits);\n free(name);\n\n // Create OpenCL context\n context = clCreateContext( nullptr, 1, &device_id, nullptr, nullptr, &err);\n if (!context){\n throw std::runtime_error(\"Failed to create OpenCL context: \"+std::to_string(err));\n }\n\n // Create OpenCL command queue\n command_queue = clCreateCommandQueue(context, device_id, 
CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE, &err);\n if (!command_queue){\n throw std::runtime_error(\"Failed to create OpenCL command queue: \"+std::to_string(err));\n }\n\n\n // Build the programs and create the kernels\n const std::string kernelSource = loadFile(\"../cl/neural_net.cl\");\n cl_program neuralNetProgram = createAndCompileProgram(kernelSource);\n perceptronKernel = clCreateKernel(neuralNetProgram, \"perceptron\", &err);\n\n if (!perceptronKernel || err != CL_SUCCESS){\n throw std::runtime_error(\"Failed to create OpenCL kernel-\");\n }\n\n\n}\n\nOpenCL_Wrapper::~OpenCL_Wrapper() {\n while (!agentRegister.empty()){\n removeAgent(agentRegister.begin()->second.agent);\n }\n}\n\nconst std::string OpenCL_Wrapper::loadFile(std::string filename) {\n std::ifstream f(filename);\n if (f.fail()){\n throw std::runtime_error(\"Failed to load file (does it exist?): \"+filename);\n }\n const std::string str((std::istreambuf_iterator<char>(f)), (std::istreambuf_iterator<char>()));\n return str;\n}\n\ncl_program OpenCL_Wrapper::createAndCompileProgram(const std::string &source) {\n const char *source_data = source.data();\n const char **source_data_p = &source_data;\n cl_int err;\n cl_program program = clCreateProgramWithSource(context, 1, source_data_p, nullptr, &err);\n\n if (!program || err){\n throw std::runtime_error(\"Failed to create OpenCL program.\");\n }\n err = clBuildProgram(program, 0, nullptr, nullptr, nullptr, nullptr);\n\n if (err){\n char log[10240] = \"\";\n clGetProgramBuildInfo(program, device_id, CL_PROGRAM_BUILD_LOG, sizeof(log), log, nullptr);\n throw std::runtime_error(\"Failed to build OpenCL program: \"+std::to_string(err)+\" \\n-----\\n\"+std::string(log)+\"\\n-----\\n\");\n };\n\n return program;\n}\n\nvoid OpenCL_Wrapper::addAgent(Agent *agent) {\n\n const MapGenes &genes = *agent->getGenes();\n\n agentRegister[agent] = AgentEntry();\n AgentEntry &agentEntry = agentRegister.at(agent);\n agentEntry.agent = agent;\n\n\n agentEntry.outputBandwidth = (unsigned int) genes.getGene<IntegerGene>(\"OutputCount\")->getValue();\n agentEntry.output.resize(agentEntry.outputBandwidth, 0);\n agentEntry.layerCount = (unsigned int) genes.getGene<IntegerGene>(\"LayerCount\")->getValue();\n\n auto layersList = genes.getGene<ListGenes>(\"Layers\")->getList().begin();\n std::vector<float> layerBiases;\n std::generate_n(std::back_inserter(layerBiases), agentEntry.layerCount, [&]{\n auto g = ((MapGenes*) layersList->get())->getGene<FloatGene>(\"Bias\");\n layersList++;\n return g->getValue();\n });\n\n\n layersList = genes.getGene<ListGenes>(\"Layers\")->getList().begin();\n std::vector<unsigned> layerSizes;\n layerSizes.push_back((unsigned int) genes.getGene<IntegerGene>(\"InputCount\")->getValue());\n agentEntry.maxLayerSize = 0;\n std::generate_n(std::back_inserter(layerSizes), agentEntry.layerCount, [&]{\n auto g = ((MapGenes*) layersList->get())->getGene<LambdaGene<int> >(\"PerceptronCount\");\n layersList++;\n int val = g->getValue();\n agentEntry.maxLayerSize = agentEntry.maxLayerSize < val ? 
val : agentEntry.maxLayerSize;\n return val;\n });\n\n agentEntry.layerSizes_Host = layerSizes;\n\n std::vector<float> layerWeights;\n for (auto &layer : genes.getGene<ListGenes>(\"Layers\")->getList()) {\n for (auto &perceptron : ((MapGenes *) layer.get())->getGene<ListGenes>(\"Perceptrons\")->getList()) {\n for (auto &weight : ((MapGenes *) perceptron.get())->getGene<ListGenes>(\"Weights\")->getList()) {\n layerWeights.push_back(((FloatGene*) weight.get())->getValue());\n }\n }\n }\n\n cl_int err;\n agentEntry.layerSizes = clCreateBuffer(context, CL_MEM_READ_ONLY, sizeof(unsigned)*(agentEntry.layerCount+1),nullptr, &err);\n if (err){\n throw std::runtime_error(\"Failed to create OpenCL neural net layer sizes buffer: \"+std::to_string(err));\n }\n agentEntry.layerWeights = clCreateBuffer(context, CL_MEM_READ_ONLY, sizeof(float)*layerWeights.size(), nullptr, &err);\n if (err){\n throw std::runtime_error(\"Failed to create OpenCL neural net layer weight buffer: \"+std::to_string(err));\n }\n agentEntry.layerBiases = clCreateBuffer(context, CL_MEM_READ_ONLY, sizeof(float)*agentEntry.layerCount, nullptr, &err);\n if (err){\n throw std::runtime_error(\"Failed to create OpenCL neural net layer weight buffer: \"+std::to_string(err));\n }\n\n\n err = clEnqueueWriteBuffer(command_queue, agentEntry.layerSizes, CL_TRUE, 0, sizeof(unsigned)*(agentEntry.layerCount+1),\n &layerSizes.front(), 0, nullptr, nullptr);\n if (err){\n throw std::runtime_error(\"Failed to enqueue layer sizes buffer write: \"+std::to_string(err));\n }\n\n err = clEnqueueWriteBuffer(command_queue, agentEntry.layerWeights, CL_TRUE, 0, sizeof(float)*layerWeights.size(),\n &layerWeights.front(), 0, nullptr, nullptr);\n if (err){\n throw std::runtime_error(\"Failed to enqueue layer sizes buffer write: \"+std::to_string(err));\n }\n\n err = clEnqueueWriteBuffer(command_queue, agentEntry.layerBiases, CL_TRUE, 0, sizeof(float)*agentEntry.layerCount,\n &layerBiases.front(), 0, nullptr, nullptr);\n if (err){\n throw std::runtime_error(\"Failed to enqueue layer sizes buffer write: \"+std::to_string(err));\n }\n\n agentEntry.netActivationA = clCreateBuffer(context, CL_MEM_READ_WRITE, sizeof(float)*agentEntry.maxLayerSize,\n nullptr, &err);\n agentEntry.netActivationB = clCreateBuffer(context, CL_MEM_READ_WRITE, sizeof(float)*agentEntry.maxLayerSize,\n nullptr, &err);\n if (err){\n throw std::runtime_error(\"Failed to create net activation buffer: \"+std::to_string(err));\n }\n\n}\n\nvoid OpenCL_Wrapper::removeAgent(Agent *agent) {\n AgentEntry &a = agentRegister.at(agent);\n\n cl_int err = clReleaseMemObject(a.netActivationA);\n err |= clReleaseMemObject(a.netActivationB);\n\n err |= clReleaseMemObject(a.layerSizes);\n err |= clReleaseMemObject(a.layerBiases);\n err |= clReleaseMemObject(a.layerWeights);\n\n agentRegister.erase(agent);\n if (err){\n throw std::runtime_error(\"Failed to release memory objects: \"+std::to_string(err));\n }\n}\n\nvoid OpenCL_Wrapper::think(std::shared_ptr<Agent> agent, const std::vector<float> &percept) {\n AgentEntry &agentEntry = agentRegister.at(agent.get());\n\n cl_event lastEvent;\n cl_event newEvent;\n\n cl_int err = 0;\n err |= clSetKernelArg(perceptronKernel, 0, sizeof(cl_mem), &agentEntry.layerSizes);\n err |= clSetKernelArg(perceptronKernel, 1, sizeof(cl_mem), &agentEntry.layerWeights);\n err |= clSetKernelArg(perceptronKernel, 2, sizeof(cl_mem), &agentEntry.layerBiases);\n if (err){\n throw std::runtime_error(\"Failed to set OpenCL neural net architecture kernel arg: \"+std::to_string(err));\n }\n\n 
// Buffer the percept\n // This will probably not fill the whole buffer, but the kernel is fine with that.\n err = clEnqueueWriteBuffer(command_queue, agentEntry.netActivationA, CL_FALSE, 0,\n sizeof(float)*agentEntry.maxLayerSize, &percept.at(0), 0, nullptr, &lastEvent);\n\n if (err){\n throw std::runtime_error(\"Failed to write to first net activation buffer: \"+std::to_string(err));\n }\n\n unsigned layerOffset = 0;\n for (unsigned i = 0; i < agentEntry.layerCount; i++){\n err = clSetKernelArg(perceptronKernel, 3, sizeof(unsigned), &i);\n err |= clSetKernelArg(perceptronKernel, 4, sizeof(unsigned), &layerOffset);\n\n if (err){\n throw std::runtime_error(\"Failed to set OpenCL current layer kernel arg: \"+std::to_string(err));\n }\n\n if (i%2 == 0){\n err = clSetKernelArg(perceptronKernel, 5, sizeof(cl_mem), &agentEntry.netActivationA);\n err |= clSetKernelArg(perceptronKernel, 6, sizeof(cl_mem), &agentEntry.netActivationB);\n }\n else {\n err = clSetKernelArg(perceptronKernel, 6, sizeof(cl_mem), &agentEntry.netActivationA);\n err |= clSetKernelArg(perceptronKernel, 5, sizeof(cl_mem), &agentEntry.netActivationB);\n }\n if (err){\n throw std::runtime_error(\"Failed to set OpenCL activation kernel arg: \"+std::to_string(err));\n }\n\n\n size_t localSize = std::min(maxComputeUnits, agentEntry.layerSizes_Host[i+1]);\n size_t globalSize = (size_t) ceil((double) agentEntry.layerSizes_Host[i+1]/localSize)*localSize;\n\n err = clEnqueueNDRangeKernel(command_queue, perceptronKernel, 1, nullptr, &globalSize, &localSize,\n 1, &lastEvent, &newEvent);\n\n if (err){\n throw std::runtime_error(\"Failed to enqueue perceptron ND Range kernels: \"+std::to_string(err));\n }\n err = clReleaseEvent(lastEvent);\n\n if (err){\n throw std::runtime_error(\"Failed to release event: \"+std::to_string(err));\n }\n lastEvent = newEvent;\n layerOffset += agentEntry.layerSizes_Host[i] * agentEntry.layerSizes_Host[i+1];\n\n\n }\n\n\n if (agentEntry.layerCount % 2 == 0){\n err = clEnqueueReadBuffer(command_queue, agentEntry.netActivationA, CL_FALSE, 0,\n sizeof(float)*agentEntry.output.size(), &agentEntry.output.front(),\n 1, &lastEvent, &newEvent);\n }\n else {\n err = clEnqueueReadBuffer(command_queue, agentEntry.netActivationB, CL_FALSE, 0,\n sizeof(float)*agentEntry.output.size(), &agentEntry.output.front(),\n 1, &lastEvent, &newEvent);\n }\n\n if (err){\n throw std::runtime_error(\"Failed to read output buffer from network: \"+std::to_string(err));\n }\n\n clReleaseEvent(lastEvent);\n lastEvent = newEvent;\n\n clSetEventCallback(lastEvent, CL_COMPLETE, responseCallback, (void*) &agentEntry);\n\n}\n\nvoid OpenCL_Wrapper::clFinishAll() {\n clFinish(command_queue);\n}\n\nvoid OpenCL_Wrapper::responseCallback(cl_event e, cl_int status, void *data) {\n auto entry = (AgentEntry*) data;\n //printf(\"net out: %f, \", entry->output[0]);\n for (float &f : entry->output) {\n f = 1.f / (1.f + expf(-f));\n }\n //printf(\"normalized: %f\\n\", entry->output[0]);\n entry->agent->setActions(entry->output);\n clReleaseEvent(e);\n}\n\n\n"
},
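`think` above streams one layer at a time through the `perceptron` kernel, ping-ponging between the two activation buffers and advancing `layerOffset` through the flattened weight array. A pure-Python reference of that data layout; the per-perceptron weight ordering is an assumption about `neural_net.cl` (which is not shown), and since any per-layer activation also lives in that kernel, the sketch applies the logistic only at the end, as `responseCallback` does:

```python
import math

def forward(layer_sizes, weights, biases, percept):
    # layer_sizes = [inputs, layer1, ..., outputs]; weights are flattened
    # layer by layer, perceptron by perceptron, as built in addAgent.
    activations = list(percept)
    offset = 0
    for layer in range(len(layer_sizes) - 1):
        n_in, n_out = layer_sizes[layer], layer_sizes[layer + 1]
        nxt = []
        for j in range(n_out):
            total = biases[layer]  # one shared bias per layer
            for i in range(n_in):
                total += activations[i] * weights[offset + j * n_in + i]
            nxt.append(total)
        offset += n_in * n_out
        activations = nxt
    # responseCallback squashes the final output with a logistic.
    return [1.0 / (1.0 + math.exp(-a)) for a in activations]
```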
{
"alpha_fraction": 0.4795467257499695,
"alphanum_fraction": 0.48438364267349243,
"avg_line_length": 33.62200927734375,
"blob_id": "9821fede84a29dfa3b964397a1ef568ab4d219e3",
"content_id": "6d1fe6064165838088a995c361faf831026c7e0d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 7236,
"license_type": "permissive",
"max_line_length": 104,
"num_lines": 209,
"path": "/main.cpp",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "#include <iostream>\n#include <memory>\n\n#include \"Camera.h\"\n#include \"World.h\"\n#include \"GUI.h\"\n#include \"MarkovNames.h\"\n\nint main(int argc, char *argv[]) {\n\n\n // Process potential arguments config filename, and OpenCL device\n std::string deviceName;\n std::string configFilename = \"Config.json\";\n\n for (int i = 0; i < argc; i++){\n std::string s(argv[i]);\n if (s == \"-CL_DEVICE\"){\n deviceName = std::string(argv[i+1]);\n while (true){\n auto c = deviceName.find('_');\n if (c == std::string::npos){\n break;\n }\n deviceName.replace(c, 1, \" \");\n }\n\n printf(\"Set to use OpenCL device: %s\\n\", deviceName.c_str());\n\n i++;\n }\n else if (s == \"-CONFIG\"){\n configFilename = std::string(argv[i+1]);\n }\n }\n Config config;\n config.loadConfigFromFile(configFilename);\n printf(\"Using seed: %lu\\n\", config.seed);\n\n MarkovNames::loadResources();\n Gene::randomEngine = std::mt19937(config.seed++);\n\n // Create main objects\n sf::RenderWindow window(sf::VideoMode(config.render.windowSize.x, config.render.windowSize.y),\n \"Hunter Gatherers\");\n sf::Image icon;\n icon.loadFromFile(\"../resources/icon.png\");\n window.setIcon(icon.getSize().x, icon.getSize().y, icon.getPixelsPtr());\n Camera camera(config, &window,\n sf::Vector2f(config.render.windowSize.x * 10, config.render.windowSize.y * 10));\n OpenCL_Wrapper cl(deviceName);\n World world(config, &window, &cl);\n GUI gui(config, &window, &world, &camera);\n\n // Game loop variables\n\n sf::Clock deltaClock;\n float timeFactor = config.controls.timeFactorInitial;\n bool paused = false;\n\n bool dragging = false;\n bool notMoving = false;\n sf::Vector2<int> mousePosition = sf::Mouse::getPosition();\n float thresholdSpeed = 50;\n\n const Controls& controls = config.controls;\n\n // Game loop\n while (window.isOpen()) {\n sf::Time dt = deltaClock.restart();\n\n // Get events\n sf::Event event{};\n while (window.pollEvent(event)) {\n notMoving = true;\n\n // A key was pressed\n if (event.type == sf::Event::KeyPressed){\n sf::Keyboard::Key code = event.key.code;\n if (code == controls.pause){\n paused = !paused;\n }\n else if (code == controls.close){\n window.close();\n }\n else if (code == controls.showInterface){\n config.render.showInterface = !config.render.showInterface;\n }\n else if (code == controls.clearStats){\n world.clearStatistics();\n for (auto &agent : world.getAgents()){\n agent->clearPath();\n }\n }\n if (code == controls.up){\n camera.move(sf::Vector2f(0, controls.upAmount));\n }\n if (code == controls.down){\n camera.move(sf::Vector2f(0, controls.downAmount));\n }\n if (code == controls.left){\n camera.move(sf::Vector2f(controls.leftAmount, 0));\n }\n if (code == controls.right){\n camera.move(sf::Vector2f(controls.rightAmount, 0));\n }\n if (code == controls.slowDown){\n timeFactor = fmaxf(controls.timeFactorDelta, timeFactor - controls.timeFactorDelta);\n }\n else if (code == controls.speedUp){\n timeFactor = fminf(controls.timeFactorMax, timeFactor + controls.timeFactorDelta);\n }\n }\n\n // Mouse pressed\n else if (event.type == sf::Event::MouseButtonPressed){\n if (event.mouseButton.button == sf::Mouse::Button::Left){\n dragging = false;\n }\n }\n\n // Mouse released\n else if (event.type == sf::Event::MouseButtonReleased){\n if (event.mouseButton.button == sf::Mouse::Button::Left && !dragging){\n // Click!\n auto mapPos = (sf::Vector2f) window.mapPixelToCoords(mousePosition);\n auto hits = world.getQuadtree().searchNear(mapPos, 0.1);\n bool selected = false;\n for (auto &hit : hits) 
{\n if (pointInBox(mapPos, hit->getWorldBoundsf())) {\n if (hit != gui.getSelectedAgent() && typeid(*hit) == typeid(Agent)) {\n gui.selectAgent(std::dynamic_pointer_cast<Agent>(hit));\n selected = true;\n break;\n }\n }\n }\n if (!selected){\n if (!gui.click(mousePosition)){\n gui.selectAgent(nullptr);\n }\n }\n }\n }\n\n // Mouse drag\n else if (event.type == sf::Event::MouseMoved){\n auto oldMousePos = mousePosition;\n mousePosition = sf::Vector2<int>(event.mouseMove.x, event.mouseMove.y);\n if (sf::Mouse::isButtonPressed(sf::Mouse::Button::Left)){\n sf::Vector2f delta = sf::Vector2f(oldMousePos - mousePosition);\n float speed = std::sqrt(delta.x*delta.x+delta.y*delta.y);\n if (speed < thresholdSpeed || dragging)\n camera.move(delta);\n }\n else {\n gui.hover(mousePosition);\n }\n\n dragging = true;\n notMoving = false;\n }\n\n // Mouse was scrolled\n else if (event.type == sf::Event::MouseWheelScrolled){\n sf::Vector2<int> c = sf::Mouse::getPosition(window);\n camera.zoomTo(event.mouseWheelScroll.delta, c);\n }\n\n // A finger was placed on the screen\n else if (event.type == sf::Event::TouchBegan){\n std::cout<<\"Touch!\\n\";\n }\n\n // Window resized\n else if (event.type == sf::Event::Resized){\n camera.resizeWindow(event.size);\n }\n\n // Close window\n else if (event.type == sf::Event::Closed){\n window.close();\n }\n\n if (notMoving)\n dragging = false;\n }\n\n if (config.shouldReload){\n printf(\"Reloading config\\n\");\n config.loadConfigFromFile(configFilename);\n }\n\n camera.update(dt.asSeconds());\n\n // Updating world\n if (!paused){\n world.update(dt.asSeconds()*timeFactor);\n }\n\n // Rendering\n window.clear(sf::Color::Black);\n world.draw(paused ? 0 : dt.asSeconds()*timeFactor);\n gui.draw(paused ? 0 : dt.asSeconds(), timeFactor);\n window.display();\n }\n\n return 0;\n}\n"
},
{
"alpha_fraction": 0.6583065986633301,
"alphanum_fraction": 0.6617174744606018,
"avg_line_length": 22.733333587646484,
"blob_id": "93dc158535e98c57f43bfd7b12ca5c41a18cc608",
"content_id": "0d77d28eaf46668e09f367054fd26bdfc59bba2e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4984,
"license_type": "permissive",
"max_line_length": 126,
"num_lines": 210,
"path": "/Gene.h",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2018-12-03.\n//\n\n#ifndef HUNTERGATHERERS_GENOME_H\n#define HUNTERGATHERERS_GENOME_H\n\n\n#include <map>\n#include <list>\n#include <memory>\n#include <functional>\n#include <random>\n#include <any>\n\n\nclass Gene {\npublic:\n\n explicit Gene(const std::type_info &type);\n\n const std::type_info &type;\n\n virtual std::shared_ptr<Gene> clone() const = 0;\n virtual void generate() = 0;\n virtual void mutate(float factor) = 0;\n virtual void evaluate(float mutationFactor, unsigned version) = 0;\n\n virtual void writeNormal(std::vector<double> &vector);\n\n float getMutationWeight() const;\n void setMutationWeight(float mutationWeight);\n\n\n Gene *getOwner() const {\n return owner;\n }\n\n template <class T>\n T *getOwner() const {\n if (owner->type != typeid(T)){\n throw std::runtime_error(\"Requested owner gene is type \"+std::string(owner->type.name())\n + \", not \" + typeid(T).name() + \".\");\n }\n return (T*) owner;\n\n }\n\n void setOwner(Gene *owner);\n\n enum State {\n UNEVALUATED,\n EVALUATING,\n EVALUATED\n };\n\n State getState() const;\n void setState(State state);\n\n unsigned int getEvaluationCount() const;\n void setEvaluationCount(unsigned int evaluationCount);\n\n static std::mt19937 randomEngine;\n\nprivate:\n Gene *owner;\n\n State state;\n unsigned evaluationCount;\n float mutationWeight;\n\n};\n\nclass FloatGene : public Gene {\npublic:\n FloatGene(float minVal, float maxVal);\n\n std::shared_ptr<Gene> clone() const override;\n void generate() override;\n void mutate(float factor) override;\n void evaluate(float mutationFactor, unsigned version) override;\n void writeNormal(std::vector<double> &vector) override;\n\n float getValue() const;\n void setValue(float value);\n\n float getMinVal() const;\n void setMinVal(float minVal);\n\n float getMaxVal() const;\n void setMaxVal(float maxVal);\n\nprivate:\n float value;\n float minVal, maxVal;\n};\n\n\nclass IntegerGene : public Gene {\npublic:\n IntegerGene(int minVal, int maxVal);\n\n std::shared_ptr<Gene> clone() const override;\n void generate() override;\n void mutate(float factor) override;\n void evaluate(float mutationFactor, unsigned version) override;\n void writeNormal(std::vector<double> &vector) override;\n\n int getValue() const;\n void setValue(int value);\n\n int getMinVal() const;\n void setMinVal(int minVal);\n\n int getMaxVal() const;\n void setMaxVal(int maxVal);\n\nprivate:\n int value;\n int minVal, maxVal;\n};\n\ntemplate<class T>\nclass LambdaGene : public Gene {\npublic:\n explicit LambdaGene(std::function<T(LambdaGene<T>&, float)> lambda);\n\n std::shared_ptr<Gene> clone() const override;\n void generate() override;\n void evaluate(float mutationFactor, unsigned version) override;\n\n T getValue() const;\n void setValue(T value);\n\nprivate:\n void mutate(float factor) override;\n const std::function<T(LambdaGene&, float)> lambda;\n\n T value;\n};\n\ntemplate class LambdaGene<float>;\ntemplate class LambdaGene<int>;\ntemplate class LambdaGene<std::string>;\ntemplate class LambdaGene<std::any>; // TODO: Does this work?!\ntemplate class LambdaGene<void*>;\n\nclass MapGenes : public Gene {\npublic:\n MapGenes();\n ~MapGenes() = default;\n\n std::shared_ptr<Gene> clone() const override;\n void generate() override;\n void mutate(float factor) override;\n void evaluate(float mutationFactor, unsigned version) override;\n\n void writeNormal(std::vector<double> &vector) override;\n\n void addGenes(const std::string &name, std::shared_ptr<Gene> gene);\n\n Gene 
*getGene(const std::string &name) const {\n return genes.at(name).get();\n }\n\n template <class T>\n T *getGene(const std::string &name) const {\n Gene *g = genes.at(name).get();\n const std::type_info &c = g->type;\n if (g->type != typeid(T)){\n throw std::runtime_error(\"Requested gene in map \"+name+\" is type \"+g->type.name()+\", not \"+ typeid(T).name()+\".\");\n }\n return (T*) g;\n\n }\n\n\nprivate:\n std::map<std::string, std::shared_ptr<Gene> > genes;\n};\n\nclass ListGenes : public Gene {\npublic:\n ListGenes(std::shared_ptr<Gene> templateGene, std::size_t count);\n ListGenes(std::shared_ptr<Gene> templateGene, std::string countGeneName);\n\n std::shared_ptr<Gene> clone() const override;\n void generate() override;\n void mutate(float factor) override;\n void evaluate(float mutationFactor, unsigned version) override;\n\n void writeNormal(std::vector<double> &vector) override;\n\n size_t getCount() const;\n void setCount(size_t count);\n\n const std::list<std::shared_ptr<Gene>> &getList() const;\n\nprivate:\n void updateCount();\n std::list<std::shared_ptr<Gene> > genes;\n\n std::shared_ptr<Gene> templateGene;\n\n std::size_t count;\n const std::string countGeneName;\n const bool staticCount;\n};\n\n#endif //HUNTERGATHERERS_GENOME_H\n"
},
{
"alpha_fraction": 0.6339446902275085,
"alphanum_fraction": 0.6375613808631897,
"avg_line_length": 24.18655014038086,
"blob_id": "916cd9897f1f57b786da88038df97dbf1d6e565c",
"content_id": "1feca9f355fa2561797387123ffe7ef02fd03535",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 11613,
"license_type": "permissive",
"max_line_length": 118,
"num_lines": 461,
"path": "/Gene.cpp",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2018-12-03.\n//\n\n\n#include \"Gene.h\"\n#include \"Config.h\"\n\nstd::mt19937 Gene::randomEngine = std::mt19937(0); // Seed is set outside class\n\nGene::Gene(const std::type_info &type) : type(type), state(UNEVALUATED), evaluationCount(0), mutationWeight(1) {};\n\nvoid Gene::setOwner(Gene *owner) {\n Gene::owner = owner;\n evaluationCount = owner->getEvaluationCount();\n}\n\n\nGene::State Gene::getState() const {\n return state;\n}\n\nvoid Gene::setState(Gene::State state) {\n Gene::state = state;\n}\n\n\nunsigned int Gene::getEvaluationCount() const {\n return evaluationCount;\n}\n\nvoid Gene::setEvaluationCount(unsigned int evaluationCount) {\n Gene::evaluationCount = evaluationCount;\n}\n\nfloat Gene::getMutationWeight() const {\n return mutationWeight;\n}\n\nvoid Gene::setMutationWeight(float mutationWeight) {\n Gene::mutationWeight = mutationWeight;\n}\n\nvoid Gene::writeNormal(std::vector<double> &vector) {\n\n}\n\n\n// Float gene\n\nFloatGene::FloatGene(float minVal, float maxVal): Gene(typeid(FloatGene)), minVal(minVal), maxVal(maxVal) {}\n\nstd::shared_ptr<Gene> FloatGene::clone() const {\n return std::make_shared<FloatGene>(*this);;\n}\n\nvoid FloatGene::generate() {\n std::uniform_real_distribution<float> distribution(minVal, maxVal);\n value = distribution(randomEngine);\n setState(EVALUATED);\n}\n\nvoid FloatGene::mutate(float factor) {\n setEvaluationCount(getEvaluationCount()+1);\n float change = factor*getMutationWeight()*(maxVal-minVal);\n std::uniform_real_distribution<float> distribution(-change, change);\n value = std::max(std::min(value + distribution(randomEngine), maxVal), minVal);\n}\n\nvoid FloatGene::evaluate(float mutationFactor, unsigned version) {\n if (getState() == UNEVALUATED){\n setState(EVALUATING);\n generate();\n setState(EVALUATED);\n }\n else if (version != getEvaluationCount()) {\n setState(EVALUATING);\n mutate(mutationFactor);\n setState(EVALUATED);\n }\n}\n\nfloat FloatGene::getValue() const {\n if (getState() != EVALUATED){\n throw std::runtime_error(\"FloatGene value not evaluated.\");\n }\n return value;\n}\n\nvoid FloatGene::setValue(float value) {\n FloatGene::value = value;\n}\n\nfloat FloatGene::getMinVal() const {\n return minVal;\n}\n\nvoid FloatGene::setMinVal(float minVal) {\n FloatGene::minVal = minVal;\n}\n\nfloat FloatGene::getMaxVal() const {\n return maxVal;\n}\n\nvoid FloatGene::setMaxVal(float maxVal) {\n FloatGene::maxVal = maxVal;\n}\n\nvoid FloatGene::writeNormal(std::vector<double> &vector) {\n float val = (value-minVal)/(maxVal-minVal);\n if (val == val){ // Nan check\n vector.push_back(val);\n }\n}\n\n\n// Integer gene\n\nIntegerGene::IntegerGene(int minVal, int maxVal) : Gene(typeid(IntegerGene)), minVal(minVal), maxVal(maxVal) {}\n\nstd::shared_ptr<Gene> IntegerGene::clone() const {\n return std::make_shared<IntegerGene>(*this);;\n}\n\n\nvoid IntegerGene::generate() {\n std::binomial_distribution<int> distribution(maxVal-minVal, 0.5f);\n value = minVal + distribution(randomEngine);\n setState(EVALUATED);\n}\n\nvoid IntegerGene::mutate(float factor) {\n setEvaluationCount(getEvaluationCount()+1);\n std::poisson_distribution distribution(factor);\n int change = distribution(randomEngine);\n if (std::uniform_int_distribution(0, 1)(randomEngine) == 0){\n change = -change;\n }\n\n value = std::max(std::min(value + change, maxVal), minVal);\n}\n\nvoid IntegerGene::evaluate(float mutationFactor, unsigned version) {\n if (getState() == UNEVALUATED){\n setState(EVALUATING);\n generate();\n 
setEvaluationCount(version);\n }\n else if (version != getEvaluationCount()) {\n setState(EVALUATING);\n mutate(mutationFactor);\n }\n setState(EVALUATED);\n}\n\n\nint IntegerGene::getValue() const {\n if (getState() != EVALUATED){\n throw std::runtime_error(\"IntegerGene value not evaluated.\");\n }\n return value;\n}\n\nvoid IntegerGene::setValue(int value) {\n IntegerGene::value = value;\n}\n\nint IntegerGene::getMinVal() const {\n return minVal;\n}\n\nvoid IntegerGene::setMinVal(int minVal) {\n IntegerGene::minVal = minVal;\n}\n\nint IntegerGene::getMaxVal() const {\n return maxVal;\n}\n\nvoid IntegerGene::setMaxVal(int maxVal) {\n IntegerGene::maxVal = maxVal;\n}\n\nvoid IntegerGene::writeNormal(std::vector<double> &vector) {\n double val = (double) (value-minVal)/(maxVal-minVal);\n if (val == val){ // Nan check\n vector.push_back(val);\n }\n\n}\n\n\n// Map genes\n\nMapGenes::MapGenes() : Gene(typeid(MapGenes)) {}\n\nstd::shared_ptr<Gene> MapGenes::clone() const {\n std::shared_ptr<MapGenes> copy = std::make_shared<MapGenes>(*this);\n copy->genes = std::map<std::string, std::shared_ptr<Gene>>();\n\n for (auto &gene : genes) {\n auto geneCopy = gene.second->clone();\n geneCopy->setOwner(copy.get());\n copy->genes.emplace(gene.first, std::move(geneCopy));\n }\n\n return copy;\n}\n\n\nvoid MapGenes::generate() {\n setState(EVALUATING);\n for (auto &gene: genes){\n gene.second->evaluate(0, getEvaluationCount());\n }\n setState(EVALUATED);\n}\n\n\nvoid MapGenes::mutate(float factor) {\n setEvaluationCount(getEvaluationCount()+1);\n for (auto &gene: genes) {\n gene.second->evaluate(factor, getEvaluationCount());\n }\n}\n\nvoid MapGenes::evaluate(float mutationFactor, unsigned version) {\n if (getState() == EVALUATING){\n throw std::runtime_error(\"MapGene is evaluating (possible loop).\");\n }\n if (getState() == UNEVALUATED){\n setState(EVALUATING);\n generate();\n setEvaluationCount(version);\n }\n else if (version != getEvaluationCount()) {\n setState(EVALUATING);\n mutate(mutationFactor);\n }\n setState(EVALUATED);\n}\n\n\nvoid MapGenes::addGenes(const std::string &name, std::shared_ptr<Gene> gene) {\n genes.emplace(name, gene);\n genes.at(name)->setOwner(this);\n}\n\nvoid MapGenes::writeNormal(std::vector<double> &vector) {\n for (auto &gene: genes) {\n gene.second->writeNormal(vector);\n }\n}\n\n\n// List Gene\n\nListGenes::ListGenes(std::shared_ptr<Gene> templateGene, std::size_t count)\n: Gene(typeid(ListGenes)), templateGene(std::move(templateGene)), staticCount(true), count(count), countGeneName(\"\") {\n ListGenes::templateGene->setOwner(this);\n}\n\nListGenes::ListGenes(std::shared_ptr<Gene> templateGene, std::string countGeneName)\n: Gene(typeid(ListGenes)), templateGene(std::move(templateGene)), staticCount(false), countGeneName(countGeneName) {\n ListGenes::templateGene->setOwner(this);\n}\n\nstd::shared_ptr<Gene> ListGenes::clone() const {\n std::shared_ptr<ListGenes> copy = std::make_shared<ListGenes>(*this);\n copy->templateGene->setOwner(copy.get());\n\n copy->genes = std::list<std::shared_ptr<Gene>>();\n for (auto &gene : genes) {\n auto geneCopy = gene->clone();\n geneCopy->setOwner(copy.get());\n copy->genes.push_back(geneCopy);\n }\n\n return copy;\n}\n\nvoid ListGenes::generate() {\n setState(EVALUATING);\n genes.clear();\n\n updateCount();\n\n for (int i = 0; i < count; i++){\n auto n = templateGene->clone();\n n->setOwner(this);\n genes.push_back(n);\n }\n\n for (auto &gene: genes){\n gene->evaluate(0, getEvaluationCount());\n }\n setState(EVALUATED);\n}\n\n\nvoid 
ListGenes::mutate(float factor) {\n    setEvaluationCount(getEvaluationCount()+1);\n    updateCount();\n\n    std::size_t delta = count - genes.size();\n\n    if (count < genes.size()){\n        auto original = genes;\n        genes.clear();\n        std::sample(original.begin(), original.end(), std::back_inserter(genes), count, randomEngine);\n    }\n    else if (genes.size() < count){\n        auto dist = std::uniform_int_distribution<std::size_t>(0, count-1);\n        std::vector<std::size_t> insertPositions;\n        insertPositions.reserve(count-genes.size());\n\n        for (std::size_t i = 0; i < count-genes.size(); i++){\n            insertPositions.push_back(dist(randomEngine));\n        }\n\n        std::sort(insertPositions.begin(), insertPositions.end());\n\n        auto itr = genes.begin();\n        std::size_t i = 0;\n        for (std::size_t j = 0; j < insertPositions.size(); j++){\n            while (insertPositions.at(j)+j != i){\n                i++;\n                itr = std::next(itr);\n            }\n            (*genes.insert(itr, templateGene->clone()))->setOwner(this);\n        }\n    }\n    int i = 0;\n    for (auto &gene: genes) {\n        gene->evaluate(factor, getEvaluationCount());\n        i++;\n    }\n}\n\nvoid ListGenes::evaluate(float mutationFactor, unsigned version) {\n    if (getState() == EVALUATING){\n        throw std::runtime_error(\"ListGene is evaluating (possible loop).\");\n    }\n    if (getState() == UNEVALUATED){\n        setState(EVALUATING);\n        generate();\n        setEvaluationCount(version);\n    }\n    else if (version != getEvaluationCount()) {\n        setState(EVALUATING);\n        mutate(mutationFactor);\n    }\n    setState(EVALUATED);\n}\n\nsize_t ListGenes::getCount() const {\n    return count;\n}\n\nvoid ListGenes::setCount(size_t count) {\n    ListGenes::count = count;\n}\n\nconst std::list<std::shared_ptr<Gene>> &ListGenes::getList() const {\n    return genes;\n}\n\nvoid ListGenes::updateCount(){\n    if (!staticCount){\n        Gene *countGene;\n\n        try {\n            countGene = getOwner<MapGenes>()->getGene(countGeneName);\n        }\n        catch (const std::runtime_error &error) {\n            throw std::runtime_error(\"Looking for count gene \"+countGeneName+\", but owner isn't map.\\n\");\n        }\n\n        countGene->evaluate(0, getEvaluationCount());\n\n        if (countGene->type == typeid(IntegerGene)){\n            count = ((IntegerGene*) countGene)->getValue();\n        }\n        else if (countGene->type == typeid(LambdaGene<int>)){\n            count = ((LambdaGene<int>*) countGene)->getValue();\n        }\n        else {\n            throw std::runtime_error(\"Count gene \"+countGeneName+\" isn't IntegerGene or Lambda<int>, but instead \" +\n                    std::string(countGene->type.name()));\n        }\n\n    }\n}\n\nvoid ListGenes::writeNormal(std::vector<double> &vector) {\n    for (auto &gene: genes){\n        gene->writeNormal(vector);\n    }\n}\n\n\n\n\n// Lambda gene\n\ntemplate<class T>\nLambdaGene<T>::LambdaGene(std::function<T(LambdaGene<T>&, float)> lambda) : Gene(typeid(LambdaGene)), lambda(lambda) {\n\n}\n\ntemplate <class T>\nstd::shared_ptr<Gene> LambdaGene<T>::clone() const {\n    return std::make_shared<LambdaGene>(*this);\n}\n\ntemplate<class T>\nvoid LambdaGene<T>::generate() {\n    setState(EVALUATING);\n    value = lambda(*this, 0.f);\n    setState(EVALUATED);\n}\n\ntemplate<class T>\nvoid LambdaGene<T>::mutate(float factor) {\n    setEvaluationCount(getEvaluationCount()+1);\n    setState(EVALUATING);\n    value = lambda(*this, factor);\n    setState(EVALUATED);\n}\n\ntemplate<class T>\nvoid LambdaGene<T>::evaluate(float mutationFactor, unsigned version) {\n    if (getState() == EVALUATING){\n        throw std::runtime_error(\"LambdaGene is evaluating (possible loop).\");\n    }\n    if (getState() == UNEVALUATED){\n        setState(EVALUATING);\n        generate();\n        setEvaluationCount(version);\n    }\n    else if (version != getEvaluationCount()) {\n        
setState(EVALUATING);\n mutate(mutationFactor);\n }\n setState(EVALUATED);\n}\n\n\ntemplate<class T>\nT LambdaGene<T>::getValue() const {\n if (getState() != EVALUATED){\n throw std::runtime_error(\"LambdaGene value not evaluated.\");\n }\n return value;\n}\n\ntemplate<class T>\nvoid LambdaGene<T>::setValue(T value) {\n LambdaGene::value = value;\n}\n\n\n"
},
{
"alpha_fraction": 0.6737695336341858,
"alphanum_fraction": 0.6767706871032715,
"avg_line_length": 21.362415313720703,
"blob_id": "365130e6cac1081c77f21f0a5a09c84e0c57054e",
"content_id": "94a3f8b39dc1b38b8cac498261cc5470155e3229",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3332,
"license_type": "permissive",
"max_line_length": 63,
"num_lines": 149,
"path": "/Config.h",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2018-11-23.\n//\n\n#ifndef HUNTERGATHERERS_CONFIG_H\n#define HUNTERGATHERERS_CONFIG_H\n\n#include <SFML/Graphics.hpp>\n#include \"json/json.hpp\"\n#include \"Populator.h\"\n\nstruct WorldSettings {\n sf::Vector2f dimensions;\n float mushroomReproductionRate{};\n float mushroomReproductionDistance{};\n unsigned mushroomReproductionNearLimit{};\n unsigned terrainSquare{};\n float quadtreeLimit{};\n std::map<std::string, Populator::Entry> populatorEntries{};\n};\n\nstruct AgentSettings {\n float mass;\n float friction;\n float maxSpeed;\n float turnFactor;\n float punchTime;\n float actionCooldown;\n\n float energyToParent;\n float energyToChild;\n float energyLossRate;\n float turnRateEnergyLoss;\n float movementEnergyLoss;\n float punchEnergy;\n float punchDamage;\n float mushroomEnergy;\n float maxEnergy;\n unsigned maxMushroomCount;\n\n bool canReproduce;\n bool canWalk;\n bool canTurn;\n bool canEat;\n bool canPlace;\n bool canPunch;\n\n unsigned memory;\n float memoryReactivity;\n\n bool perceiveCollision;\n unsigned receptorCount;\n bool perceiveColor;\n bool perceiveEnergyLevel;\n bool perceiveMushroomCount;\n\n float FOV;\n float visibilityDistance;\n float visualReactivity;\n\n float mutation;\n int layersMin, layersMax;\n float biasMin, biasMax;\n float weightMin, weightMax;\n int perceptronPerLayerMin, perceptronPerLayerMax;\n};\n\n\nstruct RenderSettings {\n sf::Vector2u windowSize;\n\n bool graphLine{};\n bool graphPopulation{};\n bool graphMeanGeneration{};\n bool graphMeanPerceptrons{};\n bool graphMeanAge{};\n bool graphMeanChildren{};\n bool graphMeanMurders{};\n bool graphMeanEnergy{};\n bool graphMeanMushrooms{};\n bool graphMeanSpeed{};\n\n bool graphSpectrogram{};\n bool graphGeneration{};\n bool graphPerceptrons{};\n bool graphAge{};\n bool graphEnergy{};\n bool graphChildren{};\n bool graphMurders{};\n bool graphMushrooms{};\n bool graphSpeed{};\n\n unsigned bins{};\n bool showInterface{};\n bool showWorldObjectBounds{};\n bool showQuadtree{};\n bool showQuadtreeEntities{};\n bool showVision{};\n bool showPaths{};\n bool renderOnlyAgents{};\n bool visualizeGeneration{};\n bool visualizeAge{};\n bool visualizeMushrooms{};\n bool visualizeChildren{};\n bool visualizeMurders{};\n bool visualizeColor{};\n};\n\nstruct Controls {\n sf::Keyboard::Key pause;\n sf::Keyboard::Key close;\n sf::Keyboard::Key showInterface;\n sf::Keyboard::Key clearStats;\n\n sf::Keyboard::Key up;\n float upAmount;\n sf::Keyboard::Key down;\n float downAmount;\n sf::Keyboard::Key left;\n float leftAmount;\n sf::Keyboard::Key right;\n float rightAmount;\n\n sf::Keyboard::Key slowDown;\n sf::Keyboard::Key speedUp;\n float timeFactorInitial;\n float timeFactorDelta;\n float timeFactorMax;\n float scrollFactor;\n};\n\n\nstruct Config {\n long unsigned int seed{};\n WorldSettings world{};\n AgentSettings agents{};\n RenderSettings render;\n Controls controls{};\n\n bool shouldReload;\n\n void loadConfigFromFile(const std::string &filename);\n\n sf::Keyboard::Key findKeyCode(std::string key);\n\n};\n\n\n#endif //HUNTERGATHERERS_CONFIG_H\n"
},
{
"alpha_fraction": 0.6817042827606201,
"alphanum_fraction": 0.6967418789863586,
"avg_line_length": 20.567567825317383,
"blob_id": "54aa988d8a4f2835450466570dd33272c9b4fd3a",
"content_id": "6ab8a0c24d74e6a7f6990a1384df1fa13028e3a9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 798,
"license_type": "permissive",
"max_line_length": 86,
"num_lines": 37,
"path": "/Camera.h",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2018-11-23.\n//\n\n#ifndef HUNTERGATHERERS_CAMERA_H\n#define HUNTERGATHERERS_CAMERA_H\n\n\n#include <SFML/Graphics.hpp>\n#include <cmath>\n\n#include \"Config.h\"\n#include \"Agent.h\"\n\nclass Camera {\npublic:\n explicit Camera(Config &config, sf::RenderWindow *window, sf::Vector2f worldSize);\n void update(float deltaT);\n void move(sf::Vector2f offset);\n void zoomTo(float mouseWheelDelta, sf::Vector2<int> mousePosition);\n void followAgent(Agent* agent);\n void resizeWindow(sf::Event::SizeEvent size);\n\n sf::View getView();\n void setView(sf::View view);\nprivate:\n Config& config;\n sf::RenderWindow *window;\n sf::View view;\n sf::Vector2f worldSize;\n\n bool followingAgent;\n std::weak_ptr<Agent> agentFollow;\n};\n\n\n#endif //HUNTERGATHERERS_CAMERA_H\n"
},
{
"alpha_fraction": 0.5953931212425232,
"alphanum_fraction": 0.608867347240448,
"avg_line_length": 35.03995132446289,
"blob_id": "e343758bd9de93170ce7b1124472d295549f90f9",
"content_id": "25b533c5c38ab1a8bc4f2b78420ca1a963687925",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 28870,
"license_type": "permissive",
"max_line_length": 168,
"num_lines": 801,
"path": "/Agent.cpp",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2018-11-21.\n//\n\n#include <iostream>\n#include <cmath>\n\n#include \"Agent.h\"\n#include \"Config.h\"\n#include \"utils.cpp\"\n#include \"World.h\"\n#include \"Mushroom.h\"\n#include \"MarkovNames.h\"\n#include \"Skull.h\"\n\n#define PI 3.14159265f\n\nbool Agent::loaded = false;\nsf::Texture Agent::walkingTexture;\nsf::Texture Agent::punchTexture;\n\nvoid Agent::loadResources() {\n if (!loaded){\n Agent::walkingTexture.loadFromFile(\"resources/WalkCycle.png\");\n Agent::punchTexture.loadFromFile(\"resources/Punch.png\");\n loaded = true;\n }\n}\n\nAgent::Agent(const AgentSettings &settings, World *world, sf::Vector2f position, float orientation)\n: settings(settings), WorldObject(\"Agent\", world, position, true), orientation(orientation) {\n loadResources();\n\n generation = 0;\n oldChildCount = 0;\n childCount = 0;\n oldMurderCount = 0;\n murderCount = 0;\n energy = settings.maxEnergy;\n setMass(settings.mass);\n setFriction(settings.friction);\n actionCooldown = settings.actionCooldown;\n punchTimer = 0;\n\n inventory.mushrooms = 0;\n\n frameIndex = 0;\n frame = sf::IntRect(0, 0, 32, 32);\n sprite = sf::Sprite(walkingTexture, frame);\n sprite.setOrigin(16, 5);\n setBounds(sf::IntRect(-8, 10, 8, 27));\n\n receptors.resize(settings.receptorCount);\n std::fill(std::begin(receptors), std::end(receptors), 0.f);\n\n lineOfSight.resize(settings.receptorCount*2);\n lineOfSight[0] = sf::Vertex(sf::Vector2f(0,0));\n lineOfSight[1] = sf::Vertex(sf::Vector2f(0,0));\n lineOfSight[0].color = sf::Color::Cyan;\n lineOfSight[1].color = sf::Color::Cyan;\n\n std::size_t inputCount = settings.perceiveCollision + receptors.size() + 3*settings.perceiveColor\n + settings.perceiveEnergyLevel + settings.perceiveMushroomCount + settings.memory;\n std::size_t outputCount = settings.canReproduce + settings.canWalk + 2*settings.canTurn + settings.canEat + settings.canPlace + settings.canPunch + settings.memory;\n\n percept = std::vector<float>(inputCount);\n std::fill(std::begin(percept), std::end(percept), 0.f);\n actions = std::vector<float>(outputCount);\n std::fill(std::begin(actions), std::end(actions), 0.f);\n memory = std::vector<float>(settings.memory);\n std::fill(std::begin(memory), std::end(memory), 0.f);\n\n constructGenome(inputCount, outputCount);\n networkStatistics.perceptronCount = 0;\n for (auto &l : genes->getGene<ListGenes>(\"Layers\")->getList()) {\n networkStatistics.perceptronCount += ((MapGenes*) l.get())->getGene<LambdaGene<int>>(\"PerceptronCount\")->getValue();\n }\n networkStatistics.layers = (unsigned) genes->getGene<IntegerGene>(\"LayerCount\")->getValue();\n\n MarkovNames nameGenerator(false, world->getConfig().seed++);\n std::vector<double> genome;\n genes->writeNormal(genome);\n name = nameGenerator.generate(genome);\n setColor(colorFromGenome(genome));\n sprite.setColor(color);\n\n actionUpdates = 0;\n pathTimer = 0;\n perceptMean = std::vector<float>(inputCount);\n std::fill(std::begin(perceptMean), std::end(perceptMean), 0.f);\n actionsMean = std::vector<float>(outputCount);\n std::fill(std::begin(actionsMean), std::end(actionsMean), 0.f);\n varX = std::vector<std::vector<float>>(inputCount, std::vector<float>(outputCount, 0.f));\n covXY = std::vector<std::vector<float>>(inputCount, std::vector<float>(outputCount, 0.f));\n\n}\n\nAgent::Agent(const Agent &other, float mutation)\n: settings(other.settings), WorldObject(other), orientation(other.orientation) {\n loadResources();\n generation = other.generation;\n oldChildCount = 0;\n childCount = 
0;\n    oldMurderCount = 0;\n    murderCount = 0;\n    energy = other.energy;\n    actionCooldown = settings.actionCooldown;\n    punchTimer = 0;\n\n    actionUpdates = 0;\n    pathTimer = 0;\n\n    inventory.mushrooms = 0;\n\n    frameIndex = 0;\n    frame = sf::IntRect(0, 0, 32, 32);\n    sprite = sf::Sprite(walkingTexture, frame);\n    sprite.setOrigin(other.sprite.getOrigin());\n    setBounds(other.getBounds());\n\n    actions.resize(other.actions.size());\n    std::fill(std::begin(actions), std::end(actions), 0.f);\n    percept.resize(other.percept.size());\n    std::fill(std::begin(percept), std::end(percept), 0.f);\n    memory.resize(other.memory.size());\n    std::fill(std::begin(memory), std::end(memory), 0.f);\n    genes = std::dynamic_pointer_cast<MapGenes>(other.genes->clone());\n    genes->mutate(mutation);\n\n    networkStatistics.perceptronCount = 0;\n    for (auto &l : genes->getGene<ListGenes>(\"Layers\")->getList()) {\n        networkStatistics.perceptronCount += ((MapGenes*) l.get())->getGene<LambdaGene<int>>(\"PerceptronCount\")->getValue();\n    }\n    networkStatistics.layers = (unsigned) genes->getGene<IntegerGene>(\"LayerCount\")->getValue();\n\n    MarkovNames nameGenerator(false, world->getConfig().seed++);\n    std::vector<double> genome;\n    std::vector<double> genome2;\n    genes->writeNormal(genome);\n    name = nameGenerator.generate(genome);\n    other.genes->writeNormal(genome2);\n    setColor(colorFromGenome(genome));\n    sprite.setColor(color);\n\n    // AI\n    receptors.resize(other.receptors.size());\n    std::fill(std::begin(receptors), std::end(receptors), 0.f);\n\n    lineOfSight.resize(settings.receptorCount*2);\n    lineOfSight[0] = sf::Vertex(sf::Vector2f(0,0));\n    lineOfSight[1] = sf::Vertex(sf::Vector2f(0,0));\n    lineOfSight[0].color = sf::Color::Cyan;\n    lineOfSight[1].color = sf::Color::Cyan;\n\n    sf::Vertex orientationLine[2];\n\n    unsigned inputCount = genes->getGene<IntegerGene>(\"InputCount\")->getValue();\n    unsigned outputCount = genes->getGene<IntegerGene>(\"OutputCount\")->getValue();\n    actionUpdates = 0;\n    perceptMean = std::vector<float>(inputCount);\n    std::fill(std::begin(perceptMean), std::end(perceptMean), 0.f);\n    actionsMean = std::vector<float>(outputCount);\n    std::fill(std::begin(actionsMean), std::end(actionsMean), 0.f);\n    varX = std::vector<std::vector<float>>(inputCount, std::vector<float>(outputCount, 0.f));\n    covXY = std::vector<std::vector<float>>(inputCount, std::vector<float>(outputCount, 0.f));\n}\n\n\nvoid Agent::constructGenome(size_t inputCount, size_t outputCount) {\n\n    // Function for finding out how many weights every perceptron should have\n    auto previousLayerPerceptronCountLambda = [](LambdaGene<int> &l, float mutationFactor) {\n        auto layers = l.getOwner<MapGenes>()->getOwner<ListGenes>()->getOwner<MapGenes>()->getOwner<ListGenes>();\n        auto thisLayer = l.getOwner<MapGenes>()->getOwner<ListGenes>()->getOwner<MapGenes>();\n\n        // Find which layer is calling this function\n        auto itr = layers->getList().begin();\n        for (auto &_ : layers->getList()) {\n            if (itr->get() == thisLayer){\n                break;\n            }\n            itr++;\n        }\n\n        // If this is the first layer, then the count depends on how many inputs the network has\n        if (itr == layers->getList().begin()){\n            auto count = layers->getOwner<MapGenes>()->getGene<IntegerGene>(\"InputCount\");\n            count->evaluate(mutationFactor, l.getEvaluationCount());\n            return count->getValue();\n        }\n\n        // Else, the count depends on the perceptron count in the previous layer\n        itr--;\n        auto lastLayer = ((MapGenes*) itr->get());\n        auto count = lastLayer->getGene<LambdaGene<int> >(\"PerceptronCount\");\n        
count->evaluate(mutationFactor, l.getEvaluationCount());\n        return count->getValue();\n\n    };\n\n    // Function for finding how many perceptrons this layer should have\n    auto perceptronCountLambda = [](LambdaGene<int> &l, float mutationFactor) {\n        auto layers = l.getOwner<MapGenes>()->getOwner<ListGenes>();\n        auto thisLayer = l.getOwner<MapGenes>();\n\n        // Find which layer is calling this function\n        auto itr = layers->getList().begin();\n        for (auto &_ : layers->getList()) {\n            if (itr->get() == thisLayer){\n                break;\n            }\n            itr++;\n        }\n\n        // If this is the last layer, then the count depends on how many outputs the network has\n        if (++itr == layers->getList().end()){\n            auto count = layers->getOwner<MapGenes>()->getGene<IntegerGene>(\"OutputCount\");\n            count->evaluate(mutationFactor, l.getEvaluationCount());\n            return count->getValue();\n        }\n\n        // Else, the count depends on the MutatingPerceptronCount gene, and is therefore random\n        auto count = l.getOwner<MapGenes>()->getGene<IntegerGene>(\"MutatingPerceptronCount\");\n        count->evaluate(mutationFactor, l.getEvaluationCount());\n        return count->getValue();\n    };\n\n    // Create a perceptron map which has a weight count, and a list of the weights.\n    auto perceptron = std::make_shared<MapGenes>();\n    auto weightCount = std::make_shared<LambdaGene<int> >(previousLayerPerceptronCountLambda);\n    perceptron->addGenes(\"WeightCount\", weightCount);\n    auto weight = std::make_shared<FloatGene>(settings.weightMin, settings.weightMax);\n    auto weights = std::make_shared<ListGenes>(weight, \"WeightCount\");\n    perceptron->addGenes(\"Weights\", weights);\n\n    // Create a layer map which has:\n    auto layer = std::make_shared<MapGenes>();\n    // a random mutating integer gene which might be used to define the count of perceptrons,\n    auto mutatingPerceptronCount = std::make_shared<IntegerGene>(settings.perceptronPerLayerMin, settings.perceptronPerLayerMax);\n    layer->addGenes(\"MutatingPerceptronCount\", mutatingPerceptronCount);\n    // a lambda gene which decides whether the mutatingPerceptronCount should be used,\n    auto perceptronCount = std::make_shared<LambdaGene<int> >(perceptronCountLambda);\n    layer->addGenes(\"PerceptronCount\", perceptronCount);\n    // the bias for this layer,\n    auto bias = std::make_shared<FloatGene>(settings.biasMin, settings.biasMax);\n    layer->addGenes(\"Bias\", bias);\n    // a list of perceptron maps\n    auto perceptrons = std::make_shared<ListGenes>(perceptron, \"PerceptronCount\");\n    layer->addGenes(\"Perceptrons\", perceptrons);\n\n    // The top gene is a map gene containing a layer count, predefined input and output counts,\n    // and a list of layer maps\n    genes = std::make_shared<MapGenes>();\n    auto layerCount = std::make_shared<IntegerGene>(settings.layersMin, settings.layersMax);\n    genes->addGenes(\"LayerCount\", layerCount);\n    auto inputCountG = std::make_shared<IntegerGene>(inputCount, inputCount);\n    genes->addGenes(\"InputCount\", inputCountG);\n    auto outputCountG = std::make_shared<IntegerGene>(outputCount, outputCount);\n    genes->addGenes(\"OutputCount\", outputCountG);\n    auto layers = std::make_shared<ListGenes>(layer, \"LayerCount\");\n    genes->addGenes(\"Layers\", layers);\n\n    // The genome is then generated according to this structure\n    genes->generate();\n}\n\nvoid Agent::updatePercept(float deltaTime) {\n    auto perceptIterator = percept.begin();\n\n    if (settings.perceiveCollision){\n        *perceptIterator = isColliding();\n        perceptIterator++;\n    }\n\n    // Calculate receptors\n\n    sf::Vector2f visionEnd = getPosition() + sf::Vector2f(settings.visibilityDistance, 
0);\n sf::Vector2f dV = visionEnd - getPosition();\n unsigned averageColor[3] = {0, 0, 0};\n unsigned objectsSeen = 0;\n\n for (size_t i = 0; i < receptors.size(); i++){\n float angle = (orientation - settings.FOV/2.f + settings.FOV*((float) i/(receptors.size()-1)))*PI/180.f;\n sf::Vector2f lineEnd = {\n dV.x * cosf(angle) - dV.y * sinf(angle),\n dV.x * sinf(angle) - dV.y * cosf(angle)\n };\n\n std::vector<std::shared_ptr<WorldObject> > nl;\n quadtree->searchNearLine(nl, getPosition(), getPosition()+lineEnd);\n\n float intersected = 0;\n for (auto &n : nl){\n if (n.get() != this){\n sf::Vector2f a = sf::Vector2f(n->getPosition().x + n->getBounds().left,\n n->getPosition().y + n->getBounds().top);\n\n sf::Vector2f b = sf::Vector2f(n->getBounds().width - n->getBounds().left,\n n->getBounds().height - n->getBounds().top);\n\n if (lineIntersectWithBox(getPosition(), getPosition()+lineEnd, a, b)){\n const auto col = n->getColor();\n objectsSeen++;\n averageColor[0] += col.r; averageColor[1] += col.b; averageColor[2] += col.g;\n sf::Vector2f dPos = n->getPosition() - getPosition();\n float normalizedDistance = (dPos.x*dPos.x+dPos.y*dPos.y)\n /(settings.visibilityDistance*settings.visibilityDistance);\n if (normalizedDistance < 1.f){\n intersected += 1.f-normalizedDistance;\n }\n }\n }\n }\n\n intersected = std::fminf(intersected, 1.f);\n\n receptors[i] += (intersected - receptors[i]) * settings.visualReactivity * deltaTime;\n *perceptIterator = receptors[i];\n perceptIterator++;\n\n if (world->getConfig().render.showVision) {\n lineOfSight[i*2].position = getPosition();\n lineOfSight[i*2+1].position = getPosition() + lineEnd;\n lineOfSight[i*2].color = sf::Color(155*receptors[i]+100, 155*receptors[i]+100, 155*receptors[i]+100, 255);\n lineOfSight[i*2+1].color = lineOfSight[i*2].color;\n }\n\n }\n\n if (settings.perceiveColor) {\n sf::Color averageCol(125, 125, 125);\n if (objectsSeen != (unsigned) 0){\n averageCol = sf::Color(averageColor[0]/objectsSeen, averageColor[1]/objectsSeen, averageColor[2]/objectsSeen);\n }\n if (world->getConfig().render.showVision){\n for (auto& los : lineOfSight){\n los.color *= averageCol;\n }\n }\n\n *perceptIterator = averageCol.r/255.f;\n perceptIterator++;\n\n *perceptIterator = averageCol.b/255.f;;\n perceptIterator++;\n\n *perceptIterator = averageCol.g/255.f;\n perceptIterator++;\n\n }\n\n if (world->getConfig().render.showVision){\n sf::Vector2f lineEnd = {\n dV.x * cosf(orientation*PI/180.f) - dV.y * sinf(orientation*PI/180.f),\n dV.x * sinf(orientation*PI/180.f) - dV.y * cosf(orientation*PI/180.f)\n };\n\n orientationLine[0].position = getPosition();\n orientationLine[1].position = getPosition() + lineEnd;\n orientationLine[0].color = sf::Color(100, 100, 200, 50);\n orientationLine[1].color = orientationLine[0].color;\n }\n\n\n if (settings.perceiveEnergyLevel){\n *perceptIterator = energy / settings.maxEnergy;\n perceptIterator++;\n }\n\n if (settings.perceiveMushroomCount){\n *perceptIterator = inventory.mushrooms / settings.maxMushroomCount;\n perceptIterator++;\n }\n\n for (auto& mem : memory){\n *perceptIterator = mem;\n perceptIterator++;\n }\n\n if (perceptIterator != percept.end()){\n throw std::runtime_error(\"All percept values not updated. 
At \"\n +std::to_string(perceptIterator - percept.begin())+\" of \"+std::to_string(percept.size()));\n }\n}\n\nvoid Agent::update(float deltaTime) {\n WorldObject::update(deltaTime);\n pathTimer += deltaTime;\n if (pathTimer >= 1){\n path.emplace_back(sf::Vertex(getPosition(), getColor()));\n pathTimer = 0;\n }\n actionCooldown = fmaxf(actionCooldown - deltaTime, 0.f);\n\n // Apply actions\n auto actionIterator = actions.begin();\n\n sf::Vector2f orientationVector = {\n cosf(orientation*PI/180.f),\n sinf(orientation*PI/180.f)\n };\n\n if (settings.canWalk){\n float walk = *(actionIterator++)-0.3f;\n if (punchTimer == 0){\n applyForce(deltaTime, orientationVector * walk * settings.maxSpeed);\n energy -= fabsf(walk) * settings.movementEnergyLoss * deltaTime;\n }\n }\n\n if (settings.canTurn){\n const float turn1 = *(actionIterator++);\n const float turn2 = *(actionIterator++);\n float turn = (turn1 - turn2)*settings.turnFactor;\n orientation += turn*deltaTime;\n energy -= fabsf(turn1 - turn2) * settings.turnRateEnergyLoss * deltaTime;\n }\n\n if (settings.canReproduce){\n const float reproduceWilling = *(actionIterator++);\n if (0.6 < reproduceWilling && 60 < energy && actionCooldown == 0){\n actionCooldown = settings.actionCooldown;\n world->reproduce(*this);\n }\n }\n\n if (settings.canEat){\n const float eatWilling = *(actionIterator++);\n if (0 < inventory.mushrooms && 0.75 < eatWilling && actionCooldown == 0){\n actionCooldown = settings.actionCooldown;\n inventory.mushrooms--;\n energy += world->getConfig().agents.mushroomEnergy;\n }\n }\n\n if (settings.canPlace){\n const float placeWilling = *(actionIterator++);\n if (0 < inventory.mushrooms && 0.7 < placeWilling && actionCooldown == 0) {\n actionCooldown = settings.actionCooldown;\n inventory.mushrooms--;\n sf::Vector2<float> position(getPosition().x+50*orientationVector.x, getPosition().y+50*orientationVector.y);\n std::shared_ptr<Mushroom> mushroom(new Mushroom(world, position, world->getConfig()));\n world->addObject(mushroom);\n }\n }\n\n if (settings.canPunch){\n const float punchWilling = *(actionIterator++);\n if (punchTimer == 0 && 0.6 < punchWilling && actionCooldown == 0){\n punchTimer += deltaTime;\n sprite.setTexture(punchTexture);\n frameIndex = 0;\n sprite.setTextureRect(frame);\n energy -= settings.punchEnergy;\n\n }\n else if (settings.punchTime < punchTimer){\n punchTimer = 0;\n frameIndex = 0;\n frame = sf::IntRect(0, 0, 32, 32);\n sprite.setTexture(walkingTexture);\n actionCooldown = settings.actionCooldown;\n }\n else if (punchTimer != 0) {\n punchTimer += deltaTime;\n }\n }\n\n auto near = quadtree->searchNear(getPosition(), 64);\n for (auto &object : near) {\n if (object.get() != this ){\n sf::FloatRect a(getPosition().x + getBounds().left,\n getPosition().y + getBounds().top,\n getBounds().width - getBounds().left,\n getBounds().height - getBounds().top);\n\n sf::FloatRect b(object->getPosition().x + object->getBounds().left,\n object->getPosition().y + object->getBounds().top,\n object->getBounds().width - object->getBounds().left,\n object->getBounds().height - object->getBounds().top);\n auto diff = getPosition() - object->getPosition();\n float dist = std::sqrt(diff.x*diff.x+diff.y*diff.y);\n\n if (dist < 32)\n setColliding(true);\n\n if (dist < 64){\n auto &type = typeid(*object.get());\n if (type == typeid(Agent)){\n if (punchTimer == deltaTime){\n auto enemy = (Agent*) object.get();\n float initialEnergy = enemy->getEnergy();\n enemy->setEnergy(enemy->getEnergy() - settings.punchDamage);\n if 
(enemy->getEnergy() < 0){\n printf(\"Agent %s murdered %s stealing %u mushrooms and %f energy.\\n\", name.c_str(),\n enemy->name.c_str(), enemy->inventory.mushrooms, initialEnergy);\n inventory.mushrooms += enemy->inventory.mushrooms;\n enemy->inventory.mushrooms = 0;\n world->addObject(std::make_shared<Skull>(world, enemy->getPosition()));\n energy += initialEnergy;\n murderCount++;\n }\n }\n }\n else if (type == typeid(Mushroom) && inventory.mushrooms < settings.maxMushroomCount){\n world->removeObject(object->getSharedPtr(), false);\n inventory.mushrooms++;\n }\n }\n }\n }\n\n energy = fminf(energy, 100);\n energy -= deltaTime*settings.energyLossRate;\n if (energy <= 0){\n world->removeObject(getSharedPtr(), false);\n }\n\n for (float &mem : memory) {\n mem = (mem*(1.f - settings.memoryReactivity*deltaTime)) + *(actionIterator++)*settings.memoryReactivity*deltaTime;\n }\n\n if (actionIterator != actions.end()){\n throw std::runtime_error(\"All actions values not accessed. At \"\n +std::to_string(actionIterator - actions.begin())+\" of \"+std::to_string(actions.size()));\n }\n}\n\nvoid Agent::draw(sf::RenderWindow *window, float deltaTime) {\n frameTimer += deltaTime;\n if (actions.at(0)*10.f < frameTimer && punchTimer == 0){\n frameIndex = frameIndex % 12 + 1; // Skip the first frame\n frame.top = frameIndex * 32;\n sprite.setTextureRect(frame);\n int o = ((360 + (int) orientation%360 + 315))%360 / 90;\n if (o == 1) o = 3; // To match the order in the image\n else if (o == 3) o = 1;\n\n frame.left = o*32;\n frameTimer = 0;\n }\n else if (settings.punchTime / 5.f < frameTimer){\n frameIndex = (frameIndex + 1) % 5;\n frame.top = frameIndex*32;\n int o = ((360 + (int) orientation%360 + 315))%360 / 90;\n if (o == 1) o = 3; // To match the order in the image\n else if (o == 3) o = 1;\n frame.left = o*32;\n sprite.setTextureRect(frame);\n frameTimer = 0;\n }\n\n sprite.setPosition(getPosition());\n\n auto changePathColor = [&](){\n if (!(world->getConfig().render.showPaths || drawPathNextFrame))\n return;\n const sf::Color col = sprite.getColor();\n for (auto &vert : path){\n vert.color = col;\n }\n };\n\n if (world->getConfig().render.visualizeGeneration && !world->getHistoricalStatistics().empty()){\n unsigned deltaGeneration = world->getHistoricalStatistics().back().highestGeneration\n - world->getHistoricalStatistics().back().lowestGeneration;\n if (deltaGeneration != 0){\n sf::Color c = sprite.getColor();\n c.a = 250 * (float) (generation - world->getHistoricalStatistics().back().lowestGeneration)\n / (float) deltaGeneration + 5;\n sprite.setColor(c);\n changePathColor();\n alreadyRegularColor = false;\n }\n }\n else if (world->getConfig().render.visualizeAge){\n sf::Color c = sprite.getColor();\n c.a = 250.f / (1.f + expf(-(getAge()-200.f)/200.f)) + 5.f;\n sprite.setColor(c);\n changePathColor();\n alreadyRegularColor = false;\n }\n else if (world->getConfig().render.visualizeMushrooms){\n sf::Color c = sprite.getColor();\n c.a = 250.f* (float) inventory.mushrooms/(float) world->getConfig().agents.maxMushroomCount + 5.f;\n sprite.setColor(c);\n changePathColor();\n alreadyRegularColor = false;\n }\n else if (world->getConfig().render.visualizeChildren){\n sf::Color c = sprite.getColor();\n c.a = 250.f / (1.f + expf(-(childCount-10)/3.f)) + 5.f;\n sprite.setColor(c);\n changePathColor();\n alreadyRegularColor = false;\n }\n else if (world->getConfig().render.visualizeMurders){\n sf::Color c = sprite.getColor();\n c.a = 250.f / (1.f + expf(-(murderCount*2-4))) + 5.f;\n 
sprite.setColor(c);\n changePathColor();\n alreadyRegularColor = false;\n }\n else if (!alreadyRegularColor){\n sf::Color c = sprite.getColor();\n c.a = 255;\n sprite.setColor(c);\n changePathColor();\n alreadyRegularColor = true;\n }\n\n if (world->getConfig().render.visualizeColor){\n sf::RectangleShape c;\n c.setSize(sf::Vector2f(sprite.getLocalBounds().width, sprite.getLocalBounds().height));\n c.setOrigin(sprite.getOrigin());\n c.setPosition(sprite.getPosition());\n c.setFillColor(sprite.getColor());\n c.setScale(3, 3);\n window->draw(c);\n }\n\n window->draw(sprite);\n\n if (world->getConfig().render.showVision){\n window->draw(&lineOfSight.front(), 2*receptors.size(), sf::Lines);\n window->draw(orientationLine, 2, sf::Lines);\n }\n\n if (world->getConfig().render.showPaths || drawPathNextFrame){\n window->draw(&path.front(), path.size(), sf::Lines);\n }\n drawPathNextFrame = false;\n\n WorldObject::draw(window, deltaTime);\n}\n\nconst AgentSettings &Agent::getSettings() const {\n return settings;\n}\n\nMapGenes *Agent::getGenes() const {\n return genes.get();\n}\n\nfloat Agent::getOrientation() const {\n return orientation;\n}\n\nvoid Agent::setOrientation(float orientation) {\n Agent::orientation = orientation;\n}\n\nconst std::vector<float> &Agent::getPercept() const {\n return percept;\n}\n\nvoid Agent::setPercept(const std::vector<float> &percept) {\n Agent::percept = percept;\n}\n\nconst std::vector<float> &Agent::getActions() const {\n return actions;\n}\n\nvoid Agent::setActions(const std::vector<float> &actions) {\n Agent::actions = actions;\n actionUpdates++;\n\n networkRegression();\n}\n\nvoid Agent::clearPath() {\n path.clear();\n}\n\nvoid Agent::queuePathDraw() {\n drawPathNextFrame = true;\n}\n\nvoid Agent::networkRegression() {\n\n // Means\n auto perceptDelta = std::vector<float>(percept.size());\n for (unsigned i = 0; i < percept.size(); i++) {\n perceptDelta[i] = percept[i] - perceptMean[i];\n perceptMean[i] += perceptDelta[i] / actionUpdates;\n }\n\n auto actionsDelta = std::vector<float>(actions.size());\n for (unsigned i = 0; i < actions.size(); i++) {\n actionsDelta[i] = actions[i] - actionsMean[i];\n actionsMean[i] += actionsDelta[i] / actionUpdates;\n }\n\n for (unsigned i = 0; i < percept.size(); i++){\n float dx = perceptDelta[i];\n for (unsigned j = 0; j < actions.size(); j++){\n float dy = actionsDelta[j];\n varX[i][j] += (float(float(actionUpdates-1.f)/ (float) actionUpdates)*dx*dx - varX[i][j])/(float) actionUpdates;\n covXY[i][j] += (float(float(actionUpdates-1.f)/ (float) actionUpdates)*dx*dy - covXY[i][j])/(float) actionUpdates;\n }\n }\n}\n\nstd::vector<float> Agent::getRegressionActions(unsigned id) const {\n auto p = std::vector<float>(covXY[id].size(), 0);\n for (unsigned i = 0; i < actions.size(); i++){\n p[i] = covXY[id][i] / varX[id][i];\n if (p[i] != p[i]){ // Nan-check\n p[i] = 0.f;\n }\n // Increase the strength of the correlations\n p[i] = 2.5f*p[i];\n }\n return p;\n}\n\nstd::vector<float> Agent::getRegressionPercept(unsigned id) const {\n auto p = std::vector<float>(covXY.size(), 0);\n for (unsigned i = 0; i < percept.size(); i++){\n p[i] = covXY[i][id] / varX[i][id];\n if (p[i] != p[i]){ // Nan-check\n p[i] = 0.f;\n }\n }\n return p;\n}\n\n\n\nfloat Agent::getEnergy() const {\n return energy;\n}\n\nvoid Agent::setEnergy(float energy) {\n Agent::energy = energy;\n}\n\nconst std::string &Agent::getName() const {\n return name;\n}\n\nunsigned int Agent::getGeneration() const {\n return generation;\n}\n\nvoid Agent::setGeneration(unsigned 
int generation) {\n Agent::generation = generation;\n}\n\nunsigned int Agent::getChildCount() const {\n return childCount;\n}\n\nvoid Agent::setChildCount(unsigned int childCount) {\n Agent::childCount = childCount;\n}\n\nunsigned int Agent::getMurderCount() const {\n return murderCount;\n}\n\nvoid Agent::setMurderCount(unsigned int murderCount) {\n Agent::murderCount = murderCount;\n}\n\nunsigned int Agent::getNewBirths() {\n unsigned delta = childCount - oldChildCount;\n oldChildCount = childCount;\n return delta;\n}\n\nunsigned int Agent::getNewMurders() {\n unsigned delta = murderCount - oldMurderCount;\n oldMurderCount = murderCount;\n return delta;\n}\n\nconst Agent::Inventory &Agent::getInventory() const {\n return inventory;\n}\n\nvoid Agent::setInventory(const Agent::Inventory &inventory) {\n Agent::inventory = inventory;\n}\n\nconst Agent::NetworkStatistics &Agent::getNetworkStatistics() const {\n return networkStatistics;\n}\n\nvoid Agent::setNetworkStatistics(const Agent::NetworkStatistics &networkStatistics) {\n Agent::networkStatistics = networkStatistics;\n}\n\nconst std::vector<float> &Agent::getReceptors() const {\n return receptors;\n}\n\nconst std::vector<float> &Agent::getMemory() const {\n return memory;\n}\n\n\n"
},
{
"alpha_fraction": 0.692307710647583,
"alphanum_fraction": 0.7009679079055786,
"avg_line_length": 22.094118118286133,
"blob_id": "e84a9362ae8dbd2d7d05c1ca2906e15daab8e255",
"content_id": "16e452d3bb452b5440062b7e583834beff2be73f",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1963,
"license_type": "permissive",
"max_line_length": 95,
"num_lines": 85,
"path": "/WorldObject.h",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2018-11-21.\n//\n\n#ifndef FAMILYISEVERYTHING_WORLDOBJECT_H\n#define FAMILYISEVERYTHING_WORLDOBJECT_H\n\n#include <SFML/Graphics.hpp>\n\n#include <memory>\n\n\ntemplate<class T>\nclass Quadtree;\nclass World;\n\nclass WorldObject {\npublic:\n explicit WorldObject(std::string type, World *world, sf::Vector2f position, bool collider);\n\n WorldObject(const WorldObject &other);\n const std::string type;\n std::shared_ptr<WorldObject> getSharedPtr();\n\n virtual void update(float deltaTime);\n virtual void update(float deltaTime, sf::Vector2f oldPosition);\n virtual void draw(sf::RenderWindow *window, float deltaTime);\n\n void setQuadtree(Quadtree<float> *quadtree, std::weak_ptr<WorldObject> object);\n Quadtree<float> *getQuadtree();\n\n const sf::Vector2f &getVelocity() const;\n void setVelocity(const sf::Vector2f &velocity);\n\n float getSpeed() const;\n\n float getMass() const;\n void setMass(float accelerationFactor);\n\n float getFriction() const;\n void setFriction(float friction);\n\n void applyForce(float deltaTime, sf::Vector2f force);\n\n const sf::IntRect getWorldBounds() const;\n const sf::FloatRect getWorldBoundsf() const;\n\n const sf::IntRect &getBounds() const;\n void setBounds(const sf::IntRect &bounds);\n\n const sf::Vector2f &getPosition() const;\n void setPosition(const sf::Vector2f &position);\n\n const bool isCollider() const;\n bool isColliding() const;\n void setColliding(bool colliding);\n\n float getAge() const;\n void setAge(float age);\n\n const sf::Color &getColor() const;\n void setColor(const sf::Color &color);\n\nprotected:\n World *world;\n std::weak_ptr<WorldObject> me;\n const bool collider;\n bool colliding;\n float age;\n sf::Color color;\n\n Quadtree<float> *quadtree;\n sf::Vector2f position;\n sf::Vector2f velocity;\n sf::IntRect bounds;\n\n float mass;\n float friction;\n\n\n};\n\n\n\n#endif //FAMILYISEVERYTHING_WORLDOBJECT_H\n"
},
{
"alpha_fraction": 0.626953125,
"alphanum_fraction": 0.643359363079071,
"avg_line_length": 36.64706039428711,
"blob_id": "decb468214d802b69ca0b57bdaf056c736b20a47",
"content_id": "83344915853ac817e7011c441ff192a6af6d42d2",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2560,
"license_type": "permissive",
"max_line_length": 106,
"num_lines": 68,
"path": "/Mushroom.cpp",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by axelw on 2018-12-28.\n//\n\n#include \"Mushroom.h\"\n#include \"World.h\"\n\nbool Mushroom::loaded = false;\nsf::Texture Mushroom::texture;\nstd::mt19937 Mushroom::randomEngine = std::mt19937(0); // Seed is set outside class\n\nvoid Mushroom::loadResources() {\n if (!loaded){\n loaded = true;\n texture.loadFromFile(\"resources/Mushroom.png\");\n }\n}\n\nMushroom::Mushroom(World *world, const sf::Vector2f &position, const Config &config)\n: WorldObject(\"Mushroom\", world, position, true), config(config) {\n dist = std::uniform_real_distribution<float>(0, 1);\n loadResources();\n sprite = sf::Sprite(texture);\n sprite.setScale(0.5f, 0.5f);\n sf::FloatRect localBounds = sprite.getLocalBounds();\n WorldObject::setBounds(sf::IntRect(localBounds.left*0.5f, localBounds.top*0.5f,\n localBounds.width*0.5f, localBounds.height*0.5f));\n WorldObject::setColor(sf::Color::Red);\n}\n\nvoid Mushroom::update(float deltaTime) {\n WorldObject::update(deltaTime);\n\n auto popEntry = world->getPopulator().getEntry(\"Mushroom\");\n float rand = dist(randomEngine);\n if (rand < config.world.mushroomReproductionRate*deltaTime && popEntry.count < popEntry.targetCount) {\n auto near = world->getQuadtree().searchNear(position, config.world.mushroomReproductionDistance);\n unsigned count = 0;\n for (auto &e : near){\n if (typeid(*e.get()) == typeid(Mushroom)){\n auto diff = getPosition() - e->getPosition();\n float d = std::sqrt(diff.x*diff.x+diff.y*diff.y);\n if (d < config.world.mushroomReproductionDistance)\n count++;\n }\n }\n\n float delta = std::max(0, int(config.world.mushroomReproductionNearLimit-count))\n /(float) config.world.mushroomReproductionNearLimit;\n\n if (rand < config.world.mushroomReproductionRate*deltaTime*delta){\n auto angleDist = std::uniform_real_distribution<double>(0, 360);\n double angle = angleDist(randomEngine);\n sf::Vector2f pos = position+sf::Vector2f(\n std::sin(angle) * config.world.mushroomReproductionDistance,\n std::cos(angle) * config.world.mushroomReproductionDistance);\n\n auto mushroom = std::make_shared<Mushroom>(world, pos, config);\n world->addObject(mushroom);\n }\n }\n}\n\nvoid Mushroom::draw(sf::RenderWindow *window, float deltaTime) {\n sprite.setPosition(getPosition());\n window->draw(sprite);\n WorldObject::draw(window, deltaTime);\n}\n"
},
{
"alpha_fraction": 0.6970776915550232,
"alphanum_fraction": 0.7049180269241333,
"avg_line_length": 17.95945930480957,
"blob_id": "3a6e15e4892d04bfa4c9d84783c36755a69cd513",
"content_id": "dca241cc16586e2fbf78492d4074166bd5728608",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1403,
"license_type": "permissive",
"max_line_length": 80,
"num_lines": 74,
"path": "/OpenCL_Wrapper.h",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2018-11-27.\n//\n\n#ifndef HUNTERGATHERERS_OPENCL_WRAPPER_H\n#define HUNTERGATHERERS_OPENCL_WRAPPER_H\n\n\n#define CL_HPP_TARGET_OPENCL_VERSION 120\n#ifdef __APPLE__\n#include \"OpenCL/opencl.h\"\n#else\n#include <CL/cl.h>\n#endif\n\n\n#include \"WorldObject.h\"\n#include \"Agent.h\"\n\n#include <unordered_map>\n\n\n\nstruct AgentEntry {\n Agent *agent;\n std::vector<float> output;\n\n unsigned outputBandwidth;\n unsigned layerCount;\n\n std::vector<unsigned> layerSizes_Host;\n unsigned maxLayerSize;\n\n cl_mem layerSizes;\n cl_mem layerBiases;\n cl_mem layerWeights;\n\n cl_mem netActivationA;\n cl_mem netActivationB;\n};\n\nclass OpenCL_Wrapper {\npublic:\n explicit OpenCL_Wrapper(std::string deviceToUse);\n virtual ~OpenCL_Wrapper();\n\n void addAgent(Agent *agent);\n\n void removeAgent(Agent *agent);\n void think(std::shared_ptr<Agent> agent, const std::vector<float> &percept);\n\n void clFinishAll();\n\nprivate:\n cl_device_id device_id;\n cl_context context;\n cl_command_queue command_queue;\n cl_uint maxComputeUnits;\n\n cl_kernel perceptronKernel;\n\n std::unordered_map<Agent*, AgentEntry> agentRegister;\n\n const std::string loadFile(std::string filename);\n\n cl_program createAndCompileProgram(const std::string &source);\n\n\n static void responseCallback(cl_event e, cl_int status, void *data);\n\n};\n\n\n#endif //HUNTERGATHERERS_OPENCL_WRAPPER_H\n"
},
{
"alpha_fraction": 0.6910890936851501,
"alphanum_fraction": 0.7089108824729919,
"avg_line_length": 17.035715103149414,
"blob_id": "6bc6f412cdffe7dea6a59c952beab04eaaf228dd",
"content_id": "3cf611f99aa10ee8d8833623dbdd06022c971e38",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 505,
"license_type": "permissive",
"max_line_length": 66,
"num_lines": 28,
"path": "/Heart.h",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by axelw on 2019-01-05.\n//\n\n#ifndef HUNTERGATHERERS_HEART_H\n#define HUNTERGATHERERS_HEART_H\n\n\n#include \"WorldObject.h\"\n\nclass Heart : public WorldObject {\npublic:\n Heart(World *world, const sf::Vector2f &position);\n static void loadResources();\n\n void update(float deltaTime) override;\n void draw(sf::RenderWindow *window, float deltaTime) override;\n\nprivate:\n sf::Sprite sprite;\n\n static bool loaded;\n static sf::Texture texture;\n\n};\n\n\n#endif //HUNTERGATHERERS_HEART_H\n"
},
{
"alpha_fraction": 0.6094470024108887,
"alphanum_fraction": 0.6223502159118652,
"avg_line_length": 28.324323654174805,
"blob_id": "73e101954b8323456e1d6f4fa739f1429d40bf7c",
"content_id": "701902c7cf6c62b15d2cabe461355db1362e3667",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 4340,
"license_type": "permissive",
"max_line_length": 148,
"num_lines": 148,
"path": "/GUI.h",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by axelw on 2019-01-06.\n//\n\n#ifndef HUNTERGATHERERS_GUI_H\n#define HUNTERGATHERERS_GUI_H\n\n\n#include <SFML/Graphics/RenderWindow.hpp>\n#include \"Agent.h\"\n#include \"Camera.h\"\n#include \"utils.cpp\"\n#include \"World.h\"\n\nclass GUI {\npublic:\n explicit GUI(Config &config, sf::RenderWindow *window, World *world, Camera *camera);\n void draw(float deltaTime, float timeFactor);\n void selectAgent(std::shared_ptr<Agent> agent);\n bool click(sf::Vector2i pos);\n bool hover(sf::Vector2i pos);\n\n const std::shared_ptr<Agent> &getSelectedAgent() const;\n\nprivate:\n Config& config;\n sf::RenderWindow *window;\n const sf::Vector2i originalWindowSize;\n const World* world;\n Camera* camera;\n const sf::View &view;\n sf::Font font;\n\n struct Toggle {\n Toggle(const std::string& name, bool *value, std::vector<Toggle> subtoggles = std::vector<Toggle>(),\n sf::Color color = sf::Color(200, 200, 200));\n Toggle(const std::string& name, bool *value, Toggle* parent,\n sf::Color color = sf::Color(200, 200, 200));\n void click();\n void set(bool v);\n void update();\n sf::Text text;\n bool* value;\n sf::Color color;\n std::vector<Toggle> subToggles;\n bool exclusiveSubs = false;\n Toggle* parent;\n bool hovered;\n };\n\n struct SimulationInfo {\n sf::Text main;\n std::vector<sf::RectangleShape> populationDistribution;\n std::vector<Toggle> debug;\n } simulationInfo;\n\n struct LineGraph {\n void update(const World *world);\n void draw(sf::RenderWindow *window, sf::Vector2f orgSize);\n\n std::string name;\n sf::Color color;\n bool* shouldRender;\n unsigned yPixelOffset = 0;\n sf::Text valueText;\n unsigned maxPoints = 20;\n sf::VertexArray verts;\n sf::Vector2f min = {std::numeric_limits<float>::max(), std::numeric_limits<float>::max()};\n sf::Vector2f max = {std::numeric_limits<float>::min(), std::numeric_limits<float>::min()};\n unsigned lastUpdateFrame = 0;\n };\n\n\n std::vector<LineGraph> lineGraphs;\n\n struct Spectrogram {\n void update(const World *world);\n void draw(sf::RenderWindow *window, const sf::Vector2f orgSize);\n\n std::string name;\n bool* shouldRender;\n float stride = 1;\n unsigned markerWidth = 5;\n sf::Vector2u currentSize;\n unsigned startHeight = 20;\n std::size_t downsamplingTriggerH = 400;\n std::size_t downsamplingTriggerW = 2000;\n\n std::vector<std::vector<WorldStatistics::ColorValue>> newValues;\n unsigned perRow = 1;\n unsigned perColumn = 1; unsigned columnCounter = 0;\n std::vector<sf::Color> colorColumn;\n std::vector<unsigned> colorColumnCount;\n\n Contiguous2dVector<sf::Color> spectrogram;\n float minVal = std::numeric_limits<float>::max();\n float maxVal = std::numeric_limits<float>::min();\n unsigned lastUpdateFrame = 0;\n };\n\n std::vector<Spectrogram> spectrograms;\n\n struct VectorRenderer {\n std::size_t hover(sf::Vector2i pos);\n void draw(sf::RenderWindow *window, const std::vector<float> &vec, std::size_t selectedIndex = std::numeric_limits<std::size_t>::max());\n void drawCorr(sf::RenderWindow *window, const std::vector<float> &vec, std::size_t selectedIndex = std::numeric_limits<std::size_t>::max());\n\n sf::Rect<float> bounds;\n std::vector<sf::RectangleShape> rectangles;\n };\n\n std::shared_ptr<Agent> selectedAgent;\n\n struct agentInfo {\n sf::Text agentIdentifier;\n sf::Text energyText;\n sf::RectangleShape energyBackground;\n sf::RectangleShape energyBar;\n\n sf::Text perceptText;\n VectorRenderer perceptVector;\n sf::Text actionsText;\n VectorRenderer actionVector;\n\n sf::Text infoText;\n\n std::vector<std::string> 
perceptLabels;\n std::vector<std::string> actionLabels;\n } agentInfo;\n\n enum AgentVectors {\n VECTOR_NONE,\n VECTOR_PERCEPT,\n VECTOR_ACTIONS\n };\n\n std::pair<AgentVectors, std::size_t> selectedInput;\n\n struct Tooltip {\n bool active = false;\n sf::Vector2i pos;\n std::string text;\n } tooltip;\n\n};\n\n\n#endif //HUNTERGATHERERS_GUI_H\n"
},
{
"alpha_fraction": 0.620164155960083,
"alphanum_fraction": 0.6436107754707336,
"avg_line_length": 21.394737243652344,
"blob_id": "3e6b6bc8393670e1493b9e8df00a0c2dda4b8f82",
"content_id": "69f9fdce42c1dd98c51cefee3e555267bccab88d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 853,
"license_type": "permissive",
"max_line_length": 61,
"num_lines": 38,
"path": "/Heart.cpp",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by axelw on 2019-01-05.\n//\n\n#include \"Heart.h\"\n#include \"World.h\"\n\nbool Heart::loaded = false;\nsf::Texture Heart::texture;\n\nvoid Heart::loadResources() {\n if (!loaded){\n loaded = true;\n texture.loadFromFile(\"resources/Heart.png\");\n }\n}\n\nHeart::Heart(World *world, const sf::Vector2f &position)\n: WorldObject(\"Heart\", world, position, false) {\n loadResources();\n sprite = sf::Sprite(texture);\n sprite.setOrigin(19, 36);\n setMass(0.5);\n setVelocity(sf::Vector2f(0, -40));\n}\n\nvoid Heart::update(float deltaTime) {\n WorldObject::update(deltaTime);\n if (2 < getAge()){\n world->removeObject(getSharedPtr(), false);\n }\n}\n\nvoid Heart::draw(sf::RenderWindow *window, float deltaTime) {\n sprite.setPosition(getPosition());\n window->draw(sprite);\n WorldObject::draw(window, deltaTime);\n}\n\n\n"
},
{
"alpha_fraction": 0.6516556143760681,
"alphanum_fraction": 0.6688741445541382,
"avg_line_length": 17.875,
"blob_id": "1fd580b33f9cd99ae00fc381c7581b9402f1d44a",
"content_id": "fc322653afbb25c07e50424d36dd9fd8e29794ba",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 755,
"license_type": "permissive",
"max_line_length": 55,
"num_lines": 40,
"path": "/Populator.h",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by axelw on 2018-12-26.\n//\n\n#ifndef HUNTERGATHERERS_POPULATOR_H\n#define HUNTERGATHERERS_POPULATOR_H\n\n#include <string>\n#include <map>\n#include <random>\n#include \"Config.h\"\n\nclass World;\n\nclass Populator {\npublic:\n explicit Populator(World *world);\n\n struct Entry {\n std::string type;\n unsigned count;\n unsigned targetCount;\n float rate;\n bool enabled;\n };\n\n void populate(float deltaT);\n const Entry& getEntry(std::string type) const;\n void changeCount(std::string type, int deltaCount);\n void entryEnabled(std::string type, bool enabled);\n\nprivate:\n std::mt19937 randomEngine;\n\n std::map<std::string, Entry>& entries;\n World *world;\n};\n\n\n#endif //HUNTERGATHERERS_POPULATOR_H\n"
},
{
"alpha_fraction": 0.6465277075767517,
"alphanum_fraction": 0.6581206321716309,
"avg_line_length": 31.86080551147461,
"blob_id": "32e4e1beb81029e50237186f41e3ffdc4267d99d",
"content_id": "881296c35c880388619044102e95d3c17c1105ac",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 8971,
"license_type": "permissive",
"max_line_length": 160,
"num_lines": 273,
"path": "/World.cpp",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2018-11-23.\n//\n\n#include \"World.h\"\n#include \"BouncingBall.h\"\n#include \"Mushroom.h\"\n#include \"Heart.h\"\n\n#include \"PerlinNoise/PerlinNoise.hpp\"\n\nWorld::World(Config &config, sf::RenderWindow *window, OpenCL_Wrapper *openCL_wrapper) :\nworldTime(0), config(config), window(window), dimensions(config.world.dimensions), openCL_wrapper(openCL_wrapper),\npopulator(this), quadtree(Quadtree<float>(sf::Vector2<float>(0, 0), dimensions)),\nhistoryFrequency(3) {\n randomEngine = std::mt19937(config.seed++);\n quadtree.setLimit(config.world.quadtreeLimit);\n generateTerrain();\n\n agentSpawning = true;\n}\n\nvoid World::generateTerrain() {\n float f = 10;\n siv::PerlinNoise perlinNoise1;\n siv::PerlinNoise perlinNoise2;\n perlinNoise1.reseed(config.seed++);\n perlinNoise2.reseed(config.seed++);\n sf::Image background;\n background.create(config.world.terrainSquare, config.world.terrainSquare, sf::Color::Black);\n for (unsigned x = 0; x < background.getSize().x; x++){\n for (unsigned y = 0; y < background.getSize().y; y++){\n double n1 = perlinNoise1.octaveNoise((float) x / (float) background.getSize().x * f, (float) y / (float) background.getSize().y * f, 20) + 1;\n double n2 = perlinNoise2.octaveNoise((float) x / (float) background.getSize().x * f * 0.3, (float) y / (float) background.getSize().y * f * 0.3, 2);\n background.setPixel(x, y, sf::Color( 20 + n1*2, 30 + n1 * 15, 20 + n1*2));\n if (n2 < -0.1){\n background.setPixel(x, y, sf::Color(10 + n2*2, 14 + n2*3, 10 + n2*2));\n }\n }\n }\n\n terrainTexture.loadFromImage(background);\n terrain = sf::Sprite(terrainTexture);\n terrain.setScale(dimensions.x / config.world.terrainSquare, dimensions.y / config.world.terrainSquare);\n}\n\nvoid World::update(float deltaTime) {\n worldTime += deltaTime;\n openCL_wrapper->clFinishAll(); // More optimized to have this here?\n\n populator.entryEnabled(\"Agent\", agentSpawning);\n\n populator.populate(deltaTime);\n\n\n // World updates\n for (auto &object : objects) {\n object->update(deltaTime);\n }\n\n performDeletions();\n\n // AI updates\n for (auto &agent : agents) {\n agent->updatePercept(deltaTime);\n openCL_wrapper->think(agent, agent->getPercept());\n }\n\n updateStatistics();\n\n\n}\n\nvoid World::draw(float deltaTime) {\n if (!config.render.renderOnlyAgents){\n window->draw(terrain);\n }\n\n if (!config.render.renderOnlyAgents){\n for (auto &object : objects) {\n object->draw(window, deltaTime);\n }\n }\n else {\n for (auto &agent : agents){\n agent->draw(window, deltaTime);\n }\n }\n\n if (config.render.showQuadtree){\n quadtree.draw(window, config.render.showQuadtreeEntities);\n }\n}\n\nbool World::addObject(std::shared_ptr<WorldObject> worldObject) {\n\n if (worldObject->isCollider()){\n if (!quadtree.add(worldObject)){\n return false;\n }\n }\n\n worldObject->setQuadtree(&quadtree, worldObject);\n objects.insert(worldObject);\n\n if (typeid(*worldObject.get()) == typeid(Agent)){\n agents.insert(std::dynamic_pointer_cast<Agent>(worldObject));\n openCL_wrapper->addAgent((Agent*) worldObject.get());\n }\n\n populator.changeCount(worldObject->type, 1);\n\n return true;\n\n}\n\nbool World::removeObject(std::shared_ptr<WorldObject> worldObject, bool performImmediately) {\n if (!performImmediately){\n deletionList.push_back(worldObject);\n return true;\n }\n bool success = worldObject->isCollider() ? 
quadtree.remove(worldObject.get()) : true;\n if (success){\n if (typeid(*worldObject.get()) == typeid(Agent)){\n openCL_wrapper->removeAgent((Agent*) worldObject.get());\n agents.erase(std::dynamic_pointer_cast<Agent>(worldObject));\n }\n\n objects.erase(worldObject);\n populator.changeCount(worldObject->type, -1);\n return true;\n }\n return false;\n}\n\nvoid World::performDeletions() {\n for (auto &worldObject : deletionList) {\n removeObject(worldObject, true);\n }\n\n deletionList.clear();\n}\n\nconst sf::RenderWindow *World::getWindow() const {\n return window;\n}\n\nconst sf::Vector2f &World::getDimensions() const {\n return dimensions;\n}\n\nconst Quadtree<float> &World::getQuadtree() const {\n return quadtree;\n}\n\nOpenCL_Wrapper *World::getOpenCL_wrapper() const {\n return openCL_wrapper;\n}\n\nbool World::spawn(std::string type) {\n std::uniform_real_distribution<float> spawnX(25, dimensions.x-50);\n std::uniform_real_distribution<float> spawnY(25, dimensions.y-50);\n\n\n if (type == \"Agent\"){\n sf::Vector2<float> position(spawnX(randomEngine), spawnY(randomEngine));\n float orientation = std::uniform_real_distribution<float>(0, 360)(randomEngine);\n auto agent = std::make_shared<Agent>(config.agents, this, position, orientation);\n agent->setVelocity(sf::Vector2f(0, 0));\n return addObject(agent);\n }\n else if (type == \"Mushroom\"){\n sf::Vector2<float> position(spawnX(randomEngine), spawnY(randomEngine));\n std::shared_ptr<Mushroom> w(new Mushroom(this, position, config));\n return addObject(w);\n }\n else if (type == \"BouncingBall\"){\n sf::Vector2<float> position(spawnX(randomEngine), spawnY(randomEngine));\n std::shared_ptr<BouncingBall> w(new BouncingBall(this, position, 10.f));\n std::uniform_real_distribution<float> velocity(-30, 30);\n w->setVelocity({velocity(randomEngine), velocity(randomEngine)});\n return addObject(w);\n }\n return false;\n}\n\nvoid World::reproduce(Agent &a) {\n auto agent = std::make_shared<Agent>(a, config.agents.mutation);\n agent->setGeneration(agent->getGeneration()+1);\n agent->setQuadtree(&quadtree, agent);\n\n agent->setOrientation(std::uniform_real_distribution<float>(0, 360)(randomEngine));\n float totalEnergy = agent->getEnergy();\n a.setEnergy(totalEnergy*config.agents.energyToParent);\n agent->setEnergy(totalEnergy*config.agents.energyToChild);\n a.setChildCount(a.getChildCount()+1);\n addObject(agent);\n addObject(std::make_shared<Heart>(this, a.getPosition()));\n printf(\"Reproduced to gen %u : %s -> %s\\n\", agent->getGeneration(), a.getName().c_str(), agent->getName().c_str());\n}\n\nConfig & World::getConfig() {\n return config;\n}\n\nconst std::set<std::shared_ptr<Agent>> &World::getAgents() const {\n return agents;\n}\n\nconst std::set<std::shared_ptr<WorldObject>> &World::getObjects() const {\n return objects;\n}\n\nvoid World::updateStatistics() {\n WorldStatistics statistics;\n if (!historicalStatistics.empty())\n if (worldTime - historicalStatistics.back().timestamp <= historyFrequency)\n return; // Not time yet\n\n statistics.timestamp = worldTime;\n statistics.populationCount = agents.size();\n statistics.mushroomCount = populator.getEntry(\"Mushroom\").count;\n\n statistics.generation.reserve(agents.size());\n statistics.perceptrons.reserve(agents.size());\n statistics.age.reserve(agents.size());\n statistics.children.reserve(agents.size());\n statistics.murders.reserve(agents.size());\n statistics.energy.reserve(agents.size());\n statistics.mushrooms.reserve(agents.size());\n 
statistics.speed.reserve(agents.size());\n\n statistics.lowestGeneration = std::numeric_limits<unsigned>::max();\n statistics.highestGeneration = 0;\n for (auto& agent : agents){\n unsigned g = agent->getGeneration();\n\n if (g < statistics.lowestGeneration){\n statistics.lowestGeneration = g;\n }\n if (statistics.highestGeneration < g){\n statistics.highestGeneration = g;\n }\n\n statistics.generation.push_back({agent->getColor(), (float) agent->getGeneration()});\n statistics.perceptrons.push_back({agent->getColor(), (float) agent->getNetworkStatistics().perceptronCount});\n statistics.age.push_back({agent->getColor(), agent->getAge()});\n statistics.children.push_back({agent->getColor(), (float) agent->getChildCount()});\n statistics.murders.push_back({agent->getColor(), (float) agent->getMurderCount()});\n statistics.energy.push_back({agent->getColor(), std::max(0.f, agent->getEnergy())});\n statistics.mushrooms.push_back({agent->getColor(), (float) agent->getInventory().mushrooms});\n if (agent->getSpeed() < 400)\n statistics.speed.push_back({agent->getColor(), agent->getSpeed()});\n }\n historicalStatistics.push_back(statistics);\n}\n\nconst std::deque<WorldStatistics> &World::getHistoricalStatistics() const {\n return historicalStatistics;\n}\n\nvoid World::clearStatistics() {\n printf(\"Clearing all statistics\\n\");\n historicalStatistics.clear();\n}\n\nconst float World::getHistoryFrequency() const {\n return historyFrequency;\n}\n\nconst Populator &World::getPopulator() const {\n return populator;\n}\n"
},
{
"alpha_fraction": 0.5740740895271301,
"alphanum_fraction": 0.5833333134651184,
"avg_line_length": 27.528301239013672,
"blob_id": "394f8293fdc2bd4a2687c930dc8d6dc52059b9fd",
"content_id": "7c230ef1bc7fd3232e8b8217f2d78e0d5842d063",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1512,
"license_type": "permissive",
"max_line_length": 103,
"num_lines": 53,
"path": "/Populator.cpp",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by axelw on 2018-12-26.\n//\n\n#include <ctime>\n#include \"World.h\"\n#include \"Populator.h\"\n\n\nPopulator::Populator(World *world) : world(world), entries(world->getConfig().world.populatorEntries) {\n randomEngine = std::mt19937(world->getConfig().seed++);\n}\n\nvoid Populator::populate(float deltaT) {\n for (auto &entry : entries) {\n if (entry.second.count < entry.second.targetCount && entry.second.enabled){\n auto d = std::poisson_distribution(entry.second.rate*deltaT);\n int newCount = d(randomEngine);\n for (unsigned i = 0; i < newCount; i++){\n if (!world->spawn(entry.first)){\n fprintf(stderr, \"Could not spawn: \\\"%s\\\"\\n\", entry.first.c_str());\n }\n if (entry.second.targetCount <= entry.second.count ){\n break;\n }\n }\n }\n }\n}\n\n\nconst Populator::Entry &Populator::getEntry(std::string type) const {\n auto itr = entries.find(type);\n if (itr != entries.end()){\n return entries.at(type);\n }\n\n throw std::runtime_error(\"Couldn't find populator entry \"+type);\n}\n\nvoid Populator::changeCount(std::string type, int deltaCount) {\n auto itr = entries.find(type);\n if (itr != entries.end()){\n entries.at(type).count += deltaCount;\n }\n}\n\nvoid Populator::entryEnabled(std::string type, bool enabled) {\n auto itr = entries.find(type);\n if (itr != entries.end()){\n itr->second.enabled = enabled;\n }\n}\n"
},
{
"alpha_fraction": 0.5162056088447571,
"alphanum_fraction": 0.5308743715286255,
"avg_line_length": 40.42443084716797,
"blob_id": "a18479173a1e9838afbf9eefc0b1cd6b8cd8cf7c",
"content_id": "a043fcc5e1e61eaf8b616b17d5424771a9f8bcfd",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 40017,
"license_type": "permissive",
"max_line_length": 148,
"num_lines": 966,
"path": "/GUI.cpp",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by axelw on 2019-01-06.\n//\n\n\n#include <sstream>\n#include <utility>\n#include <random>\n\n#include \"GUI.h\"\n#include \"World.h\"\n#include \"Camera.h\"\n\nGUI::GUI(Config &config, sf::RenderWindow *window, World *world, Camera *camera)\n : config(config), window(window), originalWindowSize(window->getSize()), view(window->getDefaultView()),\n world(world), camera(camera) {\n#ifdef WIN32\n font.loadFromFile(R\"(C:\\Windows\\Fonts\\consola.ttf)\");\n#elif defined(linux) || defined(__linux)\n\tfont.loadFromFile(\"/usr/share/fonts/truetype/dejavu/DejaVuSans.ttf\");\n#endif\n sf::Color gray(120, 120, 120);\n\n simulationInfo.main = sf::Text(std::string(\"FPS: ###\\nTime factor: ##\\nPopulation: ###\\nMushrooms: ###\\n\"), font);\n simulationInfo.main.setCharacterSize(20);\n simulationInfo.main.setStyle(sf::Text::Regular);\n simulationInfo.main.setFillColor(gray);\n simulationInfo.main.setPosition(10, window->getSize().y-simulationInfo.main.getLocalBounds().height-5);\n\n\n simulationInfo.debug = {\n Toggle(\"agentSpawning\", &world->agentSpawning),\n Toggle(\"reloadConfig\", &config.shouldReload),\n Toggle(\"showWorldObjectBounds\", &config.render.showWorldObjectBounds),\n Toggle(\"showQuadtree\", &config.render.showQuadtree),\n Toggle(\"showQuadtreeEntities\", &config.render.showQuadtreeEntities),\n Toggle(\"showVision\", &config.render.showVision),\n Toggle(\"showPaths\", &config.render.showPaths),\n Toggle(\"graphLine\", &config.render.graphLine),\n Toggle(\"graphSpectrogram\", &config.render.graphSpectrogram),\n Toggle(\"renderOnlyAgents\", &config.render.renderOnlyAgents),\n Toggle(\"visualizeGeneration\", &config.render.visualizeGeneration),\n Toggle(\"visualizeAge\", &config.render.visualizeAge),\n Toggle(\"visualizeChildren\", &config.render.visualizeChildren),\n Toggle(\"visualizeMurders\", &config.render.visualizeMurders),\n Toggle(\"visualizeMushrooms\", &config.render.visualizeMushrooms),\n Toggle(\"showSquare\", &config.render.visualizeColor)\n };\n\n const std::vector<std::array<int, 3>> subColors = {\n {133, 92, 117},\n {217, 175, 107},\n {175, 100, 88},\n {115, 111, 76},\n {82, 106, 131},\n {98, 83, 119},\n {104, 133, 92},\n {156, 156, 94},\n {160, 97, 119},\n {140, 120, 93},\n {70, 115, 120},\n {124, 124, 124}\n };\n\n for (unsigned i = 0; i < simulationInfo.debug.size(); i++){\n auto &toggle = simulationInfo.debug.at(i);\n toggle.text.setFont(font);\n toggle.text.setPosition(window->getSize().x-300, 10+i*25);\n\n if (toggle.text.getString().toAnsiString() == \"graphLine\"){\n toggle.subToggles = {\n Toggle(\"population\", &config.render.graphPopulation, &toggle),\n Toggle(\"mean gen.\", &config.render.graphMeanGeneration, &toggle),\n Toggle(\"mean perceptrons\", &config.render.graphMeanPerceptrons, &toggle),\n Toggle(\"mean age\", &config.render.graphMeanAge, &toggle),\n Toggle(\"mean children\", &config.render.graphMeanChildren, &toggle),\n Toggle(\"mean murders\", &config.render.graphMeanMurders, &toggle),\n Toggle(\"mean energy\", &config.render.graphMeanEnergy, &toggle),\n Toggle(\"mean mushrooms\", &config.render.graphMeanMushrooms, &toggle),\n Toggle(\"mean speed\", &config.render.graphMeanSpeed, &toggle)\n };\n\n for (std::size_t j = 0; j < toggle.subToggles.size(); j++){\n toggle.subToggles.at(j).text.setPosition(\n window->getSize().x-420,\n toggle.text.getPosition().y+j*12\n );\n toggle.subToggles.at(j).text.setCharacterSize(15);\n toggle.subToggles.at(j).text.setFont(font);\n auto col = subColors.at(j);\n toggle.subToggles.at(j).color = 
sf::Color(col[0], col[1], col[2]);\n toggle.subToggles.at(j).update();\n lineGraphs.push_back((LineGraph) {\n .name = toggle.subToggles.at(j).text.getString().toAnsiString(),\n .color = sf::Color(col[0], col[1], col[2]),\n .shouldRender = toggle.subToggles.at(j).value,\n .yPixelOffset = (unsigned) j,\n .valueText = sf::Text(\"###\", font, 12)\n });\n lineGraphs.back().valueText.setFillColor(sf::Color(col[0], col[1], col[2]));\n }\n }\n else if (toggle.text.getString().toAnsiString() == \"graphSpectrogram\"){\n toggle.subToggles = {\n Toggle(\"generation\", &config.render.graphGeneration, &toggle),\n Toggle(\"perceptrons\", &config.render.graphPerceptrons, &toggle),\n Toggle(\"age\", &config.render.graphAge, &toggle),\n Toggle(\"children\", &config.render.graphChildren, &toggle),\n Toggle(\"murders\", &config.render.graphMurders, &toggle),\n Toggle(\"energy\", &config.render.graphEnergy, &toggle),\n Toggle(\"mushrooms\", &config.render.graphMushrooms, &toggle)\n };\n\n auto blank = sf::Color(0, 0, 0, 0);\n spectrograms = {\n (Spectrogram) {.name = \"generation\", .shouldRender=&config.render.graphGeneration,\n .stride=2, .markerWidth=1, .startHeight=10, .spectrogram=Contiguous2dVector(blank)},\n (Spectrogram) {.name = \"perceptrons\", .shouldRender=&config.render.graphPerceptrons,\n .stride=5, .markerWidth=8, .startHeight=80, .spectrogram=Contiguous2dVector(blank)},\n (Spectrogram) {.name = \"age\", .shouldRender=&config.render.graphAge,\n .stride=0.1f, .markerWidth=5, .startHeight=100, .spectrogram=Contiguous2dVector(blank)},\n (Spectrogram) {.name = \"children\", .shouldRender=&config.render.graphChildren,\n .stride=1, .markerWidth=1, .startHeight=20, .spectrogram=Contiguous2dVector(blank)},\n (Spectrogram) {.name = \"murders\", .shouldRender=&config.render.graphMurders,\n .stride=1, .markerWidth=1, .startHeight=20, .spectrogram=Contiguous2dVector(blank)},\n (Spectrogram) {.name = \"energy\", .shouldRender=&config.render.graphEnergy,\n .stride=0.5, .markerWidth=4, .startHeight=100, .spectrogram=Contiguous2dVector(blank)},\n (Spectrogram) {.name = \"mushrooms\", .shouldRender=&config.render.graphMushrooms,\n .stride=1, .markerWidth=3, .startHeight=20, .spectrogram=Contiguous2dVector(blank)}\n };\n\n for (std::size_t j = 0; j < toggle.subToggles.size(); j++){\n toggle.subToggles.at(j).text.setPosition(\n window->getSize().x-420,\n toggle.text.getPosition().y+j*12\n );\n toggle.subToggles.at(j).text.setCharacterSize(15);\n toggle.subToggles.at(j).text.setFont(font);\n toggle.subToggles.at(j).update();\n toggle.exclusiveSubs = true;\n }\n }\n }\n\n sf::Rect<int> distributionBounds(450, window->getSize().y-10, 800, 90);\n simulationInfo.populationDistribution.resize(config.render.bins);\n int binWidth = distributionBounds.width / simulationInfo.populationDistribution.size();\n for (std::size_t i = 0; i < simulationInfo.populationDistribution.size(); i++){\n simulationInfo.populationDistribution.at(i).setFillColor(gray);\n simulationInfo.populationDistribution.at(i).setPosition(distributionBounds.left + i*binWidth, distributionBounds.top);\n simulationInfo.populationDistribution.at(i).setSize(sf::Vector2f(binWidth, distributionBounds.height));\n simulationInfo.populationDistribution.at(i).setOrigin(simulationInfo.populationDistribution.at(i).getSize());\n }\n\n // Agent info\n agentInfo.agentIdentifier = sf::Text(\"\", font);\n agentInfo.agentIdentifier.setCharacterSize(40);\n agentInfo.agentIdentifier.setStyle(sf::Text::Bold);\n agentInfo.agentIdentifier.setFillColor(gray);\n 
agentInfo.agentIdentifier.setPosition(10, 10);\n\n agentInfo.energyText = sf::Text(\"Energy:\", font);\n agentInfo.energyText.setCharacterSize(20);\n agentInfo.agentIdentifier.setStyle(sf::Text::Regular);\n agentInfo.energyText.setFillColor(gray);\n agentInfo.energyText.setPosition(10, 60);\n\n agentInfo.energyBackground = sf::RectangleShape(sf::Vector2f(300, 25));\n agentInfo.energyBackground.setPosition(10, 90);\n agentInfo.energyBackground.setFillColor(sf::Color(50, 50, 50));\n\n agentInfo.energyBar = sf::RectangleShape(sf::Vector2f(300, 25));\n agentInfo.energyBar.setPosition(10, 90);\n agentInfo.energyBar.setFillColor(sf::Color(120, 120, 120));\n\n agentInfo.perceptText = agentInfo.energyText;\n agentInfo.perceptText.setString(\"Percept:\");\n agentInfo.perceptText.setPosition(10, 125);\n agentInfo.perceptVector.bounds = sf::Rect<float>(10, 155, 300, 25);\n\n agentInfo.actionsText = agentInfo.energyText;\n agentInfo.actionsText.setString(\"Actions:\");\n agentInfo.actionsText.setPosition(10, 185);\n agentInfo.actionVector.bounds = sf::Rect<float>(10, 215, 300, 25);\n\n agentInfo.infoText = agentInfo.energyText;\n agentInfo.infoText.setString(\"Network layers ##\\nPerceptron count: ##\\nAge: ###\\nGeneration: ###\\nChildren: ###\\nMurders: ##\\nMushrooms: ##\\n\");\n agentInfo.infoText.setPosition(10, 245);\n\n // Correlation stuff\n selectedInput = {VECTOR_NONE, 0};\n\n}\n\nvoid GUI::draw(float deltaTime, float timeFactor) {\n if (!config.render.showInterface)\n return;\n // UI camera-view\n auto cameraView = window->getView();\n window->setView(view);\n\n { // Draw current world statistics\n simulationInfo.main.setString(\"FPS: \" + std::to_string(int(1.f / deltaTime))\n + \"\\nTime factor: \" + std::to_string(int(timeFactor))\n + \"\\nPopulation: \" + std::to_string(world->getAgents().size())\n + \"\\nMushrooms: \" + std::to_string(world->getHistoricalStatistics().back().mushroomCount)\n +\"\\n\");\n\n window->draw(simulationInfo.main);\n }\n\n if (config.render.graphLine){\n for (auto &lg : lineGraphs){\n if (!*lg.shouldRender) continue;\n lg.update(world);\n lg.draw(window, (sf::Vector2f) originalWindowSize);\n }\n }\n\n // Draw spectrogram\n if (config.render.graphSpectrogram){\n for (auto &sp : spectrograms){\n if (!*sp.shouldRender) continue;\n sp.update(world);\n sp.draw(window, (sf::Vector2f) originalWindowSize);\n }\n }\n\n // Draw debug info\n for (auto& t : simulationInfo.debug){\n t.update();\n window->draw(t.text);\n\n if (t.hovered){\n for (auto &sub : t.subToggles){\n window->draw(sub.text);\n }\n }\n }\n\n\n // Draw information about the selected agent (if there is one)\n {\n if (selectedAgent) {\n selectedAgent->queuePathDraw();\n window->draw(agentInfo.agentIdentifier);\n\n window->draw(agentInfo.energyText);\n agentInfo.energyBar.setScale(selectedAgent->getEnergy() / selectedAgent->getSettings().maxEnergy, 1);\n window->draw(agentInfo.energyBackground);\n window->draw(agentInfo.energyBar);\n\n if (selectedInput.first != VECTOR_NONE) {\n\n if (selectedInput.first == VECTOR_PERCEPT) {\n auto correlation = selectedAgent->getRegressionActions(selectedInput.second);\n\n window->draw(agentInfo.perceptText);\n agentInfo.perceptVector.draw(window, selectedAgent->getPercept(), selectedInput.second);\n\n window->draw(agentInfo.actionsText);\n agentInfo.actionVector.drawCorr(window, correlation);\n } else {\n auto correlation = selectedAgent->getRegressionPercept(selectedInput.second);\n\n window->draw(agentInfo.perceptText);\n agentInfo.perceptVector.drawCorr(window, 
correlation);\n\n window->draw(agentInfo.actionsText);\n agentInfo.actionVector.draw(window, selectedAgent->getActions(), selectedInput.second);\n }\n\n // http://ci.columbia.edu/ci/premba_test/c0331/s7/s7_5.html\n } else {\n window->draw(agentInfo.perceptText);\n agentInfo.perceptVector.draw(window, selectedAgent->getPercept());\n\n window->draw(agentInfo.actionsText);\n agentInfo.actionVector.draw(window, selectedAgent->getActions());\n }\n\n agentInfo.infoText.setString(\n \"Network layers: \" + std::to_string(selectedAgent->getNetworkStatistics().layers)\n + \"\\nPerceptron count: \" + std::to_string(selectedAgent->getNetworkStatistics().perceptronCount)\n + \"\\nAge: \" + std::to_string(selectedAgent->getAge())\n + \"\\nGeneration: \" + std::to_string(selectedAgent->getGeneration())\n + \"\\nChildren: \" + std::to_string(selectedAgent->getChildCount())\n + \"\\nMurders: \" + std::to_string(selectedAgent->getMurderCount())\n + \"\\nMushrooms: \" + std::to_string(selectedAgent->getInventory().mushrooms));\n\n window->draw(agentInfo.infoText);\n }\n }\n\n // Draw tooltip\n if (tooltip.active){\n sf::Text text(tooltip.text, font, 10);\n text.setPosition((sf::Vector2f) tooltip.pos + sf::Vector2f(0, 20));\n window->draw(text);\n }\n\n // Reset camera view\n window->setView(cameraView);\n}\n\nvoid GUI::selectAgent(std::shared_ptr<Agent> agent) {\n if (agent == nullptr){\n selectedAgent.reset();\n }\n else {\n selectedAgent = agent;\n agentInfo.agentIdentifier.setFillColor(agent->getColor());\n agentInfo.agentIdentifier.setString(agent->getName());\n\n auto settings = agent->getSettings();\n\n // Apply percept labels\n agentInfo.perceptLabels = std::vector<std::string>();\n if (settings.perceiveCollision) agentInfo.perceptLabels.emplace_back(\"colliding\");\n for (std::size_t i = 0; i < agent->getReceptors().size(); i++)\n agentInfo.perceptLabels.emplace_back(\"receptor \" + std::to_string(i));\n if (settings.perceiveColor){\n agentInfo.perceptLabels.emplace_back(\"receptors red\");\n agentInfo.perceptLabels.emplace_back(\"receptors green\");\n agentInfo.perceptLabels.emplace_back(\"receptors blue\");\n }\n if (settings.perceiveEnergyLevel) agentInfo.perceptLabels.emplace_back(\"energy\");\n if (settings.perceiveMushroomCount) agentInfo.perceptLabels.emplace_back(\"mushrooms\");\n for (std::size_t i = 0; i < agent->getMemory().size(); i++)\n agentInfo.perceptLabels.emplace_back(\"memory \"+std::to_string(i));\n\n // Apply action labels\n agentInfo.actionLabels = std::vector<std::string>();\n if (settings.canWalk) agentInfo.actionLabels.emplace_back(\"walk\");\n if (settings.canTurn) agentInfo.actionLabels.emplace_back(\"turn right\");\n if (settings.canTurn) agentInfo.actionLabels.emplace_back(\"turn left\");\n if (settings.canReproduce) agentInfo.actionLabels.emplace_back(\"reproduce\");\n if (settings.canEat) agentInfo.actionLabels.emplace_back(\"eat\");\n if (settings.canPlace) agentInfo.actionLabels.emplace_back(\"place mushroom\");\n if (settings.canPunch) agentInfo.actionLabels.emplace_back(\"punch\");\n for (std::size_t i = 0; i < agent->getMemory().size(); i++)\n agentInfo.actionLabels.emplace_back(\"memory \"+std::to_string(i));\n\n }\n\n selectedInput = {VECTOR_NONE, 0};\n}\n\nconst std::shared_ptr<Agent> &GUI::getSelectedAgent() const {\n return selectedAgent;\n}\n\nbool GUI::click(sf::Vector2i pos) {\n pos = sf::Vector2i(((float) pos.x / window->getSize().x) * originalWindowSize.x,\n ((float) pos.y / window->getSize().y) * originalWindowSize.y);\n\n if 
(config.render.showInterface){\n for (auto& t : simulationInfo.debug){\n if (pointInBox(sf::Vector2f(pos.x, pos.y), t.text.getGlobalBounds())){\n t.click();\n if (t.text.getString().substring(0, 9) == \"visualize\") {\n for (auto& c : simulationInfo.debug){\n if (c.text.getString() != t.text.getString() && c.text.getString().substring(0, 9) == \"visualize\"){\n c.set(false);\n }\n }\n }\n else if (t.text.getString().substring(0, 5) == \"graph\") {\n for (auto& c : simulationInfo.debug){\n if (c.text.getString() != t.text.getString() && c.text.getString().substring(0, 5) == \"graph\"){\n c.set(false);\n c.hovered = false;\n }\n }\n }\n return true;\n }\n if (t.hovered) {\n for (auto &sub : t.subToggles) {\n if (pointInBox(sf::Vector2f(pos.x, pos.y), sub.text.getGlobalBounds())) {\n sub.click();\n return true;\n }\n }\n }\n }\n }\n\n if (selectedAgent){\n if (pointInBox(sf::Vector2f(pos.x, pos.y), agentInfo.agentIdentifier.getGlobalBounds())){\n camera->followAgent(selectedAgent.get());\n return true;\n }\n\n if (pointInBox(sf::Vector2f(pos.x, pos.y), simulationInfo.main.getGlobalBounds())){\n printf(\"Click on info\\n\");\n }\n\n std::size_t perceptVectorPos = agentInfo.perceptVector.hover(pos);\n if (perceptVectorPos != std::numeric_limits<std::size_t>::max()){\n selectedInput = {VECTOR_PERCEPT, perceptVectorPos};\n return true;\n }\n\n std::size_t actionVectorPos = agentInfo.actionVector.hover(pos);\n if (actionVectorPos != std::numeric_limits<std::size_t>::max()){\n selectedInput = {VECTOR_ACTIONS, actionVectorPos};\n return true;\n }\n }\n\n return false;\n}\n\nbool GUI::hover(sf::Vector2i pos) {\n pos = sf::Vector2i(((float) pos.x / window->getSize().x) * originalWindowSize.x,\n ((float) pos.y / window->getSize().y) * originalWindowSize.y);\n\n if (config.render.showInterface){\n std::function<bool(Toggle&)> hoverToggle = [&](Toggle &t) -> bool {\n bool anyHovered = false;\n if (t.hovered){\n for (auto &sub : t.subToggles){\n anyHovered |= hoverToggle(sub);\n }\n }\n sf::FloatRect bounds = t.text.getGlobalBounds();\n if (t.parent != nullptr){\n bounds.width += 40;\n bounds.height += 40;\n }\n if (anyHovered || pointInBox(sf::Vector2f(pos.x, pos.y), bounds)){\n t.hovered = true;\n return true;\n }\n t.hovered = false;\n return false;\n };\n\n for (auto &toggle : simulationInfo.debug){\n bool h = hoverToggle(toggle);\n if (h && toggle.text.getString().substring(0, 5) == \"graph\") {\n for (auto& c : simulationInfo.debug){\n if (c.text.getString() != toggle.text.getString() && c.text.getString().substring(0, 5) == \"graph\"){\n c.hovered = false;\n }\n }\n }\n }\n }\n\n if (selectedAgent){\n std::size_t perceptVectorPos = agentInfo.perceptVector.hover(pos);\n if (perceptVectorPos != std::numeric_limits<std::size_t>::max()){\n tooltip.active = true;\n tooltip.pos = pos;\n tooltip.text = agentInfo.perceptLabels.at(perceptVectorPos);\n return true;\n }\n\n std::size_t actionVectorPos = agentInfo.actionVector.hover(pos);\n if (actionVectorPos!= std::numeric_limits<std::size_t>::max()){\n tooltip.active = true;\n tooltip.pos = pos;\n tooltip.text = agentInfo.actionLabels.at(actionVectorPos);\n return true;\n }\n }\n\n tooltip.active = false;\n return false;\n}\n\nvoid GUI::LineGraph::update(const World *world){\n auto &stats = world->getHistoricalStatistics();\n if (stats.empty())\n return;\n\n if (stats.size() <= verts.getVertexCount()){\n verts.clear();\n lastUpdateFrame = 0;\n }\n\n for (std::size_t i = lastUpdateFrame; i < stats.size(); i++){\n const auto &s = stats[i];\n float x, y;\n x = 
s.timestamp;\n if (name == \"population\")\n y = s.populationCount;\n else if (name == \"mean gen.\")\n y = (float) std::accumulate(std::begin(s.generation), std::end(s.generation), 0,\n [](float acc, const WorldStatistics::ColorValue& value){ return acc+value.value; })\n / s.populationCount;\n else if (name == \"mean perceptrons\")\n y = (float) std::accumulate(std::begin(s.perceptrons), std::end(s.perceptrons), 0,\n [](float acc, const WorldStatistics::ColorValue& value){ return acc+value.value; })\n / s.populationCount;\n else if (name == \"mean age\")\n y = (float) std::accumulate(std::begin(s.age), std::end(s.age), 0,\n [](float acc, const WorldStatistics::ColorValue& value){ return acc+value.value; })\n / s.populationCount;\n else if (name == \"mean children\")\n y = (float) std::accumulate(std::begin(s.children), std::end(s.children), 0,\n [](float acc, const WorldStatistics::ColorValue& value){ return acc+value.value; })\n / s.populationCount;\n else if (name == \"mean murders\")\n y = (float) std::accumulate(std::begin(s.murders), std::end(s.murders), 0,\n [](float acc, const WorldStatistics::ColorValue& value){ return acc+value.value; })\n / s.populationCount;\n else if (name == \"mean energy\")\n y = (float) std::accumulate(std::begin(s.energy), std::end(s.energy), 0,\n [](float acc, const WorldStatistics::ColorValue& value){ return acc+value.value; })\n / s.populationCount;\n else if (name == \"mean mushrooms\")\n y = (float) std::accumulate(std::begin(s.mushrooms), std::end(s.mushrooms), 0,\n [](float acc, const WorldStatistics::ColorValue& value){ return acc+value.value; })\n / s.populationCount;\n else if (name == \"mean speed\")\n y = (float) std::accumulate(std::begin(s.speed), std::end(s.speed), 0,\n [](float acc, const WorldStatistics::ColorValue& value){ return acc+value.value; })\n / s.populationCount;\n else\n throw std::runtime_error(\"Line Graph datum \"+name+\" doesn't exist\");\n\n y *= -1;\n min = sf::Vector2f(std::min(min.x, x), std::min(min.y, y));\n max = sf::Vector2f(std::max(max.x, x), std::max(max.y, y));\n\n verts.append(sf::Vertex({x, y}, color));\n }\n lastUpdateFrame = stats.size();\n}\n\nvoid GUI::LineGraph::draw(sf::RenderWindow *window, const sf::Vector2f orgSize) {\n // Set all the vertex data\n verts.setPrimitiveType(sf::LineStrip);\n\n // Draw graph\n sf::View view;\n view.reset(sf::FloatRect(sf::Vector2f(-2, -2) + min, max-min + sf::Vector2f(2, 2) ));\n view.setViewport(sf::FloatRect(0.1, 0.75, 0.8, 0.24));\n auto oldView = window->getView();\n window->setView(view);\n window->draw(verts);\n\n // Draw value in end\n sf::Vector2f lastPos = verts[verts.getVertexCount()-1].position;\n lastPos = view.getTransform().transformPoint(lastPos);\n lastPos = sf::Vector2f(lastPos.x*view.getViewport().width, -lastPos.y*view.getViewport().height);\n lastPos += sf::Vector2f(view.getViewport().left, view.getViewport().top+view.getViewport().height);\n lastPos = sf::Vector2f(lastPos.x*orgSize.x, lastPos.y*orgSize.y);\n if (!valueText.getString().isEmpty() && lastPos.x == lastPos.x && lastPos.y == lastPos.y){\n valueText.setPosition(lastPos);\n valueText.setString(std::to_string((int) verts[verts.getVertexCount()-1].position.y)+\" \"+name);\n window->draw(valueText);\n }\n window->setView(oldView);\n}\n\nvoid GUI::Spectrogram::update(const World *world) {\n auto &stats = world->getHistoricalStatistics();\n if (stats.empty())\n return;\n\n if (stats.size() < currentSize.x){\n spectrogram.clear();\n }\n\n if (spectrogram.getN() == 0 && spectrogram.getM() == 0){\n // 
Allocated the whole spectrogram\n spectrogram = Contiguous2dVector(downsamplingTriggerW, downsamplingTriggerH,\n sf::Color(0, 0, 0, 0));\n currentSize = sf::Vector2u(0, startHeight);\n lastUpdateFrame = 0;\n perRow = 1;\n perColumn = 1; columnCounter = 0;\n }\n\n std::size_t j = 0;\n for (std::size_t i = lastUpdateFrame; i < stats.size(); i++){\n const auto &s = stats.at(i);\n lastUpdateFrame++; j++;\n if (16 < j)\n break;\n\n std::vector<WorldStatistics::ColorValue> values;\n values.resize(s.populationCount);\n\n if (name == \"generation\")\n values = s.generation;\n else if (name == \"perceptrons\")\n values = s.perceptrons;\n else if (name == \"age\")\n values = s.age;\n else if (name == \"children\")\n values = s.children;\n else if (name == \"murders\")\n values = s.murders;\n else if (name == \"energy\")\n values = s.energy;\n else if (name == \"mushrooms\")\n values = s.mushrooms;\n else\n throw std::runtime_error(\"Spectrogram Graph datum \"+name+\" doesn't exist\");\n\n auto minIt = std::min_element(std::begin(values), std::end(values),\n [](const WorldStatistics::ColorValue& a, const WorldStatistics::ColorValue& b){\n return a.value < b.value;\n });\n\n auto maxIt = std::max_element(std::begin(values), std::end(values),\n [](const WorldStatistics::ColorValue& a, const WorldStatistics::ColorValue& b){\n return a.value < b.value;\n });\n\n if (minIt != std::end(values))\n minVal = std::fminf(minVal, float(*minIt));\n if (maxIt != std::end(values))\n maxVal = std::fmaxf(maxVal, float(*maxIt));\n\n newValues.push_back(values);\n }\n \n // Lambda for drawing in spectrogram\n auto mark = [](std::vector<std::array<unsigned, 4>> &vec,\n std::vector<float> &totals,\n const float ind, const sf::Color color,\n const float opacity, const unsigned size) {\n\n for (std::size_t i = ind-std::floor((float) size/2); i < ind+std::ceil((float) size/2); i++) {\n try {\n float dist = 1.5f/(fabsf(ind-i)+1.5f);\n vec.at(i).at(0) += color.r;\n vec.at(i).at(1) += color.g;\n vec.at(i).at(2) += color.b;\n vec.at(i).at(3) += opacity*255.f*dist;\n totals.at(i) += 1.0f;\n } catch (const std::out_of_range& e) {};\n }\n };\n\n // Go through each value and draw\n for (auto& values : newValues){\n unsigned currentY = (maxVal-minVal) * stride / (float) perRow;\n if (downsamplingTriggerH <= currentY){\n // Half existing column\n printf(\"Halving color column\\n\");\n std::vector<sf::Color> newColColumn(std::ceil(colorColumn.size()/2.f));\n std::vector<unsigned> newColCount(std::ceil(colorColumnCount.size()/2.f));\n for (std::size_t y = 0; y < newColColumn.size(); y++) {\n unsigned r = 0, g = 0, b = 0, a = 0;\n\n float total = 0;\n std::vector<sf::Color> colors = {colorColumn.at(y*2)};\n std::vector<unsigned> colorsCount = {colorColumnCount.at(y*2)};\n if (y*2+1 < colorColumn.size()){\n colors.push_back(colorColumn.at(y*2+1));\n colorsCount.push_back(colorColumnCount.at(y*2+1));\n }\n else {\n printf(\"Avoid color %zu\\n\", y*2+1);\n }\n\n\n for (auto &c : colors){\n if (c.r == 0 && c.g == 0 && c.b == 0 && c.a == 0){\n total += 0;\n }\n else {\n r += c.r;\n g += c.g;\n b += c.b;\n a += c.a;\n total += 1;\n }\n }\n\n\n unsigned colCount = 0;\n for (auto &c : colorsCount){\n colCount += c;\n }\n\n r /= total; g /= total;\n b /= total; a /= total;\n newColColumn.at(y) = sf::Color(r, g, b, a);\n newColCount.at(y) = colCount;\n }\n\n printf(\"Halfing spectrogram height\\n\");\n // Half height\n auto newSpec = Contiguous2dVector(spectrogram.getN(), spectrogram.getM(), spectrogram.getFillValue());\n for (std::size_t x = 0; 
x < currentSize.x; x++) {\n for (std::size_t y = 0; y < std::ceil(newSpec.getM()/2)-1; y++) {\n unsigned r = 0, g = 0, b = 0, a = 0;\n float total = 0;\n std::vector<sf::Color> colors;\n try {\n colors = {spectrogram.at(x, y*2)};\n } catch (const std::out_of_range &e){\n printf(\"Is outarange a\\n\");\n }\n if (y*2+1 < spectrogram.getM()){\n try {\n colors.push_back(spectrogram.at(x, y * 2 + 1));\n } catch (const std::out_of_range &e) {\n printf(\"Outarange b\\n\");\n }\n }\n for (auto &c : colors){\n if (c.r == 0 && c.g == 0 && c.b == 0 && c.a == 0){\n total += 0;\n }\n else {\n r += c.r;\n g += c.g;\n b += c.b;\n a += c.a;\n total += 1;\n }\n }\n r /= total; g /= total;\n b /= total; a /= total;\n try {\n newSpec.at(x, y) = sf::Color(r, g, b, a);\n } catch (const std::out_of_range &e){\n printf(\"IS outarange b\\n\");\n }\n }\n }\n\n spectrogram = newSpec;\n perRow *= 2;\n currentSize.y = std::ceil(downsamplingTriggerH/2.0);\n\n }\n try {\n std::vector<std::array<unsigned, 4>> column(currentY);\n\n std::vector<float> totals(column.size(), 0.f);\n // Global color column has to be of sufficient size\n if (colorColumn.size() < column.size()) {\n colorColumn.resize(column.size(), sf::Color(0, 0, 0, 0));\n colorColumnCount.resize(column.size(), 0);\n }\n\n // Draw values to column\n for (const auto &value : values) {\n std::size_t ind = (value.value - minVal) * (float) stride / (float) perRow;\n mark(column, totals, ind, value.color, .8f, markerWidth);\n }\n\n // Divide in column to average colors\n for (std::size_t i = 0; i < column.size(); i++) {\n column.at(i).at(0) /= totals.at(i);\n column.at(i).at(1) /= totals.at(i);\n column.at(i).at(2) /= totals.at(i);\n column.at(i).at(3) /= totals.at(i);\n colorColumnCount.at(i)++;\n }\n\n // Add to spectrogram\n columnCounter++;\n if (columnCounter == perColumn) {\n // Rescale the column if it too big\n for (std::size_t i = 0; i < column.size(); i++) {\n if (colorColumnCount.at(i) != 0) {\n colorColumn.at(i).r += column.at(i).at(0);\n colorColumn.at(i).g += column.at(i).at(1);\n colorColumn.at(i).b += column.at(i).at(2);\n colorColumn.at(i).a += column.at(i).at(3);\n colorColumnCount.at(i) = 0;\n }\n }\n\n auto p = spectrogram.at(currentSize.x);\n auto itCol = p.first;\n for (auto it = std::begin(colorColumn); it != std::end(colorColumn); it++) {\n *itCol = *it;\n itCol++;\n }\n\n currentSize.x += 1;\n currentSize.y = std::max((std::size_t) currentSize.y, colorColumn.size());\n colorColumn.clear();\n colorColumnCount.clear();\n columnCounter = 0;\n }\n\n // Check if spectrogram is too wide\n if (spectrogram.getN() <= currentSize.x) {\n // Half width\n auto newSpec = Contiguous2dVector(spectrogram.getN(), spectrogram.getM(), spectrogram.getFillValue());\n for (std::size_t x = 0; x < std::ceil(newSpec.getN() / 2.0); x++) {\n for (std::size_t y = 0; y < newSpec.getM(); y++) {\n unsigned r = 0, g = 0, b = 0, a = 0;\n float total = 0;\n for (auto &c : {spectrogram.at(x * 2, y), spectrogram.at(x * 2 + 1, y)}) {\n if (c.r == 0 && c.g == 0 && c.b == 0 && c.a == 0) {\n total += 0;\n } else {\n r += c.r;\n g += c.g;\n b += c.b;\n a += c.a;\n total += 1;\n }\n }\n r /= total;\n g /= total;\n b /= total;\n a /= total;\n newSpec.at(x, y) = sf::Color(r, g, b, a);\n }\n }\n\n spectrogram = newSpec;\n perColumn *= 2;\n currentSize.x = std::ceil(currentSize.x / 2.0);\n }\n } catch (const std::out_of_range &e){\n printf(\"Other outarange\\n\");\n }\n }\n\n newValues.clear();\n}\n\nvoid GUI::Spectrogram::draw(sf::RenderWindow *window, const sf::Vector2f orgSize) {\n 
if (currentSize.x == 0 || currentSize.y == 0)\n return;\n\n sf::Image image;\n image.create(currentSize.x, currentSize.y);\n\n for (std::size_t x = 0; x < image.getSize().x; x++){\n for (std::size_t y = 0; y < image.getSize().y; y++){\n image.setPixel(x, y, spectrogram.at(x, y));\n }\n }\n\n sf::Texture texture;\n texture.loadFromImage(image);\n auto sprite = sf::Sprite(texture);\n\n sf::View view;\n view.reset(sf::FloatRect(0, 0, currentSize.x, currentSize.y));\n view.setViewport(sf::FloatRect(0.1, 0.75, 0.8, 0.24));\n view.setSize(view.getSize().x, -view.getSize().y);\n auto oldView = window->getView();\n window->setView(view);\n\n window->draw(sprite);\n window->setView(oldView);\n}\n\nGUI::Toggle::Toggle(const std::string& name, bool *value, std::vector<Toggle> subToggles, sf::Color color) :\ncolor(color){\n text.setString(name);\n text.setCharacterSize(20);\n\n Toggle::value = value;\n Toggle::subToggles = std::move(subToggles);\n Toggle::hovered = false;\n Toggle::parent = nullptr;\n\n update();\n}\n\nGUI::Toggle::Toggle(const std::string& name, bool *value, Toggle* parent, sf::Color color) :\nToggle(name, value, std::vector<Toggle>(), color) {\n Toggle::parent = parent;\n}\n\nvoid GUI::Toggle::click() {\n *value = !(*value);\n update();\n}\n\nvoid GUI::Toggle::set(bool v) {\n *value = v;\n update();\n}\n\nvoid GUI::Toggle::update() {\n if (*value){\n text.setFillColor(color);\n if (parent != nullptr){\n if (parent->exclusiveSubs){\n for (auto &sub : parent->subToggles){\n if (&sub != this){\n sub.set(false);\n }\n }\n }\n }\n }\n else {\n text.setFillColor(sf::Color(80, 80, 80));\n for (auto &sub : subToggles){\n sub.hovered = false;\n }\n }\n}\n\nstd::size_t GUI::VectorRenderer::hover(sf::Vector2i pos) {\n for (std::size_t i = 0; i < rectangles.size(); i++){\n if (pointInBox(sf::Vector2f(pos.x, pos.y), rectangles.at(i).getGlobalBounds())){\n return i;\n }\n }\n return std::numeric_limits<std::size_t>::max();\n}\n\nvoid GUI::VectorRenderer::draw(sf::RenderWindow *window, const std::vector<float> &vec, std::size_t selectedIndex) {\n if (vec.size() != rectangles.size()){\n rectangles.clear();\n const auto c = vec.size();\n rectangles.reserve(c);\n for (std::size_t i = 0; i < c; i++){\n rectangles.emplace_back(sf::Vector2f(bounds.width/c, bounds.height));\n rectangles.back().setPosition(bounds.left + i*(bounds.width/c+2.f), bounds.top);\n rectangles.back().setOutlineThickness(1);\n rectangles.back().setOutlineColor(sf::Color(50, 50, 50));\n }\n }\n\n for (std::size_t i = 0; i < vec.size(); i++ ) {\n const float a = vec.at(i);\n rectangles.at(i).setFillColor(sf::Color(a * 200, a * 200, a * 200));\n if (i == selectedIndex){\n rectangles.at(i).setOutlineColor(sf::Color::Red);\n }\n window->draw(rectangles.at(i));\n if (i == selectedIndex){\n rectangles.at(i).setOutlineColor(sf::Color(50, 50, 50));\n }\n }\n}\n\nvoid GUI::VectorRenderer::drawCorr(sf::RenderWindow *window, const std::vector<float> &vec, size_t selectedIndex) {\n if (vec.size() != rectangles.size()){\n rectangles.clear();\n const auto c = vec.size();\n rectangles.reserve(c);\n for (std::size_t i = 0; i < c; i++){\n rectangles.emplace_back(sf::Vector2f(bounds.width/c, bounds.height));\n rectangles.back().setPosition(bounds.left + i*(bounds.width/c+2.f), bounds.top);\n rectangles.back().setOutlineThickness(1);\n rectangles.back().setOutlineColor(sf::Color(50, 50, 50));\n }\n }\n\n for (std::size_t i = 0; i < vec.size(); i++ ){\n const float val = (vec.at(i) + 1.f)/2.f;\n const float o = std::fabs(vec.at(i));\n 
rectangles.at(i).setFillColor(sf::Color(val*200, val*200, val*200, o*255));\n if (i == selectedIndex){\n rectangles.at(i).setOutlineColor(sf::Color::Red);\n }\n window->draw(rectangles.at(i));\n if (i == selectedIndex){\n rectangles.at(i).setOutlineColor(sf::Color(50, 50, 50));\n }\n }\n}\n\n"
},
{
"alpha_fraction": 0.7166666388511658,
"alphanum_fraction": 0.7333333492279053,
"avg_line_length": 17.620689392089844,
"blob_id": "341616cd9f54818bb0da9c078c9e421026631365",
"content_id": "dc75a873765904c5bfa04a9527242eb021578900",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 540,
"license_type": "permissive",
"max_line_length": 68,
"num_lines": 29,
"path": "/BouncingBall.h",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2018-11-22.\n//\n\n#ifndef FAMILYISEVERYTHING_BOUNCINGBALLS_H\n#define FAMILYISEVERYTHING_BOUNCINGBALLS_H\n\n#include \"WorldObject.h\"\n\n#include <SFML/Graphics.hpp>\n\n\nclass BouncingBall : public WorldObject {\npublic:\n BouncingBall(World *world, sf::Vector2f position, float radius);\n\n void update(float deltaTime) override;\n\n void draw(sf::RenderWindow *window, float deltaTime) override;\n\n float getRadius();\n\nprivate:\n float radius;\n sf::CircleShape c;\n};\n\n\n#endif //FAMILYISEVERYTHING_BOUNCINGBALLS_H\n"
},
{
"alpha_fraction": 0.6767123341560364,
"alphanum_fraction": 0.6910958886146545,
"avg_line_length": 24.172412872314453,
"blob_id": "c05ae55b683ef78ecfa78ba7eb0a90c1896fee22",
"content_id": "47dee8e5992386204db862b7e4fe67affd74acc3",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 1460,
"license_type": "permissive",
"max_line_length": 139,
"num_lines": 58,
"path": "/Quadtree.h",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2018-11-21.\n//\n\n#include <memory>\n#include <vector>\n#include <array>\n#include <cmath>\n#include <SFML/Graphics.hpp>\n\n#include \"WorldObject.h\"\n#include \"utils.cpp\"\n\n#ifndef FAMILYISEVERYTHING_QUADTREE_H\n#define FAMILYISEVERYTHING_QUADTREE_H\n\ntemplate<class T>\nclass Quadtree {\npublic:\n Quadtree(sf::Vector2<T> topLeft, sf::Vector2<T> dimensions);\n\n\n std::array<Quadtree<T> *, 4> getQuads();\n bool hasQuads();\n\n sf::Vector2<T> getPosition();\n sf::Vector2<T> getDimensions();\n\n T getLimit();\n void setLimit(T l);\n\n std::vector<std::shared_ptr<WorldObject> > searchNear(sf::Vector2<T> position, float distance) const; // TODO: do same as below\n void searchNearLine(std::vector<std::shared_ptr<WorldObject> > &wobjs, const sf::Vector2<T> &lineStart, const sf::Vector2<T> &lineEnd);\n bool contains(sf::Vector2<T> position);\n\n unsigned long long getSubNodeCount();\n std::vector<std::shared_ptr<WorldObject> > getNodes();\n\n bool add(std::shared_ptr<WorldObject> worldObject);\n bool remove(WorldObject *worldObject);\n\n bool move(sf::Vector2f oldPosition, WorldObject *worldObject);\n\n void draw(sf::RenderWindow *window, bool entities);\n\n\nprivate:\n sf::Vector2<T> topLeft;\n sf::Vector2<T> dimensions;\n T limit;\n std::array<std::unique_ptr<Quadtree<T> >, 4> quads;\n bool quadsCreated;\n\n std::vector<std::shared_ptr<WorldObject> > nodes;\n};\n\n\n#endif //FAMILYISEVERYTHING_QUADTREE_H\n"
},
{
"alpha_fraction": 0.5321664810180664,
"alphanum_fraction": 0.5463576316833496,
"avg_line_length": 29.200000762939453,
"blob_id": "ddd453325c80dd3cc872706843344a225f45446b",
"content_id": "e2347bf001711962e4dc4a2c7939717e67f9ed85",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2114,
"license_type": "permissive",
"max_line_length": 102,
"num_lines": 70,
"path": "/BouncingBall.cpp",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2018-11-22.\n//\n\n#include \"BouncingBall.h\"\n\n#include \"Quadtree.h\"\n#include \"World.h\"\n\nBouncingBall::BouncingBall(World *world, sf::Vector2f position, float radius)\n: WorldObject(\"BouncingBall\", world, position, true) {\n this->radius = radius;\n setBounds(sf::IntRect(-radius, -radius, radius, radius));\n\n c.setRadius(radius);\n c.setFillColor(sf::Color::Magenta);\n c.setOrigin(c.getRadius(), c.getRadius());\n}\n\nvoid BouncingBall::update(float deltaTime) {\n WorldObject::update(deltaTime);\n\n auto newPos = getPosition();\n if (world->getDimensions().x < newPos.x + radius || newPos.x < radius) {\n velocity.x *= -1.f;\n }\n\n if (world->getDimensions().y < newPos.y + radius || newPos.y < radius) {\n velocity.y *= -1.f;\n }\n\n if (quadtree != nullptr) {\n auto nl = quadtree->searchNear(newPos, 4*radius);\n c.setFillColor(sf::Color(20, 20, 20));\n for (auto &n : nl) {\n if (n.get() != this && typeid(*(n.get())) == typeid(BouncingBall)) {\n sf::Vector2f v = n->getPosition() - position;\n if (sqrtf(v.x * v.x + v.y * v.y) < ((BouncingBall *) n.get())->getRadius() + radius) {\n //c.setFillColor(sf::Color::Green);\n }\n }\n else if (n.get() != this){\n sf::FloatRect a(position.x-radius, position.y-radius, 2*radius, 2*radius);\n\n sf::FloatRect b(n->getPosition().x + n->getBounds().left,\n n->getPosition().y + n->getBounds().top,\n n->getBounds().width - n->getBounds().left,\n n->getBounds().height - n->getBounds().top);\n c.setFillColor(sf::Color(100, 100, 100));\n if (boxesIntersect(a, b)){\n c.setFillColor(sf::Color::Cyan);\n }\n\n }\n\n }\n }\n\n\n}\n\nvoid BouncingBall::draw(sf::RenderWindow *window, float deltaTime) {\n c.setPosition(getPosition());\n window->draw(c);\n WorldObject::draw(window, deltaTime);\n}\n\nfloat BouncingBall::getRadius() {\n return radius;\n}\n"
},
{
"alpha_fraction": 0.6918367147445679,
"alphanum_fraction": 0.718367338180542,
"avg_line_length": 16.5,
"blob_id": "dcec5cdfdfb13207da5a83616018ec70c9b75b18",
"content_id": "137b88113f4eab2effe2b0afd6940ad03b0697d4",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 490,
"license_type": "permissive",
"max_line_length": 59,
"num_lines": 28,
"path": "/MarkovNames.h",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2019-01-08.\n//\n\n#ifndef HUNTERGATHERERS_MARKOVNAMES_H\n#define HUNTERGATHERERS_MARKOVNAMES_H\n\n\n#include \"json/json.hpp\"\n\nclass MarkovNames {\npublic:\n MarkovNames(const bool random, unsigned long seed);\n\n static void loadResources();\n std::string generate(const std::vector<double> genome);\n\nprivate:\n const bool random;\n\n static bool loaded;\n static nlohmann::json chain;\n\n std::mt19937 randomEngine;\n};\n\n\n#endif //HUNTERGATHERERS_MARKOVNAMES_H\n"
},
{
"alpha_fraction": 0.5127536058425903,
"alphanum_fraction": 0.5303120017051697,
"avg_line_length": 27.969072341918945,
"blob_id": "d3c8c2a9dd433a91edbbcfc263e5abfcf8d043b9",
"content_id": "c6e2a65badcb518db47ee079c2412c51c67ff1ea",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 8429,
"license_type": "permissive",
"max_line_length": 149,
"num_lines": 291,
"path": "/Quadtree.cpp",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2018-11-21.\n//\n\n#include \"Quadtree.h\"\n\ntemplate<class T>\nQuadtree<T>::Quadtree(sf::Vector2<T> topLeft, sf::Vector2<T> dimensions) {\n this->topLeft = topLeft;\n this->dimensions = dimensions;\n limit = 10;\n quadsCreated = false;\n}\n\ntemplate<class T>\nsf::Vector2<T> Quadtree<T>::getPosition() {\n return topLeft;\n}\n\ntemplate<class T>\nsf::Vector2<T> Quadtree<T>::getDimensions() {\n return dimensions;\n}\n\ntemplate<class T>\nstd::array<Quadtree<T> *, 4> Quadtree<T>::getQuads() {\n std::array<Quadtree<T> *, 4> regularPointers{};\n for (int i = 0; i < 4; i++) {\n regularPointers[i] = quads[i].get();\n }\n return regularPointers;\n}\n\ntemplate<class T>\nstd::vector<std::shared_ptr<WorldObject> > Quadtree<T>::getNodes() {\n return nodes;\n}\n\ntemplate<class T>\nunsigned long long Quadtree<T>::getSubNodeCount() {\n unsigned long long c = 0;\n if (quadsCreated) {\n for (int i = 0; i < 4; i++) {\n c += quads[i]->getSubNodeCount();\n }\n }\n return c + nodes.size();\n}\n\ntemplate<class T>\nT Quadtree<T>::getLimit() {\n return limit;\n}\n\ntemplate<class T>\nvoid Quadtree<T>::setLimit(T l) {\n limit = l;\n}\n\ntemplate<class T>\nbool Quadtree<T>::hasQuads() {\n return quadsCreated;\n}\n\ntemplate<class T>\nbool Quadtree<T>::add(std::shared_ptr<WorldObject> worldObject) {\n // Return false because point is outside quadtree\n if (!contains(worldObject->getPosition())) {\n return false;\n }\n\n // If there are no child-quads\n if (not quadsCreated) {\n // Add node here if the tree can't be divided further\n if (dimensions.x / 2 < limit || dimensions.y / 2 < limit || nodes.empty()) {\n nodes.push_back(worldObject);\n return true;\n }\n // Divide the tree\n else {\n // Top left\n quads[0] = std::unique_ptr<Quadtree<T> >(new Quadtree(topLeft, dimensions / (T) 2));\n quads[0]->setLimit(limit);\n // Top right\n quads[1] = std::unique_ptr<Quadtree<T> >(\n new Quadtree(topLeft + sf::Vector2<T>(dimensions.x / 2, 0), dimensions / (T) 2));\n quads[1]->setLimit(limit);\n // Bottom left\n quads[2] = std::unique_ptr<Quadtree<T> >(\n new Quadtree(topLeft + sf::Vector2<T>(0, dimensions.y / 2), dimensions / (T) 2));\n quads[2]->setLimit(limit);\n // Bottom right\n quads[3] = std::unique_ptr<Quadtree<T> >(\n new Quadtree(topLeft + sf::Vector2<T>(dimensions.x / 2, dimensions.y / 2), dimensions / (T) 2));\n quads[3]->setLimit(limit);\n\n quadsCreated = true;\n\n for (auto &node : nodes) {\n for (int i = 0; i < 4; i++) {\n if (quads[i]->add(node)) {\n break;\n }\n }\n }\n nodes.clear();\n\n }\n }\n\n // Add the node\n for (int i = 0; i < 4; i++) {\n if (quads[i]->add(worldObject)) {\n return true;\n }\n }\n\n return false;\n}\n\ntemplate<class T>\nbool Quadtree<T>::remove(WorldObject *worldObject) {\n // Does not contain\n if (!contains(worldObject->getPosition())) {\n return false;\n }\n\n // Does contain\n if (not quadsCreated) {\n for (int i = 0; i < nodes.size(); i++) {\n if (nodes.at(i).get() == worldObject) {\n nodes.erase(nodes.begin() + i);\n return true;\n }\n }\n return false;\n }\n\n // Remove from quads\n bool removed = false;\n for (int i = 0; i < 4; i++) {\n if (quads[i]->remove(worldObject)) {\n removed = true;\n break;\n }\n }\n\n if (removed) {\n // Check if it is even worth having quads, or if they should be deconstructed\n if (quadsCreated and getSubNodeCount() <= 1) {\n quadsCreated = false;\n for (int i = 0; i < 4; i++) {\n for (auto &node : quads[i]->getNodes()) {\n nodes.push_back(node);\n }\n quads[i].reset();\n }\n }\n return true;\n }\n\n return 
false;\n}\n\ntemplate<class T>\nbool Quadtree<T>::move(sf::Vector2f oldPosition, WorldObject *worldObject) {\n // Does not contain\n if (!contains(oldPosition)) {\n return false;\n }\n\n if (not quadsCreated) {\n for (unsigned long long int i = 0; i < nodes.size(); i++) {\n if (nodes.at(i).get() == worldObject) {\n if (!contains(worldObject->getPosition())) {\n nodes.erase(nodes.begin() + i);\n return true;\n }\n return false;\n }\n }\n return false;\n }\n\n // Remove from quads\n for (int i = 0; i < 4; i++) {\n if (quads[i]->move(oldPosition, worldObject)) {\n if (add(worldObject->getSharedPtr())) {\n return false;\n } else {\n // Check if it is even worth having quads, or if they should be deconstructed\n if (quadsCreated and getSubNodeCount() <= 1) {\n quadsCreated = false;\n for (int j = 0; j < 4; j++){\n for (auto &node : quads[j]->getNodes()) {\n nodes.push_back(node);\n }\n quads[j].reset();\n }\n }\n\n return true;\n }\n }\n }\n\n return false;\n}\n\ntemplate<class T>\nbool Quadtree<T>::contains(sf::Vector2<T> position) {\n return topLeft.x < position.x && topLeft.y < position.y\n && position.x <= topLeft.x + dimensions.x && position.y <= topLeft.y + dimensions.y;\n}\n\n\ntemplate<class T>\nstd::vector<std::shared_ptr<WorldObject> >\nQuadtree<T>::searchNear(sf::Vector2<T> position, float distance) const {\n if (boxesIntersect(sf::FloatRect(topLeft.x, topLeft.y, dimensions.x, dimensions.y),\n sf::FloatRect(position.x-distance, position.y-distance, 2*distance, 2*distance))) {\n std::vector<std::shared_ptr<WorldObject> > n1 = nodes;\n\n if (quadsCreated) {\n for (int i = 0; i < 4; i++) {\n std::vector<std::shared_ptr<WorldObject> > n2 = quads[i]->searchNear(position, distance);\n n1.insert(n1.end(), n2.begin(), n2.end());\n }\n }\n\n\n return n1;\n }\n return std::vector<std::shared_ptr<WorldObject> >();\n}\n\ntemplate<class T>\nvoid Quadtree<T>::searchNearLine(std::vector<std::shared_ptr<WorldObject> > &wobjs, const sf::Vector2<T> &lineStart, const sf::Vector2<T> &lineEnd) {\n if (lineIntersectWithBox(lineStart, lineEnd, topLeft, dimensions)) {\n if (!nodes.empty()){\n wobjs.insert(wobjs.end(), nodes.begin(), nodes.end());\n }\n\n if (quadsCreated) {\n for (int i = 0; i < 4; i++) {\n quads[i]->searchNearLine(wobjs, lineStart, lineEnd);\n }\n }\n\n\n }\n}\n\ntemplate<class T>\nvoid Quadtree<T>::draw(sf::RenderWindow *window, bool entities) {\n sf::CircleShape c;\n c.setRadius(3);\n c.setFillColor(sf::Color::White);\n c.setOrigin(c.getRadius(), c.getRadius());\n\n if (entities) {\n for (const std::shared_ptr<WorldObject> &wo : getNodes()) {\n c.setPosition(sf::Vector2f(wo->getPosition()));\n window->draw(c);\n }\n }\n sf::VertexArray rect(sf::LineStrip, 5);\n rect[0].position = sf::Vector2f(getPosition());\n rect[1].position = sf::Vector2f(getPosition()) + sf::Vector2f(0, getDimensions().y);\n rect[2].position = sf::Vector2f(getPosition()) + sf::Vector2f(getDimensions().x, getDimensions().y);\n rect[3].position = sf::Vector2f(getPosition()) + sf::Vector2f(getDimensions().x, 0);\n rect[4].position = sf::Vector2f(getPosition());\n rect[0].color = sf::Color(100, 100, 100);\n rect[1].color = sf::Color(100, 100, 100);\n rect[2].color = sf::Color(100, 100, 100);\n rect[3].color = sf::Color(100, 100, 100);\n rect[4].color = sf::Color(100, 100, 100);\n window->draw(rect);\n\n if (quadsCreated) {\n for (int i = 0; i < 4; i++) {\n quads[i]->draw(window, entities);\n }\n }\n\n}\n\n\n//template class Quadtree<int>; <-- Triggers division errors\ntemplate class Quadtree<float>;\n//template class 
Quadtree<unsigned long>; <-- Triggers division errors"
},
{
"alpha_fraction": 0.7971163988113403,
"alphanum_fraction": 0.8053553104400635,
"avg_line_length": 59.5625,
"blob_id": "add023dad51b545722a777437e7be58b5c6ed53a",
"content_id": "91a02e7868343568823fb763f958f0a4077f6071",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "CMake",
"length_bytes": 971,
"license_type": "permissive",
"max_line_length": 394,
"num_lines": 16,
"path": "/CMakeLists.txt",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "cmake_minimum_required(VERSION 3.12)\nproject(HunterGatherers)\n\nset(CMAKE_CXX_STANDARD 17)\n\nfind_package(SFML 2.5.1 COMPONENTS graphics audio REQUIRED)\n\nfind_package(OpenCL REQUIRED)\n\nadd_executable(HunterGatherers main.cpp Quadtree.cpp Quadtree.h WorldObject.cpp WorldObject.h Agent.cpp Agent.h BouncingBall.cpp BouncingBall.h utils.cpp Camera.cpp Camera.h Config.h World.cpp World.h OpenCL_Wrapper.cpp OpenCL_Wrapper.h Gene.cpp Gene.h Populator.cpp Populator.h Mushroom.cpp Mushroom.h Heart.cpp Heart.h GUI.cpp GUI.h MarkovNames.cpp MarkovNames.h Config.cpp Skull.cpp Skull.h)\ntarget_link_libraries(HunterGatherers sfml-graphics sfml-audio)\ntarget_link_libraries(HunterGatherers OpenCL::OpenCL)\n\nfile(COPY \"resources\" DESTINATION \"${CMAKE_BINARY_DIR}\")\nfile(COPY Config.json DESTINATION \"${CMAKE_BINARY_DIR}\")\n#add_custom_command(TARGET HunterGatherers PRE_BUILD COMMAND ${CMAKE_COMMAND} -E copy_directory ${CMAKE_SOURCE_DIR}/cl $<TARGET_FILE_DIR:HunterGatherers>/cl)\n\n\n"
},
{
"alpha_fraction": 0.6977011561393738,
"alphanum_fraction": 0.7008620500564575,
"avg_line_length": 24.03597068786621,
"blob_id": "91ad88be7a0501a6fbbc97093c7323adb5ad07c4",
"content_id": "f3a40d3fd9ad473b9614935b2e228d5bc98ef005",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 3480,
"license_type": "permissive",
"max_line_length": 97,
"num_lines": 139,
"path": "/Agent.h",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2018-11-21.\n//\n\n#ifndef FAMILYISEVERYTHING_AGENT_H\n#define FAMILYISEVERYTHING_AGENT_H\n\n#include \"WorldObject.h\"\n#include \"Gene.h\"\n#include \"Config.h\"\n\n#include <SFML/Graphics.hpp>\n#include <vector>\n\nclass Agent : public WorldObject {\npublic:\n Agent(const AgentSettings &settings, World *world, sf::Vector2f position, float orientation);\n Agent(const Agent &other, float mutation);\n\n const AgentSettings &getSettings() const;\n\n const std::string &getName() const;\n\n static void loadResources();\n\n MapGenes *getGenes() const;\n\n unsigned int getGeneration() const;\n void setGeneration(unsigned int generation);\n\n unsigned int getChildCount() const;\n void setChildCount(unsigned int childCount);\n\n unsigned int getMurderCount() const;\n void setMurderCount(unsigned int murderCount);\n\n unsigned int getNewBirths();\n unsigned int getNewMurders();\n\n void update(float deltaTime) override;\n void draw(sf::RenderWindow *window, float deltaTime) override;\n\n void updatePercept(float deltaTime);\n\n const std::vector<float> &getPercept() const;\n void setPercept(const std::vector<float> &percept);\n\n const std::vector<float> &getActions() const;\n void setActions(const std::vector<float> &actions);\n\n float getOrientation() const;\n void setOrientation(float orientation);\n\n float getEnergy() const;\n void setEnergy(float energy);\n\n const std::vector<float> &getReceptors() const;\n const std::vector<float> &getMemory() const;\n\n std::vector<float> getRegressionPercept(unsigned id) const;\n std::vector<float> getRegressionActions(unsigned id) const;\n\n void clearPath();\n void queuePathDraw();\n\n struct Inventory {\n unsigned mushrooms;\n };\n\n const Inventory &getInventory() const;\n void setInventory(const Inventory &inventory);\n\n struct NetworkStatistics {\n unsigned layers;\n unsigned perceptronCount;\n };\n\n const NetworkStatistics &getNetworkStatistics() const;\n void setNetworkStatistics(const NetworkStatistics &networkStatistics);\n\nprivate:\n // General\n const AgentSettings& settings;\n unsigned generation;\n unsigned oldChildCount;\n unsigned childCount;\n unsigned oldMurderCount;\n unsigned murderCount;\n std::string name;\n\n // Input linear regression\n unsigned actionUpdates;\n std::vector<float> perceptMean;\n std::vector<float> actionsMean;\n std::vector<std::vector<float>> varX;\n std::vector<std::vector<float>> covXY;\n void networkRegression();\n\n float orientation; // In degrees\n float energy; // Between 0 and maxEnergy\n float actionCooldown;\n float punchTimer;\n Inventory inventory;\n\n // Rendering\n sf::Sprite sprite;\n\n sf::IntRect frame;\n unsigned frameIndex;\n float frameTimer;\n\n // AI\n std::shared_ptr<MapGenes> genes;\n NetworkStatistics networkStatistics;\n std::vector<float> percept;\n std::vector<float> memory;\n std::vector<float> actions;\n std::vector<float> receptors;\n\n // Agent path\n float pathTimer;\n bool drawPathNextFrame{};\n std::vector<sf::Vertex> path;\n bool alreadyRegularColor{};\n\n // Vision variables\n std::vector<sf::Vertex> lineOfSight;\n sf::Vertex orientationLine[2];\n\n static bool loaded;\n static sf::Texture walkingTexture;\n static sf::Texture punchTexture;\n\n void constructGenome(size_t inputCount, size_t outputCount);\n\n};\n\n\n#endif //FAMILYISEVERYTHING_AGENT_H\n"
},
{
"alpha_fraction": 0.6405770778656006,
"alphanum_fraction": 0.6415994763374329,
"avg_line_length": 41.52656936645508,
"blob_id": "1ae71909c03b94332804eea43da322335ef7f15c",
"content_id": "4cbb109a903220a9db9b94686e1cc5b8cbce6dde",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 8803,
"license_type": "permissive",
"max_line_length": 119,
"num_lines": 207,
"path": "/Config.cpp",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2019-01-10.\n//\n\n#include <chrono>\n#include <fstream>\n\n#include \"Config.h\"\n\nvoid Config::loadConfigFromFile(const std::string &filename) {\n std::ifstream file(filename);\n if (file.fail()){\n throw std::runtime_error(\"Can't open config file: \"+filename+\"\\n\");\n }\n std::printf(\"Loading config file: %s\\n\", filename.c_str());\n\n nlohmann::json json;\n file >> json;\n file.close();\n\n shouldReload = false;\n\n // Loading seed\n\n if (json[\"seed\"].is_string()){\n if (json[\"seed\"].get<std::string>() == \"TIME\"){\n Config::seed = static_cast<unsigned>(std::chrono::high_resolution_clock::now().time_since_epoch().count());\n }\n }\n else {\n Config::seed = json[\"seed\"].get<unsigned>();\n }\n\n // Loading World settings\n auto &WS = json[\"WorldSettings\"];\n auto worldWidth = WS[\"worldWidth\"].get<float>();\n auto worldHeight = WS[\"worldHeight\"].get<float>();\n\n world.mushroomReproductionRate = WS[\"mushroomReproductionRate\"].get<float>();\n world.mushroomReproductionDistance = WS[\"mushroomReproductionDistance\"].get<float>();\n world.mushroomReproductionNearLimit = WS[\"mushroomReproductionNearLimit\"].get<unsigned>();\n\n world.dimensions = {worldWidth, worldHeight};\n world.terrainSquare = WS[\"terrainSquare\"].get<unsigned>();\n world.quadtreeLimit = WS[\"quadtreeLimit\"].get<float>();\n\n auto &populatorEntries = WS[\"PopulatorEntries\"];\n for (auto &entry : populatorEntries){\n auto type = entry[\"type\"].get<std::string>();\n if (world.populatorEntries.find(type) == world.populatorEntries.end()){\n Populator::Entry e = {\n .type = type,\n .count = 0,\n .enabled = true\n };\n world.populatorEntries.insert(std::make_pair(type, e));\n }\n\n auto existingEntry = world.populatorEntries.find(type);\n existingEntry->second.targetCount = entry[\"targetCount\"].get<unsigned>();\n existingEntry->second.rate = entry[\"rate\"].get<float>();\n }\n\n for (auto &entry : world.populatorEntries){\n bool found = false;\n for (auto &configEntry : populatorEntries){\n if (entry.first == configEntry[\"type\"].get<std::string>()){\n found = true;\n break;\n }\n }\n\n if (!found){\n world.populatorEntries.erase(entry.first);\n }\n }\n\n\n // Loading agent settings\n auto &AS = json[\"AgentSettings\"];\n agents.mass = AS[\"mass\"].get<float>();\n agents.friction = AS[\"friction\"].get<float>();\n agents.maxSpeed = AS[\"maxSpeed\"].get<float>();\n agents.turnFactor = AS[\"turnFactor\"].get<float>();\n agents.punchTime = AS[\"punchTime\"].get<float>();\n agents.actionCooldown = AS[\"actionCooldown\"].get<float>();\n\n agents.energyToParent = AS[\"energyToParent\"].get<float>();\n agents.energyToChild = AS[\"energyToChild\"].get<float>();\n agents.energyLossRate = AS[\"energyLossRate\"].get<float>();\n agents.turnRateEnergyLoss = AS[\"turnRateEnergyLoss\"].get<float>();\n agents.movementEnergyLoss = AS[\"movementEnergyLoss\"].get<float>();\n agents.punchEnergy = AS[\"punchEnergy\"].get<float>();\n agents.punchDamage = AS[\"punchDamage\"].get<float>();\n agents.mushroomEnergy = AS[\"mushroomEnergy\"].get<float>();\n agents.maxEnergy = AS[\"maxEnergy\"].get<float>();\n agents.maxMushroomCount = AS[\"maxMushroomCount\"].get<unsigned>();\n\n agents.canReproduce = AS[\"canReproduce\"].get<bool>();\n agents.canWalk = AS[\"canWalk\"].get<bool>();\n agents.canTurn = AS[\"canTurn\"].get<bool>();\n agents.canEat = AS[\"canEat\"].get<bool>();\n agents.canPlace = AS[\"canPlace\"].get<bool>();\n agents.canPunch = AS[\"canPunch\"].get<bool>();\n\n agents.memory = 
AS[\"memory\"].get<unsigned>();\n agents.memoryReactivity = AS[\"memoryReactivity\"].get<float>();\n\n agents.perceiveCollision = AS[\"perceiveCollision\"].get<bool>();\n agents.receptorCount = AS[\"receptorCount\"].get<unsigned>();\n agents.perceiveColor = AS[\"perceiveColor\"].get<bool>();\n agents.perceiveEnergyLevel = AS[\"perceiveEnergyLevel\"].get<bool>();\n agents.perceiveMushroomCount = AS[\"perceiveMushroomCount\"].get<bool>();\n\n agents.FOV = AS[\"FOV\"].get<float>();\n agents.visibilityDistance = AS[\"visibilityDistance\"].get<float>();\n agents.visualReactivity = AS[\"visualReactivity\"].get<float>();\n\n agents.mutation = AS[\"mutation\"].get<float>();\n agents.layersMin = AS[\"layerMin\"].get<int>(); agents.layersMax = AS[\"layerMax\"].get<int>();\n agents.biasMin = AS[\"biasMin\"].get<float>(); agents.biasMax = AS[\"biasMax\"].get<float>();\n agents.weightMin = AS[\"weightMin\"].get<float>(); agents.weightMax = AS[\"weightMax\"].get<float>();\n agents.perceptronPerLayerMin = AS[\"perceptronPerLayerMin\"].get<int>();\n agents.perceptronPerLayerMax = AS[\"perceptronPerLayerMax\"].get<int>();\n\n\n // Loading controls\n auto &C = json[\"Controls\"];\n controls.pause = findKeyCode(C[\"pause\"].get<std::string>());\n controls.close = findKeyCode(C[\"close\"].get<std::string>());\n controls.showInterface = findKeyCode(C[\"showInterface\"].get<std::string>());\n controls.clearStats = findKeyCode(C[\"clearStats\"].get<std::string>());\n controls.up = findKeyCode(C[\"up\"].get<std::string>());\n controls.down = findKeyCode(C[\"down\"].get<std::string>());\n controls.left = findKeyCode(C[\"left\"].get<std::string>());\n controls.right = findKeyCode(C[\"right\"].get<std::string>());\n controls.slowDown = findKeyCode(C[\"slowDown\"].get<std::string>());\n controls.speedUp = findKeyCode(C[\"speedUp\"].get<std::string>());\n\n controls.upAmount = -C[\"keyboardCameraMove\"].get<float>();\n controls.downAmount = C[\"keyboardCameraMove\"].get<float>();\n controls.leftAmount = -C[\"keyboardCameraMove\"].get<float>();\n controls.rightAmount = C[\"keyboardCameraMove\"].get<float>();\n controls.timeFactorInitial = C[\"timeFactorInitial\"].get<float>();\n controls.timeFactorDelta = C[\"timeFactorDelta\"].get<float>();\n controls.timeFactorMax = C[\"timeFactorMax\"].get<float>();\n controls.scrollFactor = C[\"scrollFactor\"].get<float>();\n\n auto &RS = json[\"Rendering\"];\n render.graphLine = RS[\"graphLine\"].get<bool>();\n render.graphPopulation = RS[\"graphPopulation\"].get<bool>();\n render.graphMeanGeneration = RS[\"graphMeanGeneration\"].get<bool>();\n render.graphMeanPerceptrons = RS[\"graphMeanPerceptrons\"].get<bool>();\n render.graphMeanAge = RS[\"graphMeanAge\"].get<bool>();\n render.graphMeanChildren = RS[\"graphMeanChildren\"].get<bool>();\n render.graphMeanMurders = RS[\"graphMeanMurders\"].get<bool>();\n render.graphMeanEnergy = RS[\"graphMeanEnergy\"].get<bool>();\n render.graphMeanMushrooms = RS[\"graphMeanMushrooms\"].get<bool>();\n render.graphMeanSpeed = RS[\"graphMeanSpeed\"].get<bool>();\n\n render.graphSpectrogram = RS[\"graphSpectrogram\"].get<bool>();\n render.graphGeneration = RS[\"graphGeneration\"].get<bool>();\n render.graphPerceptrons = RS[\"graphPerceptrons\"].get<bool>();\n render.graphAge = RS[\"graphAge\"].get<bool>();\n render.graphEnergy = RS[\"graphEnergy\"].get<bool>();\n render.graphChildren = RS[\"graphChildren\"].get<bool>();\n render.graphMurders = RS[\"graphMurders\"].get<bool>();\n render.graphMushrooms = RS[\"graphMushrooms\"].get<bool>();\n 
render.graphSpeed = RS[\"graphSpeed\"].get<bool>();\n\n render.bins = RS[\"bins\"].get<unsigned>();\n render.showWorldObjectBounds = RS[\"showWorldObjectBounds\"].get<bool>();\n render.showInterface = RS[\"showInterface\"].get<bool>();\n render.showQuadtree = RS[\"showQuadtree\"].get<bool>();\n render.showQuadtreeEntities = RS[\"showQuadtreeEntities\"].get<bool>();\n render.showVision = RS[\"showVision\"].get<bool>();\n render.showPaths = false;\n render.renderOnlyAgents = false;\n render.visualizeGeneration = false;\n render.visualizeAge = false;\n render.visualizeMushrooms = false;\n render.visualizeChildren = false;\n render.visualizeMurders = false;\n render.visualizeColor = false;\n\n auto windowWidth = RS[\"windowWidth\"].get<unsigned>();\n auto windowHeight = RS[\"windowHeight\"].get<unsigned>();\n render.windowSize = {windowWidth, windowHeight};\n}\n\n\nsf::Keyboard::Key Config::findKeyCode(std::string key) {\n static const std::map<std::string, sf::Keyboard::Key> m { // TODO: add the rest\n {\"Space\", sf::Keyboard::Space},\n {\"Escape\", sf::Keyboard::Escape},\n {\"D\", sf::Keyboard::D},\n {\"C\", sf::Keyboard::C},\n {\"Up\", sf::Keyboard::Up},\n {\"Down\", sf::Keyboard::Down},\n {\"Left\", sf::Keyboard::Left},\n {\"Right\", sf::Keyboard::Right},\n {\"Comma\", sf::Keyboard::Comma},\n {\"Period\", sf::Keyboard::Period}\n };\n\n return m.at(key);\n}\n"
},
{
"alpha_fraction": 0.6828703880310059,
"alphanum_fraction": 0.6905864477157593,
"avg_line_length": 23.923076629638672,
"blob_id": "b0adf06f95210d3c1f2602256ecf2dc38217c819",
"content_id": "36d1d24a5cedc85ecb1a9b5b9cea0779ea5c37bd",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "C++",
"length_bytes": 2592,
"license_type": "permissive",
"max_line_length": 96,
"num_lines": 104,
"path": "/World.h",
"repo_name": "Axelwickm/HunterGatherers",
"src_encoding": "UTF-8",
"text": "//\n// Created by Axel on 2018-11-23.\n//\n\n#include <memory>\n#include <set>\n#include <deque>\n\n#ifndef HUNTERGATHERERS_WORLD_H\n#define HUNTERGATHERERS_WORLD_H\n\n#include <SFML/Graphics.hpp>\n#include \"Quadtree.h\"\n#include \"WorldObject.h\"\n#include \"Config.h\"\n#include \"OpenCL_Wrapper.h\"\n#include \"Populator.h\"\n\nstruct WorldStatistics {\n float timestamp = 0;\n std::size_t populationCount = 0;\n std::size_t mushroomCount = 0;\n unsigned lowestGeneration = 0;\n unsigned highestGeneration = 0;\n\n struct ColorValue {\n explicit operator float() const {\n return value;\n }\n sf::Color color;\n float value{};\n };\n\n std::vector<ColorValue> generation;\n std::vector<ColorValue> perceptrons;\n std::vector<ColorValue> age;\n std::vector<ColorValue> children;\n std::vector<ColorValue> murders;\n std::vector<ColorValue> energy;\n std::vector<ColorValue> mushrooms;\n std::vector<ColorValue> speed;\n};\n\nclass World {\npublic:\n World(Config &config, sf::RenderWindow *window, OpenCL_Wrapper *openCL_wrapper);\n\n OpenCL_Wrapper *getOpenCL_wrapper() const;\n\n void update(float deltaTime);\n void draw(float deltaTime);\n\n bool addObject(std::shared_ptr<WorldObject> worldObject);\n bool removeObject(std::shared_ptr<WorldObject> worldObject, bool performImmediately = true);\n void performDeletions();\n bool spawn(std::string type);\n bool agentSpawning;\n\n void reproduce(Agent &a);\n\n Config &getConfig();\n const Populator &getPopulator() const;\n\n const std::set<std::shared_ptr<Agent>> &getAgents() const;\n const std::set<std::shared_ptr<WorldObject>> &getObjects() const;\n\n const std::deque<WorldStatistics> &getHistoricalStatistics() const;\n void clearStatistics();\n\n const float getHistoryFrequency() const;\n\n const sf::RenderWindow *getWindow() const;\n\n const sf::Vector2f &getDimensions() const;\n const Quadtree<float> &getQuadtree() const;\n\n\nprivate:\n float worldTime;\n Config& config;\n sf::RenderWindow *window;\n const sf::Vector2f dimensions;\n sf::Texture terrainTexture;\n sf::Sprite terrain;\n\n std::list<std::shared_ptr<WorldObject>> deletionList;\n\n Populator populator;\n std::set<std::shared_ptr<Agent>> agents;\n std::set<std::shared_ptr<WorldObject>> objects;\n std::deque<WorldStatistics> historicalStatistics;\n const float historyFrequency;\n\n Quadtree<float> quadtree;\n OpenCL_Wrapper *openCL_wrapper;\n std::mt19937 randomEngine;\n\n void generateTerrain();\n\n void updateStatistics();\n};\n\n\n#endif //HUNTERGATHERERS_WORLD_H\n"
}
] | 35 |
charlesbernando/Titanic
|
https://github.com/charlesbernando/Titanic
|
0c0f99622c3803b6c58a3cf52e9ef216e6e2c2fc
|
e88fa77bfe5a5c8b19e4af837cbd2d0d4af87957
|
5bb18db59e79802fd76a406bf28ce6e238db9c1c
|
refs/heads/master
| 2021-01-10T13:36:09.839187 | 2016-04-04T01:10:39 | 2016-04-04T01:10:39 | 55,376,826 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.8194444179534912,
"alphanum_fraction": 0.8194444179534912,
"avg_line_length": 35,
"blob_id": "bd7cb41700de4a083b985ecddea0a15ccece01ae",
"content_id": "d94c2f10acb33c52e94768442ba18adcad6ef46e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 72,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 2,
"path": "/README.md",
"repo_name": "charlesbernando/Titanic",
"src_encoding": "UTF-8",
"text": "# Titanic\nProportion of the Titanic survivors based on class and gender\n"
},
{
"alpha_fraction": 0.6479912400245667,
"alphanum_fraction": 0.6887127757072449,
"avg_line_length": 37.7282600402832,
"blob_id": "030ac714414790e7bd7ba17e12ba5ac6629ea743",
"content_id": "d382f0a139cb71179a386691aeadb5822da01d92",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3659,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 92,
"path": "/tickets.py",
"repo_name": "charlesbernando/Titanic",
"src_encoding": "UTF-8",
"text": "\r\n\"\"\"\r\nCreated on Mon Mar 21 22:29:39 2016\r\n\r\n@author: charles\r\n\"\"\"\r\n\r\nimport csv as csv\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\n\r\n#Read the file and data\r\ncsv_file_object = csv.reader(open('train.csv', 'rb'))\r\nheader = csv_file_object.next() \t\t\t\t\t\t\r\ndata=[] \r\n\r\n#Put the data in an array\r\nfor row in csv_file_object: \t\t\t\t\t\t\t\r\n data.append(row[0:]) \t\t\t\t\t\t\t\t\r\ndata = np.array(data) \t\t\t\t\t\t\t\t\t\r\n\r\n#Separate the 1st class, 2nd class and 3rd class passengers\r\nc1_stats = data[0::,2] == \"1\" \t\r\nc2_stats = data[0::,2] == \"2\" \t\r\nc3_stats = data[0::,2] == \"3\"\r\n\r\n#Find male and female passengers in each class\r\nc1f_stats = data[c1_stats,4] == \"female\"\r\nc2f_stats = data[c2_stats,4] == \"female\"\r\nc3f_stats = data[c3_stats,4] == \"female\"\r\nc1m_stats = data[c1_stats,4] == \"male\"\r\nc2m_stats = data[c2_stats,4] == \"male\"\r\nc3m_stats = data[c3_stats,4] == \"male\"\r\n\r\n#Survival data of all passengers from each class\r\nc1_onboard = data[c1_stats,1].astype(np.float)\r\nc2_onboard = data[c2_stats,1].astype(np.float)\r\nc3_onboard = data[c3_stats,1].astype(np.float)\r\n\r\n#Proportion of all passengers who survived from each class\r\nprop_c1_survived = np.sum(c1_onboard) / np.size(c1_onboard)\r\nprop_c2_survived = np.sum(c2_onboard) / np.size(c2_onboard)\r\nprop_c3_survived = np.sum(c3_onboard) / np.size(c3_onboard)\r\n\r\n#Survival data of male and female passengers from each class\r\nc1f_onboard = data[c1f_stats,1].astype(np.float)\r\nc2f_onboard = data[c2f_stats,1].astype(np.float)\r\nc3f_onboard = data[c3f_stats,1].astype(np.float)\r\nc1m_onboard = data[c1m_stats,1].astype(np.float)\r\nc2m_onboard = data[c2m_stats,1].astype(np.float)\r\nc3m_onboard = data[c3m_stats,1].astype(np.float)\r\n\r\n#Proportion of male and female passengers who survived from each class\r\nprop_c1f_survived = np.sum(c1f_onboard) / np.size(c1f_onboard)\r\nprop_c2f_survived = np.sum(c2f_onboard) / np.size(c2f_onboard)\r\nprop_c3f_survived = np.sum(c3f_onboard) / np.size(c3f_onboard)\r\nprop_c1m_survived = np.sum(c1m_onboard) / np.size(c1m_onboard)\r\nprop_c2m_survived = np.sum(c2m_onboard) / np.size(c2m_onboard)\r\nprop_c3m_survived = np.sum(c3m_onboard) / np.size(c3m_onboard)\r\n\r\n#Print all results\r\nprint 'Proportion of people in class1 who survived is %s' % prop_c1_survived\r\nprint 'Proportion of people in class2 who survived is %s' % prop_c2_survived\r\nprint 'Proportion of people in class3 who survived is %s' % prop_c3_survived\r\n\r\nprint 'Proportion of female in class1 who survived is %s' % prop_c1f_survived\r\nprint 'Proportion of female in class2 who survived is %s' % prop_c2f_survived\r\nprint 'Proportion of female in class3 who survived is %s' % prop_c3f_survived\r\nprint 'Proportion of male in class1 who survived is %s' % prop_c1m_survived\r\nprint 'Proportion of male in class2 who survived is %s' % prop_c2m_survived\r\nprint 'Proportion of male in class3 who survived is %s' % prop_c3m_survived\r\n\r\n#Bar plots of the proportion of survivors \r\nN = 3\r\nt_prop = (prop_c1_survived, prop_c2_survived, prop_c3_survived)\r\nf_prop = (prop_c1f_survived, prop_c2f_survived, prop_c3f_survived)\r\nm_prop = (prop_c1m_survived, prop_c2m_survived, prop_c3m_survived)\r\nindex = np.arange(N)\r\nwidth = 0.25\r\n\r\np1 = plt.bar(index, t_prop, width, color='r')\r\np2 = plt.bar(index+width, f_prop, width, color='y')\r\np3 = plt.bar(index+(2*width), m_prop, width, color='b')\r\n 
\r\nplt.xlabel('Class')\r\nplt.ylabel('Proportion of the survivors')\r\nplt.title('Proportion of the Titanic survivors based on class and gender')\r\nplt.xticks(index + (3*width/2), ('1st class', '2nd class', '3rd class'))\r\nplt.yticks(np.arange(0, 0.71, 0.1))\r\nplt.legend((p1, p2, p3), ('Total','Female', 'Male'))\r\n\r\nplt.tight_layout()\r\nplt.show()\r\n\r\n"
}
] | 2 |
theihor/web-lab3
|
https://github.com/theihor/web-lab3
|
e7204fb74c9461a442dd3a881d8cd36278b86861
|
a86c3d87281c58781a7c9405741251e7f0191134
|
984a3b525ff8556c8d8c95187607957a9689a404
|
refs/heads/master
| 2021-01-10T02:31:35.999649 | 2015-12-29T19:23:05 | 2015-12-29T19:23:05 | 48,763,381 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6763636469841003,
"alphanum_fraction": 0.6872727274894714,
"avg_line_length": 29.16666603088379,
"blob_id": "3a5c5866b474e286112d75fb22a7de87a0f727bb",
"content_id": "5a17582bd67eb56f3f05a67b4fc64df30053f4a5",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 550,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 18,
"path": "/app/models.py",
"repo_name": "theihor/web-lab3",
"src_encoding": "UTF-8",
"text": "from __future__ import unicode_literals\n\nfrom django.db import models\n\n# Create your models here.\nclass Developer(models.Model):\n name = models.CharField(max_length=20)\n email = models.CharField(max_length=30)\n def __str__(self):\n return self.name\n\nclass Game(models.Model):\n name = models.CharField(max_length=20)\n release_date = models.DateTimeField('Release date')\n price = models.IntegerField()\n developer = models.ForeignKey(Developer, on_delete=models.CASCADE)\n def __str__(self):\n return self.name\n \n\n\n"
},
{
"alpha_fraction": 0.797468364238739,
"alphanum_fraction": 0.797468364238739,
"avg_line_length": 18.75,
"blob_id": "443415208bc286a07fed33f811372991756faeb6",
"content_id": "851305f78f257d8586d56711c03beca832efaf70",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 158,
"license_type": "no_license",
"max_line_length": 35,
"num_lines": 8,
"path": "/app/admin.py",
"repo_name": "theihor/web-lab3",
"src_encoding": "UTF-8",
"text": "from django.contrib import admin\n\nfrom .models import Developer, Game\n\nadmin.site.register(Developer)\nadmin.site.register(Game)\n\n# Register your models here.\n"
},
{
"alpha_fraction": 0.6509299278259277,
"alphanum_fraction": 0.6509299278259277,
"avg_line_length": 32.28571319580078,
"blob_id": "e8e0e588fcfd615faa1fd0fdbd35fb77095e464a",
"content_id": "1dc91a25dbece22da3427053906b5d11d653b65f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 699,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 21,
"path": "/app/views.py",
"repo_name": "theihor/web-lab3",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\nfrom django.http import HttpResponse\nfrom .models import *\n# Create your views here.\ndef index(request):\n games = [(g.id, g.name) for g in Game.objects.all()]\n print(games)\n return render (request,\"app/index.html\", {\"games\" : games})\n # return HttpResponse(\"Piece of shit.\")\n\ndef game(request):\n game_id = request.GET.get('id')\n print(\"I'm HERE!!\")\n info = {}\n g = Game.objects.get(id=game_id)\n info[\"game_name\"] = g.name\n info[\"price\"] = g.price\n info[\"release_date\"] = str(g.release_date)\n info[\"developer\"] = g.developer.name\n info[\"developer_email\"] = g.developer.email\n return render (request,\"app/game.html\", info)\n"
}
] | 3 |
PedroEid/J0go
|
https://github.com/PedroEid/J0go
|
fe086e52582176eb94d9243b3e179f7733bb3199
|
8eceaac524e86506cbc75f917de3bb156a98f4b7
|
f6080e360fefbdfef7f3681dda84039c19bbb568
|
refs/heads/master
| 2020-03-15T06:46:27.886197 | 2018-06-08T13:00:31 | 2018-06-08T13:00:31 | 132,014,933 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.45441561937332153,
"alphanum_fraction": 0.4990980923175812,
"avg_line_length": 30.755369186401367,
"blob_id": "5447668325b6ab0fdb248cf7a720fbc8b1ccd5fe",
"content_id": "5d9804bd60095c8463986ae5e250bab92945949b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 26626,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 838,
"path": "/J0g0.py",
"repo_name": "PedroEid/J0go",
"src_encoding": "UTF-8",
"text": "import pygame\nfrom random import randrange\nimport pygame.mixer\n\n\n# Cores.\nwhite = (255,255,255)\ngray = (125,125,125)\ngreen = (0,50,0)\nblack=(0,0,0)\nred = (255,0,0)\npurple =(150,150,255)\nblue=(20,20,255)\nazul=(0,200,250)\n\nFPS = 60\ngrav=20\ntela_y=500\ntela_x=900\ntela = pygame.display.set_mode([tela_x,tela_y])\ntela.fill(black)\nfundo = pygame.image.load(\"ceu novo.png\").convert()\nfundo = pygame.transform.scale(fundo,(tela_x,tela_y))\n\npygame.mixer.pre_init()\npygame.init()\n\n\nrelogio = pygame.time.Clock()\nchoro = pygame.mixer.Sound('choro2.ogg')\nmusica = pygame.mixer.Sound('music.ogg')\n\n\n# Criando classe bebe\nclass Bebe (pygame.sprite.Sprite):\n def __init__(self, imbebe, pos_x, pos_y,tela,vida,cortex1,cortex2):\n pygame.sprite.Sprite.__init__(self)\n self.image = pygame.image.load(imbebe) \n self.image = pygame.transform.scale(self.image,(180,150))\n self.image=pygame.transform.chop(self.image, (132, 120, cortex1,30 ))\n self.image=pygame.transform.chop(self.image, (0, 0, cortex2, 15))\n self.rect = self.image.get_rect()\n self.rect.x = pos_x\n self.rect.y = pos_y\n self.vida=vida\n\n\n \n def health(self):\n pygame.draw.rect(self.image,white,[0,0,100,3])\n if self.vida>0:\n pygame.draw.rect(self.image, red, [0,0,self.vida,3])\n \n \n#criando classe de plataforma\nclass Plataformas(pygame.sprite.Sprite): \n def __init__(self,pos_x,pos_y, imagem):\n pygame.sprite.Sprite.__init__(self)\n self.image = pygame.image.load(imagem) \n self.image = pygame.transform.scale(self.image,(120,100))\n self.image=pygame.transform.chop(self.image, (0, 65,0 ,35 ))\n self.image=pygame.transform.chop(self.image, (0, 0, 0, 40))\n self.rect = self.image.get_rect()\n self.rect.x = pos_x\n self.rect.y = pos_y\n \nclass Parede(pygame.sprite.Sprite): \n def __init__(self,pos_x,pos_y, width, height,cor):\n pygame.sprite.Sprite.__init__(self)\n # Set the background color and set it to be transparent\n self.image = pygame.Surface([width, height])\n # Draw the ellipse\n pygame.draw.ellipse(self.image, cor, [10, 10, width, height])\n self.image.fill(cor)\n self.rect = self.image.get_rect()\n self.rect.x = pos_x\n self.rect.y = pos_y\n \n \n \nclass Cookie(pygame.sprite.Sprite): \n def __init__(self,pos_x,pos_y, imagem):\n pygame.sprite.Sprite.__init__(self)\n self.image = pygame.image.load(imagem) \n self.image = pygame.transform.scale(self.image,(20,20))\n self.rect = self.image.get_rect()\n self.rect.x = pos_x\n self.rect.y = pos_y\n \n \n#criando classe de mamadeira \nclass Mamadeira (pygame.sprite.Sprite):\n def __init__(self, immadeira, pos_x, pos_y,vel_x,vel_y,g):\n pygame.sprite.Sprite.__init__(self)\n self.vx = vel_x\n self.vy = vel_y\n self.g = g\n self.image = pygame.image.load(immadeira)\n self.image = pygame.transform.scale(self.image,(40,40))\n self.image=pygame.transform.chop(self.image, (30, 26, 30,30 ))\n self.image=pygame.transform.chop(self.image, (0, 0, 0,15 ))\n self.rect = self.image.get_rect()\n self.rect.x = pos_x\n self.rect.y = pos_y\n self.movendo = False\n self.passos = 0 # DEBUG\n self.pre_vy=self.vy \n self.pre_x=self.rect.x-10 \n self.pre_y=self.rect.y\n self.pre_vx=self.vx\n\n def atira(self):\n self.movendo = True\n def parar_atirar(self):\n self.movendo=False\n def move(self):\n if self.movendo:\n self.vy += self.g/FPS\n self.rect.x += self.vx\n self.rect.y += self.vy\n def pre_move(self,tela):\n lista=[]\n self.pre_vy=self.vy \n self.pre_x=self.rect.x+20\n self.pre_y=self.rect.y+5\n self.pre_vx=self.vx\n lista.append([self.pre_x,self.pre_y]) \n for i 
in range(20):\n self.pre_vy+= self.g/FPS \n self.pre_x+=self.pre_vx\n self.pre_y+= self.pre_vy\n lista.append([self.pre_x,self.pre_y])\n listapre=[lista[i],lista[i+1]]\n if i%2!=0:\n pygame.draw.aalines(tela,black ,False,listapre)\n \n\nfont = pygame.font.SysFont(\"Boo.Fixed Sys\", (tela_x-850))\ntext = font.render(\"Bem Vindo ao Baby Fight\", True, (green))\nfont1 = pygame.font.SysFont(\"segoe ui\", tela_x-872)\nfont2= pygame.font.SysFont(\"segoe ui\", tela_x-880)\nfont3=pygame.font.SysFont('segoe ui',tela_x-880)\ntext1 = font1.render(\"JOGAR\", True, (blue))\ncontroles=font2.render(\"CONTROLES\",True,blue)\nregras=font2.render(\"REGRAS\",True,blue)\n#Controles\ncontrole0=font.render(\"CONTROLES\", True, (black))\ncontrole1=font3.render(\"SETAS PARA CIMA & BAIXO = CONTROLA A INCLINAÇÃO DO TIRO\", True, (green))\ncontrole2=font3.render(\"SETAS PARA OS LADOS = CONTROLE DA DIREÇÃO DO TIRO\", True, (green))\ncontrole3=font3.render(\"TECLAS A, BARRA DE ESPAÇO & D = MOVIMENTO DO BEBE\",True,(green))\ncontrole4=font3.render('TECLAS W & S = VELOCIDADE DO TIRO',True,(green))\n#Regras\nregra0=font.render(\"REGRAS\",True,black)\nregra1=font3.render(\"NESSE JOGO O SEU OBJETIVO É ACABAR COM OS OUTROS BEBES,\", True, (green))\nregra2=font3.render(\"MAS NÃO FAÇA ISSO ELES SÃO APENAS BEBES\", True, (green))\nregra3=font3.render(\"CADA JOGADOR TEM 3 MOVIMENTOS OU UM TIRO\",True,green)\nregra4=font3.render(\"NÃO USE HACK, CASO CONTRÁRIO FICARA DE CASTIGO\",True,green)\nregra5=font3.render('CASO CAIA NA LAVA, PERDERÁ VIDA', True, green)\nregra6=font3.render('ESTÁ CHEIO DE GULOSEIMAS, COMA E GANHE VIDA', True, green)\nvoltar=font2.render(\"VOLTAR\",True,black)\n\n \n#CRIANDO TELA\n\npygame.init()\n\ntela = pygame.display.set_mode([tela_x,tela_y])\npygame.display.set_caption(\"Bem vindo ao jogo\")\n\n\n#CRIANDO GRUPOS\n\n\ncookie= pygame.sprite.Group()\nbebe_1 = pygame.sprite.Group()\nmamadeira_1 = pygame.sprite.Group()\nmamadeira_2 = pygame.sprite.Group()\nbebe_2 = pygame.sprite.Group()\n\nplataforma_group=pygame.sprite.Group()\nparedeb=pygame.sprite.Group()\nparedebebe=pygame.sprite.Group()\nlava=pygame.sprite.Group()\n\n\n\n#CONSTANTES E VARIAVIES\n #DIMENSOES DOS BEBES\n \n \nmamadeira_bebe_y = 60\nmamadeira_bebe_x = 70\nplataforma_bebe_y=90\n\n #FALSE/TRUE\ncontrol=False\ntrocou_de_mao_1=False\ntrocou_de_mao_2=False\natirou2=False\natirou1=False\nrules=False\nsair=False\ninicio=True\nmovimento_1=False\nvelmax_x=False\nvelmin_x=False\nmorte=False\njump2=False\njump1=False\n\n #MOVIMENTO DOS BEBES\nm_bebe=0\n\n\n #GRAVIDADE\npulo2=0\npulo1=0\ng1=0\ng2=0\n\n#timer=0\n\n\n\n\n\n #LOCALIZACOES DOS BEBES:\n\n\n\nx = tela_x-220\ny = tela_y-400\nex = tela_x-800\ney = tela_y-400\n\n\n #LOCALIZACOES DAS PLATAFORMAS\n \n \npx1=randrange(200,600)\npy1=randrange(50,200)\n\n\npx2=randrange(200,600)\npy2=randrange(200,350)\n\n\npx3=randrange(200,600)\npy3=randrange(300,350)\n\n#CRIANDO\n #BEBES\n\n\nb_1= Bebe('bebe bonitinho0.png',x,y-10,tela,80,70,40)\nb_2= Bebe('bebe bonitinho(3).png',ex,ey-10,tela,80,70,40)\n\n #MAMADEIRAS\n\nm_1= Mamadeira('mamadeira2.png',(x+mamadeira_bebe_x),(y+mamadeira_bebe_y),10,(-10),(grav))\nm_2=Mamadeira('mamadeira2.png',(ex+mamadeira_bebe_x),(ey+mamadeira_bebe_y),8,(-10),(grav))\n\n #PLATAFORMAS\n\n\np_1=Plataformas(x,y+plataforma_bebe_y,'nuvens(1).png')\np_2=Plataformas(ex,ey+plataforma_bebe_y,'nuvens(1).png')\np_aleatoria1=Plataformas(px1,py1,'nuvens(1).png')\np_aleatoria3=Plataformas(px3,py3,'nuvens(1).png')\np_aleatoria2=Plataformas(px2,py2,'nuvens(1).png')\n\n\n 
#LAVA\np_baixo_direita=Parede(0,tela_y-10,10000,100,red)\n\n\n\n #PAREDES\n\n \n\n\n #BEBES\nparedebebe1=Parede(x+10,y+plataforma_bebe_y-10,65,15,red)\nparedebebe2=Parede(ex+10,ey+plataforma_bebe_y-10,65,15,red)\n\n\n #PLATAFORMAS \nparedebaixo=Parede(x+15,y+plataforma_bebe_y,90,5,black)\nparedebaixo0=Parede(ex+15,ey+plataforma_bebe_y,90,5,black)\nparedebaixo1=Parede(px1+15,py1+2,90,5,black)\nparedebaixo2=Parede(px2+15,py2+2,90,5,black)\nparedebaixo3=Parede(px3+15,py3+2,90,5,black)\n\n\n\n\n#ADICIONANDO AOS GRUPOS\n\n\n #PAREDE\nparedeb.add(paredebaixo)\nparedeb.add(paredebaixo0)\nparedeb.add(paredebaixo1)\nparedeb.add(paredebaixo2)\nparedeb.add(paredebaixo3)\n\n\n #LAVA\n \nlava.add(p_baixo_direita)\n\n #PLATAFORMA\n \nplataforma_group.add(p_aleatoria2)\nplataforma_group.add(p_aleatoria3)\nplataforma_group.add(p_aleatoria1)\nplataforma_group.add(p_1)\nplataforma_group.add(p_2)\n\n #BEBES\n \nbebe_1.add(b_1)\nbebe_2.add(b_2)\n\n\n #MAMADEIRAS\n \n \nmamadeira_1.add(m_1)\nmamadeira_2.add(m_2)\n\n\n#MUSICA DE FUNDO\n\n\npygame.mixer.music.load('babyfight.mp3')\npygame.mixer.music.play(-1)\n\n \n#VELOCIDADE INICIAL DA MAMADEIRA\n\n\nvy_inicial2=m_2.vy\nvy_inicial1=m_1.vy\nvx_inicial2=m_2.vx\nvx_inicial1=m_1.vx\n\n#LOOPING PRICIPAL\nwhile not sair:\n # MOVIMENTO DA MAMADEIRA\n \n m_2.move()\n m_1.move()\n \n #APARECIMENTO DO COOKIE\n \n aparece=randrange(0,600)\n \n for event in pygame.event.get():\n \n if event.type == pygame.QUIT:\n sair = True\n\n #TElA DE INICIO\n elif inicio:\n tela.fill(azul)\n tela.blit(text,(420 - text.get_width() // 2, 130 - text.get_height() // 2))\n jogar=tela.blit(text1,(425 - text1.get_width() // 2, 230 - text1.get_height() // 2))\n cont=tela.blit(controles,(416 - text1.get_width() // 2, 280 - text1.get_height() // 2))\n rule=tela.blit(regras,(435 - text1.get_width() // 2, 310 - text1.get_height() // 2))\n trocou_de_mao_1=False\n trocou_de_mao_2=False\n atirou2=False \n movimento_1=False\n m_bebe=0\n\n\n if event.type == pygame.MOUSEBUTTONDOWN: \n mouse_posicao=pygame.mouse.get_pos()\n if jogar.collidepoint(mouse_posicao):\n inicio=False\n control=False\n rules=False\n musica.play(-1)\n elif cont.collidepoint(mouse_posicao):\n inicio=False\n control=True\n rules=False\n elif rule.collidepoint(mouse_posicao):\n inicio=False\n control=False\n rules=True\n \n \n \n \n \n #TELA DE CONTROLES \n \n \n \n elif control:\n tela.fill(gray)\n tela.blit(controle0,(350 - text1.get_width() // 2, 100 - text1.get_height() // 2))\n tela.blit(controle1,(350 - text1.get_width() // 2, 205 - text1.get_height() // 2))\n tela.blit(controle2,(350 - text1.get_width() // 2, 280 - text1.get_height() // 2))\n tela.blit(controle3,(350 - text1.get_width() // 2, 355 - text1.get_height() // 2))\n tela.blit(controle4,(350 - text1.get_width() // 2, 430 - text1.get_height() // 2))\n volt=tela.blit(voltar,(170 - text1.get_width() // 2, 430 - text1.get_height() // 2))\n if event.type == pygame.MOUSEBUTTONDOWN: \n mouse_posicao=pygame.mouse.get_pos()\n if volt.collidepoint(mouse_posicao):\n control=False\n inicio=True\n \n \n #TELA DE REGRAS\n \n \n \n elif rules:\n tela.fill(gray)\n tela.blit(regra0,(525 - text.get_width() // 2, 100 - text.get_height() // 2))\n tela.blit(regra1,(350 - text1.get_width() // 2, 180 - text1.get_height() // 2))\n tela.blit(regra2,(350 - text1.get_width() // 2, 230 - text1.get_height() // 2))\n tela.blit(regra3,(350 - text1.get_width() // 2, 280 - text1.get_height() // 2))\n tela.blit(regra5,(350 - text1.get_width() // 2, 330 - text1.get_height() // 2))\n 
tela.blit(regra4,(350 - text1.get_width() // 2, 380 - text1.get_height() // 2))\n tela.blit(regra6,(350 - text1.get_width() // 2, 430 - text1.get_height() // 2))\n volt=tela.blit(voltar,(170 - text1.get_width() // 2, 430 - text1.get_height() // 2))\n if event.type == pygame.MOUSEBUTTONDOWN: \n mouse_posicao=pygame.mouse.get_pos()\n if volt.collidepoint(mouse_posicao):\n control=False\n inicio=True\n \n \n \n \n \n elif not morte:\n velmax_x=False\n velmin_x=False \n movimentou2=False\n movimentou1=False\n \n \n #MOVIMENTO DOS PERSONAGENS \n if not movimento_1:\n pode=True\n \n if event.type == pygame.KEYDOWN: \n \n if event.key== pygame.K_RETURN:\n m_2.atira()\n atirou2=True\n \n \n if event.key==pygame.K_LEFT and not trocou_de_mao_2 and not atirou2:\n m_2.rect.x-=mamadeira_bebe_y+15\n b_2.image=pygame.transform.flip(b_2.image, True, False)\n m_2.vx=-m_2.vx\n trocou_de_mao_2=True\n \n if event.key==pygame.K_RIGHT and trocou_de_mao_2 and not atirou2:\n m_2.rect.x+=mamadeira_bebe_y+15\n m_2.vx=-m_2.vx\n b_2.image=pygame.transform.flip(b_2.image, True, False)\n trocou_de_mao_2=False\n if event.key==pygame.K_UP and not atirou2:\n m_2.vy-=tela_y-498\n \n if event.key==pygame.K_DOWN and not atirou2:\n m_2.vy+=tela_y-498\n\n if event.key==pygame.K_w and not atirou2:\n if m_2.vx!=16 and m_2.vx!=-16:\n if trocou_de_mao_2:\n m_2.vx-=2\n else:\n m_2.vx+=2\n else:\n velmax_x=True\n if event.key==pygame.K_d and b_2.rect.x<(781) and not atirou2:\n \n \n b_2.rect.x+=50\n m_2.rect.x+=50\n paredebebe2.rect.x=b_2.rect.x+10\n m_bebe+=1\n movimentou2=True\n \n \n\n \n \n if event.key==pygame.K_s and not atirou2:\n if m_2.vx!=0:\n if trocou_de_mao_2:\n m_2.vx+=2\n else:\n m_2.vx-=2\n else:\n velmin_x=True\n if event.key==pygame.K_a and b_2.rect.x>0 and not atirou2:\n b_2.rect.x-=50\n m_2.rect.x-=50\n paredebebe2.rect.x=b_2.rect.x+10\n m_bebe+=1\n movimentou2=True\n if event.key==pygame.K_SPACE and not atirou2 and not jump2:\n \n pulo2=-10\n m_bebe+=1\n jump2=True\n\n\n \n\n\n\n\n if movimento_1:\n if event.type == pygame.KEYDOWN: \n if event.key== pygame.K_RETURN:\n m_1.atira()\n atirou1=True \n if event.key==pygame.K_LEFT and not trocou_de_mao_1 and not atirou1:\n m_1.rect.x-=mamadeira_bebe_y+15\n m_1.vx=-m_1.vx\n b_1.image=pygame.transform.flip(b_1.image, True, False)\n trocou_de_mao_1=True\n if event.key==pygame.K_RIGHT and trocou_de_mao_1 and not atirou1:\n m_1.rect.x+=mamadeira_bebe_y+15\n m_1.vx=-m_1.vx\n b_1.image=pygame.transform.flip(b_1.image, True, False)\n trocou_de_mao_1=False\n if event.key==pygame.K_w and not atirou1:\n if m_1.vx!=16 and m_1.vx!=(-16):\n if trocou_de_mao_1:\n m_1.vx-=2\n else:\n m_1.vx+=2\n else:\n velmax_x=True\n if event.key==pygame.K_s and not atirou1:\n if m_1.vx!=0:\n if trocou_de_mao_1:\n m_1.vx+=2\n else:\n m_1.vx-=2\n else:\n velmin_x=True\n \n \n if event.key==pygame.K_UP and not atirou1:\n m_1.vy-=tela_y-498\n \n if event.key==pygame.K_DOWN and not atirou1:\n m_1.vy+=tela_y-498\n if event.key==pygame.K_d and b_1.rect.x<(781) and not atirou1:\n b_1.rect.x+=50\n m_1.rect.x+=50\n paredebebe1.rect.x=b_1.rect.x+10\n m_bebe-=1\n movimentou1=True\n \n \n if event.key==pygame.K_a and b_1.rect.x>0 and not atirou1:\n b_1.rect.x-=50\n m_1.rect.x-=50\n paredebebe1.rect.x=b_1.rect.x+10\n m_bebe-=1\n movimentou1=True\n \n if event.key==pygame.K_SPACE and not atirou1 and not jump1:\n \n pulo1=-10\n m_bebe-=1\n jump1=True\n#TROCA DE MOVIMENTOS\n if m_bebe<=0:\n movimento_1=False\n mamadeira=mamadeira_2\n \n\n if m_bebe>=3:\n movimento_1=True\n mamadeira=mamadeira_1\n \n#GRAVIDADE DOS 
BEBES\n \n\n b_2.rect.y+=pulo2\n paredebebe2.rect.y=b_2.rect.y+90\n if not atirou2:\n m_2.rect.y=b_2.rect.y+mamadeira_bebe_y \n \n \n \n gravidadeb2_1=pygame.sprite.spritecollide(paredebebe2,paredeb, False) \n gravlava2=pygame.sprite.spritecollide(b_2,lava,False) \n if gravlava2:\n pulo2=0\n jump2=False\n if not morte:\n pulo2-=15\n b_2.vida-=10\n b_2.health()\n \n elif gravidadeb2_1 and pulo2>=0:\n pulo2=0\n jump2=False\n else:\n g2=grav*1/FPS\n pulo2+=g2\n \n\n\n\n\n b_1.rect.y+=pulo1\n paredebebe1.rect.y=b_1.rect.y+90\n if not atirou1:\n m_1.rect.y=b_1.rect.y+mamadeira_bebe_y\n \n \n \n \n \n \n gravidadeb1_1=pygame.sprite.spritecollide(paredebebe1,paredeb, False) \n gravlava1=pygame.sprite.spritecollide(b_1,lava,False) \n if gravlava1:\n pulo1=0\n jump1=False\n if not morte:\n pulo1-=15\n b_1.vida-=10\n b_1.health()\n \n elif gravidadeb1_1 and pulo1>=0:\n pulo1=0\n jump1=False\n else:\n g1=grav*1/FPS\n pulo1+=g1\n \n \n \n \n#COLISAO DAS MAMADEIRAS COM OS BEBES E PLATAFORMAS \n colisao_b_m2= pygame.sprite.spritecollide(b_1,mamadeira_2, False)\n colisao_m_p2=pygame.sprite.spritecollide(m_2,plataforma_group, False)\n if colisao_b_m2 or colisao_m_p2 and atirou2 or m_2.rect.x>900 or m_2.rect.x<0 or m_2.rect.y<-500:\n m_2.rect.x=b_2.rect.x+mamadeira_bebe_x\n m_2.rect.y=b_2.rect.y+mamadeira_bebe_y\n m_2.parar_atirar()\n if trocou_de_mao_2:\n b_2.image=pygame.transform.flip(b_2.image, True, False)\n if colisao_b_m2:\n b_1.vida-=20\n b_1.health()\n if b_1.rect.x<(781) and b_1.rect.x>0:\n if trocou_de_mao_2:\n b_1.rect.x-=50\n m_1.rect.x-=50\n paredebebe1.rect.x=b_1.rect.x+10\n elif not trocou_de_mao_2:\n b_1.rect.x+=50\n m_1.rect.x+=50\n paredebebe1.rect.x=b_1.rect.x+10\n m_2.vy=vy_inicial2\n m_2.vx=vx_inicial2\n m_bebe=3\n trocou_de_mao_2=False\n atirou2=False\n\n\n\n\n \n colisao_b_m1 = pygame.sprite.spritecollide(b_2,mamadeira_1, False)\n colisao_m_p1=pygame.sprite.spritecollide(m_1,plataforma_group, False)\n if colisao_b_m1 or colisao_m_p1 and atirou1 or m_1.rect.x>900 or m_1.rect.x<0 or m_1.rect.y<-500:\n m_1.rect.x=b_1.rect.x+mamadeira_bebe_x\n m_1.rect.y=b_1.rect.y+mamadeira_bebe_y\n m_1.parar_atirar()\n if trocou_de_mao_1:\n b_1.image=pygame.transform.flip(b_1.image, True, False)\n if colisao_b_m1:\n b_2.vida-=20\n b_2.health()\n if b_2.rect.x<(781) and b_2.rect.x>0:\n if trocou_de_mao_1:\n b_2.rect.x-=50\n m_2.rect.x-=50\n paredebebe2.rect.x=b_2.rect.x+10\n elif not trocou_de_mao_1:\n b_2.rect.x+=50\n m_2.rect.x+=50\n paredebebe2.rect.x=b_2.rect.x+10\n m_1.vy=vy_inicial1\n m_1.vx=vx_inicial1\n m_bebe=0\n atirou1=False\n trocou_de_mao_1=False\n \n \n \n#COLISAO COOKIE E BEBES\n\n \n comer2=pygame.sprite.spritecollide(b_2,cookie,True)\n comer1=pygame.sprite.spritecollide(b_1,cookie,True)\n if comer2 and b_2.vida<80:\n b_2.vida+=40\n b_2.health()\n if comer1 and b_1.vida<80:\n b_1.vida+=40\n b_1.health()\n \n \n#SISTEMA DE COLISAO COM PAREDES LATERAIS \n\n\n \n \n \n \n\n \n#TELA DO JOGO\n\n\n if not inicio and not control and not rules:\n lava.draw(tela)\n tela.blit(fundo, (0, 0))\n #CRIACAO DOS COOKIES\n if 10>len(cookie):\n if aparece==5: \n ck=Cookie(randrange(0,800),randrange(0,450),'índice.png')\n cookie.add(ck)\n\n# tela.fill(white)\n bebe_1.draw(tela)\n bebe_2.draw(tela)\n mamadeira_1.draw(tela)\n mamadeira_2.draw(tela)\n plataforma_group.draw(tela)\n cookie.draw(tela)\n m=0\n pygame.mixer.music.stop()\n \n if velmax_x:\n max_x=font3.render(\"Velocidade maxima\", True, (red))\n tela.blit(max_x,(350 - text.get_width() // 2, 500 - text.get_height() // 2))\n elif velmin_x:\n 
max_x=font3.render(\"Velocidade minima\", True, (red))\n tela.blit(max_x,(350 - text.get_width() // 2, 500 - text.get_height() // 2))\n if not atirou2 and not atirou1: \n for m in mamadeira:\n m.pre_move(tela)\n if (b_2.vida<=0 or b_1.vida<=0) and not morte:\n if b_2.vida<=0:\n b_choro= Bebe('bebe bonitinho(2).png',b_2.rect.x,b_2.rect.y,tela,100,0,0)\n bebe_2.add(b_choro)\n bebe_2.remove(b_2)\n morte=True\n if b_1.vida<=0:\n b_choro= Bebe('bebe bonitinho(1).png',b_1.rect.x,b_1.rect.y,tela,100,0,0)\n bebe_1.add(b_choro)\n bebe_1.remove(b_1)\n morte=True\n musica.stop()\n choro.play()\n mamadeira_2.remove(m_2)\n mamadeira_1.remove(m_1)\n final=font1.render(\"Parabéns, você fez o bebe chorar, seu MONSTRO\", True, (black))\n final_jogar=font3.render(\"Jogar de novo\", True, (blue))\n if morte:\n tela.blit(final,(400 - text.get_width() // 2, 20 - text.get_height() // 2))\n jogar_de_novo=tela.blit(final_jogar,(170 - text1.get_width() // 2, 430 - text1.get_height() // 2))\n if event.type == pygame.MOUSEBUTTONDOWN: \n mouse_posicao=pygame.mouse.get_pos()\n if jogar_de_novo.collidepoint(mouse_posicao):\n cookie= pygame.sprite.Group()\n choro.stop()\n morte=False\n musica.stop()\n pygame.mixer.music.play(-1)\n inicio=True\n control=False\n bebe_2 = pygame.sprite.Group()\n bebe_1 = pygame.sprite.Group()\n b_1= Bebe('bebe bonitinho0.png',x,y-10,tela,80,40,40)\n b_2= Bebe('bebe bonitinho(3).png',ex,ey-10,tela,80,40,40)\n #Recriando mamadeiras\n m_1= Mamadeira('mamadeira2.png',(x+mamadeira_bebe_x),(y+mamadeira_bebe_y),10,(-10),(grav))\n m_2=Mamadeira('mamadeira2.png',(ex+mamadeira_bebe_x),(ey+mamadeira_bebe_y),8,(-10),(grav))\n \n paredebebe1=Parede(x+10,y+plataforma_bebe_y,70,5,red)\n paredebebe2=Parede(ex+10,ey+plataforma_bebe_y,70,5,red)\n #adicionando nos grupos\n bebe_1.add(b_1)\n bebe_2.add(b_2)\n mamadeira_1.add(m_1)\n mamadeira_2.add(m_2)\n elif m_bebe<=0 or m_bebe>=3:\n if m_bebe<=0:\n vez=font3.render(\"Vez da Valentina\", True, (black))\n else:\n vez=font3.render(\"Vez do Enzo\", True, (black))\n tela.blit(vez,(450 - text1.get_width() // 2, 50 - text1.get_height() // 2))\n \n \n \n\n\n \n\n#desenho tela de inicio\n\n \n\n\n#desenho tela dos cpntroles\n \n\n pygame.display.update()\n relogio.tick(FPS)\n \nchoro.stop()\npygame.mixer.music.stop()\nmusica.stop()\n\npygame.display.quit()"
},
{
"alpha_fraction": 0.7032520174980164,
"alphanum_fraction": 0.7764227390289307,
"avg_line_length": 35.900001525878906,
"blob_id": "2bd6c26e00e4721eab68362826b6df40cb78558c",
"content_id": "326155b4d396f999c91caed27af4c87c3e2a863c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 748,
"license_type": "no_license",
"max_line_length": 115,
"num_lines": 20,
"path": "/README.md",
"repo_name": "PedroEid/J0go",
"src_encoding": "UTF-8",
"text": "Esse jogo é de catapulta, baseado em turnos, em qual o seu objetivo e fazer o outro bebe chorar, mas não faça isso,\neles são apenas bebes.\n\nMúsica da tela de inícial: Compilado de músicas infantis\nMúsica durante o jogo: Mii Channel music\n\n\nLink das músicas baixadas:\n<https://www.youtube.com/watch?v=9CFwU5RsD-I>\n<https://www.youtube.com/watch?v=E9s1ltPGQOo>\n\nLink do som do choro:\n<https://www.youtube.com/watch?v=0xRrUVVKigk>\n\nLink das imágens:\n<https://galeria.dibujos.net/familia/bebe-sorprendido-pintado-por--10181865.html>\n<https://br.pinterest.com/pin/457396905878920763/>\n<https://br.pinterest.com/pin/472526185886677299/>\n<https://br.pinterest.com/ardentheavyind/pixel-clouds/>\n<http://hdwallpapers2013.com/sky-wallpaper-2.html>\n"
}
] | 2 |
russchua/image_creator
|
https://github.com/russchua/image_creator
|
1d61f1e63ddec2adf4972fa6dd021c0b50faf516
|
d82c1f7c9c5693960be356b1c4dc830d15283697
|
f9864367b2d25e9257f10ba565cf689f6dbdc9f7
|
refs/heads/master
| 2023-04-04T19:06:54.578214 | 2019-12-07T07:43:07 | 2019-12-07T07:43:07 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6524990200996399,
"alphanum_fraction": 0.6698150038719177,
"avg_line_length": 28.546510696411133,
"blob_id": "b2cf0b6f127b6805d23708cfe289885d43a7e4a2",
"content_id": "c145564b1e5ffdea2ef90455d06256a05774f792",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2541,
"license_type": "no_license",
"max_line_length": 95,
"num_lines": 86,
"path": "/code_args.py",
"repo_name": "russchua/image_creator",
"src_encoding": "UTF-8",
"text": "import cv2\nimport numpy as np\nimport os\nimport argparse\n\n\nparser = argparse.ArgumentParser(\n description='''NIfTI_converter\n Convert files from mnc,ima or mha formats to the universally accepted NIfTI format.\n -i or --input-file: Input MRI ima folder / mnc or mha file path\n -o or --output-dir: A filepath to the output directory. \n -p or --page-num: A filepath to the output directory. \n ''')\nparser.add_argument('-i', '--input-file', type=str, required=True)\nparser.add_argument('-o', '--output-dir', type=str, required=False, default='output')\nparser.add_argument('-d', '--dim', type=int, required=False, default=512)\nargs = parser.parse_args()\n\n\n#Folder locations\ninput_folder = args.input_file\noutput_folder = args.input_file\nwidth = args.dim\nheight = args.dim\ndim = (width, height)\n\n\n#Flipping Functions\ndef flip_horizontal(img):\n horizontal_img = cv2.flip( img, 0 )\n return horizontal_img\n\ndef flip_vertical(img):\n vertical_img = cv2.flip( img, 1 )\n return vertical_img\n\n#Rotation Functions\ndef rotate_left(img):\n h,w,c = img.shape\n empty_img = np.zeros([h,w,c], dtype=np.uint8)\n for i in range(h):\n for j in range(w):\n empty_img[i,j] = img[j-1,i-1]\n empty_img = empty_img[0:h,0:w]\n return empty_img\n\ndef rotate_right(img):\n h,w,c = img.shape\n empty_img = np.zeros([h,w,c], dtype=np.uint8)\n for i in range(h):\n for j in range(w):\n empty_img[i,j] = img[h-j-1,w-i-1]\n empty_img = empty_img[0:h,0:w]\n return empty_img\n\ndef rotate_180(img):\n h,w,c = img.shape\n empty_img = np.zeros([h,w,c], dtype=np.uint8)\n for i in range(h):\n for j in range(w):\n empty_img[i,j] = img[h-i-1,w-j-1]\n empty_img = empty_img[0:h,0:w]\n return empty_img\n\n\n#Write to file\ndef write_files(resized,output_path):\n #Save the resized image\n cv2.imwrite(output_path+\"_resized.jpg\",resized)\n #Save the flipped images\n cv2.imwrite(output_path+\"_horizontal.jpg\",flip_horizontal(resized))\n cv2.imwrite(output_path+\"_vertical.jpg\",flip_vertical(resized))\n #Save the rotated images\n cv2.imwrite(output_path+\"_left.jpg\",rotate_left(resized))\n cv2.imwrite(output_path+\"_right.jpg\",rotate_right(resized))\n cv2.imwrite(output_path+\"_180.jpg\",rotate_180(resized))\n\n\nfor file in os.listdir(input_folder):\n file_name = file[0:-4]\n img = cv2.imread(input_folder+'/'+file, cv2.IMREAD_UNCHANGED)\n resized = cv2.resize(img, dim, interpolation = cv2.INTER_AREA)\n\n #Write images\n output_path=output_folder+\"/\"+file_name\n write_files(resized,output_path)\n"
},
{
"alpha_fraction": 0.7835051417350769,
"alphanum_fraction": 0.7835051417350769,
"avg_line_length": 31.33333396911621,
"blob_id": "af02930f5aced9b2d2983e7db8962507db00d42c",
"content_id": "159ac39788a6e63ba8eb62f26fa375d258095aee",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 194,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 6,
"path": "/README.md",
"repo_name": "russchua/image_creator",
"src_encoding": "UTF-8",
"text": "# image_creator\nAll this does is resize, flip, and rotate images\n\n# Usage\ngit clone https://github.com/calciver/image_creator.git\npython image_creator/code_args.py -i abnormal_elbow_radiograph/\n"
},
{
"alpha_fraction": 0.6015676259994507,
"alphanum_fraction": 0.616917073726654,
"avg_line_length": 30.231578826904297,
"blob_id": "495d4df7e731f034aa1ff9ceb617378b90f960be",
"content_id": "fc8f002339e9beb8a715eaa12d62588d65ed58f2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3062,
"license_type": "no_license",
"max_line_length": 99,
"num_lines": 95,
"path": "/code.py",
"repo_name": "russchua/image_creator",
"src_encoding": "UTF-8",
"text": "import cv2\r\nimport numpy as np\r\nimport os\r\nimport glob\r\nimport tqdm\r\nimport argparse\r\n\r\n#Folder locations\r\ninput_folder = 'input'\r\noutput_folder = 'output'\r\nwidth = 512\r\nheight = 512\r\ndim = (width, height)\r\n\r\n\r\n#Flipping Functions\r\ndef flip_horizontal(img):\r\n horizontal_img = cv2.flip( img, 0 )\r\n return horizontal_img\r\n\r\ndef flip_vertical(img):\r\n vertical_img = cv2.flip( img, 1 )\r\n return vertical_img\r\n\r\n#Rotation Functions\r\ndef rotate_left(img):\r\n h,w,c = img.shape\r\n empty_img = np.zeros([h,w,c], dtype=np.uint8)\r\n for i in range(h):\r\n for j in range(w):\r\n empty_img[i,j] = img[j-1,i-1]\r\n empty_img = empty_img[0:h,0:w]\r\n return empty_img\r\n\r\ndef rotate_right(img):\r\n h,w,c = img.shape\r\n empty_img = np.zeros([h,w,c], dtype=np.uint8)\r\n for i in range(h):\r\n for j in range(w):\r\n empty_img[i,j] = img[h-j-1,w-i-1]\r\n empty_img = empty_img[0:h,0:w]\r\n return empty_img\r\n\r\ndef rotate_180(img):\r\n h,w,c = img.shape\r\n empty_img = np.zeros([h,w,c], dtype=np.uint8)\r\n for i in range(h):\r\n for j in range(w):\r\n empty_img[i,j] = img[h-i-1,w-j-1]\r\n empty_img = empty_img[0:h,0:w]\r\n return empty_img\r\n\r\n\r\n#Write to file\r\ndef write_files(resized,output_path,base_name):\r\n output_path = os.path.join(output_path,base_name)\r\n #Save the resized image\r\n cv2.imwrite(output_path+\"_resized.jpg\",resized)\r\n #Save the flipped images\r\n cv2.imwrite(output_path+\"_horizontal.jpg\",flip_horizontal(resized))\r\n cv2.imwrite(output_path+\"_vertical.jpg\",flip_vertical(resized))\r\n #Save the rotated images\r\n cv2.imwrite(output_path+\"_left.jpg\",rotate_left(resized))\r\n cv2.imwrite(output_path+\"_right.jpg\",rotate_right(resized))\r\n cv2.imwrite(output_path+\"_180.jpg\",rotate_180(resized))\r\n\r\ndef dir_creator(file_path):\r\n if os.path.isdir(file_path):\r\n pass\r\n else:\r\n os.mkdir(file_path)\r\n print(f'The directory {file_path} does not exist. Creating directory.')\r\n\r\nif __name__ == '__main__':\r\n # Parse command-line arguments\r\n parser = argparse.ArgumentParser(\r\n description='Specify your input files and where you want to save output')\r\n parser.add_argument('--data', metavar='DIR', help='Path to the train dataset',default='Normal')\r\n parser.add_argument('--output', metavar='OUT', type=str,\r\n help='Path to the train dataset',default= 'Normal_Augmented')\r\n args = parser.parse_args()\r\n image_files = glob.glob(os.path.join(args.data, '*.jpg'))\r\n dir_creator(args.output)\r\n\r\n\r\n file_count = len(image_files)\r\n\r\n for i in tqdm.tqdm(range(file_count)):\r\n #for file in image_files:\r\n file = image_files[i]\r\n base_name_no_ext = os.path.basename(file)[0:-4]\r\n img = cv2.imread(file, cv2.IMREAD_UNCHANGED)\r\n resized = cv2.resize(img, dim, interpolation = cv2.INTER_AREA)\r\n #output_path = os.path.join(args.output,file_name)\r\n write_files(resized,output_path = args.output,base_name = base_name_no_ext)\r\n"
}
] | 3 |
webclinic017/Stock-Data-Analysis
|
https://github.com/webclinic017/Stock-Data-Analysis
|
827a02fca0ee95a00a2a2ff6f5851bdb28a73e87
|
e65981490962a0ca40f6f33aad6047443c98b40c
|
2a8bd7685ae495bc8eb6d05c6dbcde48882d11bc
|
refs/heads/master
| 2022-04-27T01:29:14.310968 | 2020-03-23T21:57:27 | 2020-03-23T21:57:27 | null | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6700680255889893,
"alphanum_fraction": 0.7176870703697205,
"avg_line_length": 23.5,
"blob_id": "6af7c1d3fe96d5e51e0e910ad159dad17f2d1154",
"content_id": "b12088a3d23a4ea80d33c7e8a24c5a816aadb071",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "SQL",
"length_bytes": 294,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 12,
"path": "/stock_analysis.sql",
"repo_name": "webclinic017/Stock-Data-Analysis",
"src_encoding": "UTF-8",
"text": "create database stock;\ncreate table stock_table (\n\tSymbol varchar(50),\n Company varchar(100),\n Earnings_Date varchar(100),\n EPS_Estimate varchar(10),\n Reported_EPS varchar(20),\n Surprise_percent varchar(20)\n);\nselect * from stock_table;\ndesc stock_table;\ndrop table stock_table;\n"
},
{
"alpha_fraction": 0.5167526006698608,
"alphanum_fraction": 0.5257731676101685,
"avg_line_length": 36.41584014892578,
"blob_id": "96558fee8d677b83821b24b7df69ce4acb29b87c",
"content_id": "23d877eb04056fda2affd086b74078163967c328",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3880,
"license_type": "no_license",
"max_line_length": 146,
"num_lines": 101,
"path": "/stock.py",
"repo_name": "webclinic017/Stock-Data-Analysis",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Thu Mar 19 15:07:27 2020\r\n\r\n@author: User\r\n\"\"\"\r\nimport pandas as pd\r\nimport bs4\r\nimport requests\r\nfrom bs4 import BeautifulSoup\r\nimport mysql.connector\r\n\r\nmydb = mysql.connector.connect (\r\n host = \"localhost\",\r\n user = \"root\",\r\n passwd = \"password123\",\r\n database = \"stock\"\r\n )\r\n\r\nprint (mydb)\r\n\r\nmy_stocks = ['XOM', 'BYND', 'UBER', 'AC.TO', 'AMD']\r\n\r\n#scraping stock price\r\ndef stock_prices():\r\n print('PRICES')\r\n for url in range(0, len(my_stocks)):\r\n r=requests.get('https://finance.yahoo.com/quote/' + my_stocks[url])\r\n soup=bs4.BeautifulSoup(r.text, \"lxml\")\r\n price = soup.find('div',{'class':'My(6px) Pos(r) smartphone_Mt(6px)'}).find('span').text\r\n print(\"The current price of: \" +my_stocks[url]+ ' is ' +str(price))\r\n return \r\n\r\n#scraping PE ratio\r\ndef pe_ratio():\r\n print('\\n')\r\n print('PE RATIO')\r\n for url in range(0, len(my_stocks)):\r\n r=requests.get('https://finance.yahoo.com/quote/' + my_stocks[url])\r\n soup=bs4.BeautifulSoup(r.text, \"lxml\")\r\n pe_ratio = soup.find('td', {'data-test':'PE_RATIO-value'}).find('span').text\r\n print(\"The PE ratio of \" +my_stocks[url]+ ' is ' +str(pe_ratio))\r\n return\r\n \r\n#latest news article\r\ndef news():\r\n print(\"\\nLatest news articles: \")\r\n for url in range(0, len(my_stocks)):\r\n print(\"News for \" +my_stocks[url]+ \":\")\r\n r=requests.get('https://finance.yahoo.com/quote/' + my_stocks[url])\r\n soup=bs4.BeautifulSoup(r.text, \"lxml\")\r\n news_table = soup.find('ul', {'class':'My(0) Ov(h) P(0) Wow(bw)'})\r\n list = ''\r\n i = 0\r\n for url1 in news_table.find_all('a'):\r\n list += url1.get('href')\r\n print('https://ca.finance.yahoo.com' + str(list))\r\n i += 1\r\n if i == 1:\r\n break\r\n \r\n#Scarping relevant infomation from site (quarterly earnings) and importing into database MySQL\r\ndef company_earnings():\r\n for url in range(0, len(my_stocks)):\r\n r=requests.get('https://finance.yahoo.com/calendar/earnings?symbol=' +my_stocks[url])\r\n soup=bs4.BeautifulSoup(r.text, \"lxml\")\r\n table = soup.find('table', {'class':'W(100%)'})\r\n for bigtable in table.find_all('tr'):\r\n for symbol in bigtable.find_all('td', {'aria-label':'Symbol'}):\r\n a = (symbol.text)\r\n for company in bigtable.find_all('td',{'aria-label':'Company'}):\r\n b = (company.text) \r\n for date in bigtable.find_all('td',{'aria-label':'Earnings Date'}):\r\n c = (date.text)\r\n for estimate in bigtable.find_all('td', {'aria-label':'EPS Estimate'}):\r\n d = (estimate.text)\r\n for reported_eps in bigtable.find_all('td', {'aria-label':'Reported EPS'}):\r\n e = (reported_eps.text)\r\n for percent in bigtable.find_all('td',{'aria-label':'Surprise(%)'}):\r\n f = (percent.text)\r\n #print(a, b, c, d, e, f)\r\n stock_table = mydb.cursor()\r\n stock_table_values = \"\"\"insert into stock_table(symbol, company, earnings_date, eps_estimate, reported_eps, surprise_percent) \r\n values (%s, %s, %s, %s, %s, %s)\"\"\"\r\n value = (a, b, c, d, e, f)\r\n stock_table.execute(stock_table_values, value)\r\n mydb.commit()\r\n \r\n \"\"\"#fetching all the data in the table \r\n stock_table_values = \"select * from stock_table\"\"\r\n stock_table.execute(stock_table_values)\r\n for row in stock_table.fetchall():\r\n print(row)\"\"\"\r\n \r\n return table\r\n\r\n\r\nstock_prices()\r\npe_ratio()\r\nnews()\r\ncompany_earnings()\r\n"
},
{
"alpha_fraction": 0.8218390941619873,
"alphanum_fraction": 0.8218390941619873,
"avg_line_length": 173,
"blob_id": "c4db4763d94766bfe7f538c2217c7165e856c0b6",
"content_id": "a62558990db3ea90205c60eb1e89d6f4d0adf5af",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 348,
"license_type": "no_license",
"max_line_length": 325,
"num_lines": 2,
"path": "/README.md",
"repo_name": "webclinic017/Stock-Data-Analysis",
"src_encoding": "UTF-8",
"text": "# Stock-Data-Analysis\nProgrammed in Python utilizing BeautifulSoup for web scraping stock prices, recent articles, and other relevant information. Tables and charts such as company quarterly earnings were web-scraped and imported into a MySQL database. In the near future, I hope to use this data and to develop an algorithm for stock predictions.\n"
}
] | 3 |
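The stock.py in the record above executes one parameterized INSERT per scraped table row from inside deeply nested loops. A minimal sketch, assuming the same stock_table schema defined in stock_analysis.sql, of collecting the rows first and writing them in a single executemany() batch (the connection values and the example row are placeholders, not from the repository):

```python
# Sketch: batch the scraped rows into one parameterized executemany() call.
import mysql.connector

rows = [
    # (symbol, company, earnings_date, eps_estimate, reported_eps, surprise_percent)
    ("XOM", "Exxon Mobil Corporation", "Apr 30, 2020", "-", "-", "-"),  # illustrative row
]

mydb = mysql.connector.connect(host="localhost", user="root",
                               passwd="...", database="stock")
cursor = mydb.cursor()
sql = ("insert into stock_table (symbol, company, earnings_date, "
       "eps_estimate, reported_eps, surprise_percent) "
       "values (%s, %s, %s, %s, %s, %s)")
cursor.executemany(sql, rows)
mydb.commit()
```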
ravgeetdhillon/gnome-hackers | https://github.com/ravgeetdhillon/gnome-hackers | 98c6ade94f57bb5eb47697187df4e246f7ab9d93 | 7b961e0a4f465a1b81607920e34575630c48a90a | 30da5b2ae0372a75b2534c2a078b314311e16666 | refs/heads/master | 2022-12-25T07:58:27.250616 | 2020-01-17T17:20:54 | 2020-01-17T17:20:54 | 225,340,665 | 10 | 1 | MIT | 2019-12-02T09:51:47 | 2020-05-08T23:50:37 | 2022-12-10T10:48:31 | Python |
[
{
"alpha_fraction": 0.469255656003952,
"alphanum_fraction": 0.6957928538322449,
"avg_line_length": 15.263157844543457,
"blob_id": "961e78e266dd294e329849993a307a67480f11d7",
"content_id": "1e52f86cf7d0b832df4511a0f6ed99e5d83a1752",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 309,
"license_type": "permissive",
"max_line_length": 22,
"num_lines": 19,
"path": "/requirements.txt",
"repo_name": "ravgeetdhillon/gnome-hackers",
"src_encoding": "UTF-8",
"text": "autopep8==1.4.4\ncertifi==2019.9.11\nchardet==3.0.4\nClick==7.0\nFlask==1.1.1\nFrozen-Flask==0.15\nidna==2.8\nitsdangerous==1.1.0\nJinja2==2.10.3\nMarkupSafe==1.1.1\nPillow==6.2.1\npycodestyle==2.5.0\npython-dateutil==2.8.1\npython-gitlab==1.13.0\npytz==2019.3\nrequests==2.22.0\nsix==1.13.0\nurllib3==1.25.7\nWerkzeug==0.16.0\n"
},
{
"alpha_fraction": 0.6188965439796448,
"alphanum_fraction": 0.6225982904434204,
"avg_line_length": 27.943878173828125,
"blob_id": "a603651ad77da40910a9c9cf20775490501c4e9a",
"content_id": "fbe4685b08540381080014c6b5694d0c1e3825b9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5673,
"license_type": "permissive",
"max_line_length": 132,
"num_lines": 196,
"path": "/src/fetch.py",
"repo_name": "ravgeetdhillon/gnome-hackers",
"src_encoding": "UTF-8",
"text": "from helpers import save_data, get_date_30_days_now, compress_image\nfrom variables import GITLAB_SERVER, PRIVATE_TOKEN\nimport requests\nimport gitlab\nimport json\nimport time\nimport os\n\n\ndef fetch_users(gl):\n '''\n Download all the users on the https://gitlab.gnome.org.\n '''\n \n start = time.time()\n print('Fetching users.')\n\n users = gl.users.list(all=True)\n users = [user.attributes for user in users]\n save_data(users, 'users.json')\n print(f'Downloaded and saved {len(users)} users.')\n\n finish = time.time()\n print(f'Took {round(finish-start, 2)} seconds.')\n\n\ndef fetch_groups(gl):\n '''\n Download all the groups on the https://gitlab.gnome.org.\n '''\n\n start = time.time()\n print('Fetching groups.')\n\n # donot include the `Archive` group\n blacklist = [4001]\n\n groups = json.loads(requests.get('https://gitlab.gnome.org/api/v4/groups', params={'per_page': 100}).text)\n save_data(groups, 'groups.json')\n print(f'Downloaded and saved {len(groups)} groups.')\n\n # create a list of group_ids for downloading the projects in the each group\n group_ids = []\n for group in groups:\n if group['id'] not in blacklist:\n group_ids.append(group['id'])\n\n finish = time.time()\n print(f'Took {round(finish-start, 2)} seconds.')\n\n return group_ids\n\n\ndef fetch_projects(gl, group_ids):\n '''\n Download all the projects on the https://gitlab.gnome.org.\n '''\n\n start = time.time()\n print('Fetching projects.')\n\n # get the all the projects in each group\n projects = []\n for group_id in group_ids:\n group = gl.groups.get(id=group_id, lazy=True)\n group_projects = group.projects.list(all=True)\n projects += group_projects\n\n projects = [project.attributes for project in projects]\n\n save_data(projects, 'projects.json')\n print(f'Downloaded and saved {len(projects)} projects.')\n\n # create a list of project_ids for downloading the issues, merge_requests, commits in the each project\n project_ids = []\n for project in projects:\n project_ids.append(project['id'])\n\n finish = time.time()\n print(f'Took {round(finish-start, 2)} seconds.')\n\n return project_ids\n\n\ndef fetch_projects_data(gl, project_ids):\n '''\n Download all the merge requests, issues and commits for each project on the https://gitlab.gnome.org.\n '''\n\n start = time.time()\n print('Fetching merge requests, issues and commits.')\n\n merge_requests = []\n issues = []\n commits = []\n\n for index, project_id in enumerate(project_ids):\n\n print(index, end=', ')\n project = gl.projects.get(id=project_id, lazy=True)\n\n since = get_date_30_days_now()\n\n try:\n project_merge_requests = project.mergerequests.list(all=True, query_parameters={'state': 'all', 'created_after': since})\n merge_requests += project_merge_requests\n except Exception as e:\n print(f'{e}. Raised while getting merge requests.')\n\n try:\n project_issues = project.issues.list(all=True, query_parameters={'created_after': since})\n issues += project_issues\n except Exception as e:\n print(f'{e}. Raised while getting issues.')\n\n try:\n project_commits = project.commits.list(all=True, query_parameters={'since': since})\n except Exception as e:\n print(f'{e}. 
Raised while getting commits.')\n\n for commit in project_commits:\n commit = commit.attributes\n commit = gl.projects.get(id=commit['project_id'], lazy=True).commits.get(id=commit['id'])\n commits.append(commit)\n\n merge_requests = [merge_request.attributes for merge_request in merge_requests]\n issues = [issue.attributes for issue in issues]\n commits = [commit.attributes for commit in commits]\n\n save_data(merge_requests, 'merge_requests.json')\n print(f'Downloaded and saved {len(merge_requests)} merge requests.')\n\n save_data(issues, 'issues.json')\n print(f'Downloaded and saved {len(issues)} issues.')\n\n save_data(commits, 'commits.json')\n print(f'Downloaded and saved {len(commits)} commits.')\n\n finish = time.time()\n print(f'Took {round(finish-start, 2)} seconds.')\n\n\ndef fetch_images(users):\n '''\n Download the user avatars.\n '''\n \n start = time.time()\n\n if not os.path.exists('static/img/users'):\n os.mkdir('static/img/users')\n\n for user in users:\n try:\n if 'https://secure.gravatar.com/' in user['avatar_url']:\n avatar_url = user['avatar_url'].split('?')[0]\n image = requests.get(f'{avatar_url}?s=200&d=identicon')\n else:\n image = requests.get(user['avatar_url'])\n \n with open(f'static/img/users/{user[\"id\"]}.png', 'wb') as f:\n f.write(image.content)\n \n compress_image(user['id'])\n \n except Exception as e:\n print(e)\n\n finish = time.time()\n print(f'Took {round(finish-start, 2)} seconds.')\n\n\ndef main():\n '''\n Main function for the fetch.py.\n '''\n\n # create a gitlab object and authenticate it\n gl = gitlab.Gitlab(GITLAB_SERVER, private_token=PRIVATE_TOKEN)\n gl.auth()\n\n # fetch the groups and get their group ids\n group_ids = fetch_groups(gl)\n\n # fetch the projects in each group and get their project ids\n project_ids = fetch_projects(gl, group_ids)\n\n # fetch the project's merge requests, issues and commits\n fetch_projects_data(gl, project_ids)\n\n # fetch all the users on the GNOME Gitlab instance\n fetch_users(gl)\n\n\nif __name__ == '__main__':\n main()\n"
},
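In fetch_groups above, the raw `requests.get('/groups', params={'per_page': 100})` call returns only the first page, so an instance with more than 100 groups would be silently truncated. A minimal sketch, assuming the standard GitLab pagination headers, of walking every page the way python-gitlab's `list(all=True)` does internally:

```python
# Sketch: follow GitLab's X-Next-Page header to paginate the raw /groups call.
import requests

def get_all_groups(base_url="https://gitlab.gnome.org/api/v4"):
    groups, page = [], "1"
    while page:
        r = requests.get(f"{base_url}/groups",
                         params={"per_page": 100, "page": page})
        groups.extend(r.json())
        page = r.headers.get("X-Next-Page")  # empty on the last page
    return groups
```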
{
"alpha_fraction": 0.7466410994529724,
"alphanum_fraction": 0.7907869219779968,
"avg_line_length": 85.83333587646484,
"blob_id": "4cd2d9e91410d66ef9b4bc507e7398e5eddbe198",
"content_id": "fc545494e0f2ff39be1e65b7c727d5e2c5ae1b4e",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 521,
"license_type": "permissive",
"max_line_length": 171,
"num_lines": 6,
"path": "/README.md",
"repo_name": "ravgeetdhillon/gnome-hackers",
"src_encoding": "UTF-8",
"text": "# GNOME Hackers\n\n[](https://github.com/ravgeetdhillon/gnome-hackers/actions)\n[](https://app.netlify.com/sites/gnome-hackers/deploys)\n\nA Leaderboard web app for GNOME hackers contributing to the projects hosted by GNOME. Available at [https://gnome-hackers.netlify.com/](https://gnome-hackers.netlify.com/)\n"
},
{
"alpha_fraction": 0.4905293881893158,
"alphanum_fraction": 0.5106847882270813,
"avg_line_length": 33.17427444458008,
"blob_id": "b331e8ce2106db5701309ad3c3f7e808bc522657",
"content_id": "7b9c4a7ddb884a833fa79d88e95ee6c598e21af7",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8236,
"license_type": "permissive",
"max_line_length": 85,
"num_lines": 241,
"path": "/src/helpers.py",
"repo_name": "ravgeetdhillon/gnome-hackers",
"src_encoding": "UTF-8",
"text": "from datetime import datetime, timedelta\nfrom variables import POINTS\nfrom PIL import Image\nimport dateutil.parser\nimport pytz\nimport json\nimport os\n\n\ndef days_from_now(date):\n '''\n Calculate the days from today to a past date.\n '''\n\n date = dateutil.parser.parse(date)\n now = pytz.utc.localize(datetime.utcnow())\n days = (now - date).days\n\n return days\n\n\ndef get_date_30_days_now():\n '''\n Get a past date which is 30 days from today.\n '''\n \n date = datetime.now() - timedelta(days=30)\n return date.strftime(\"%Y-%m-%dT%H:%M:%SZ\")\n\n\ndef load_data(file_name, directory='data'):\n '''\n Load the specified file from the given directory(optional).\n '''\n\n with open(f'{directory}/{file_name}', 'r') as f:\n data = json.load(f)\n\n return data\n\n\ndef save_data(data, file_name, directory='data'):\n '''\n Save the data to the specified file.\n '''\n\n if not os.path.exists(directory):\n os.mkdir(directory)\n\n with open(f'{directory}/{file_name}', 'w', encoding='utf-8') as f:\n json.dump(data, f, ensure_ascii=True, indent=2)\n\n\ndef create_user(data, method):\n '''\n Create a new user.\n '''\n\n new_user = {\n 'id': '',\n 'name': '',\n 'avatar_url': '',\n 'web_url': '',\n 'user_name': '',\n 'points': {\n 'days_1': 0,\n 'days_7': 0,\n 'days_15': 0,\n 'days_30': 0,\n },\n 'awards': {\n 'gold': 0,\n 'silver': 0,\n 'bronze': 0,\n 'top10': 0,\n },\n 'activity': {\n 'issues': 0,\n 'commits': 0,\n 'merge_requests': 0,\n }\n }\n\n new_user = update_user_name(new_user, data, method)\n new_user = update_user_points(new_user, data, method)\n\n return new_user\n\n\ndef update_user_info(user, data):\n '''\n Update the user's info.\n '''\n\n user['avatar_url'] = data['avatar_url']\n user['web_url'] = data['web_url']\n user['id'] = data['id']\n user['user_name'] = data['username']\n user['name'] = user['name'].title()\n\n return user\n\n\ndef update_user_name(user, data, method):\n '''\n Update the user's name.\n '''\n\n if method == 'commit':\n user['name'] = data['author_name']\n\n elif method == 'mr':\n user['name'] = data['author']['name']\n\n elif method == 'issue':\n user['name'] = data['author']['name']\n\n return user\n\n\ndef update_user_points(user, data, method):\n '''\n Update the user points according to the date and method.\n '''\n\n if method == 'commit':\n\n commit = data\n commit_date = commit['created_at']\n user['activity']['commits'] += 1\n\n if days_from_now(commit_date) <= 1:\n user['points']['days_1'] += commit['stats']['total'] * POINTS['commit']\n user['points']['days_7'] += commit['stats']['total'] * POINTS['commit']\n user['points']['days_15'] += commit['stats']['total'] * POINTS['commit']\n user['points']['days_30'] += commit['stats']['total'] * POINTS['commit']\n elif days_from_now(commit_date) <= 7:\n user['points']['days_7'] += commit['stats']['total'] * POINTS['commit']\n user['points']['days_15'] += commit['stats']['total'] * POINTS['commit']\n user['points']['days_30'] += commit['stats']['total'] * POINTS['commit']\n elif days_from_now(commit_date) <= 15:\n user['points']['days_15'] += commit['stats']['total'] * POINTS['commit']\n user['points']['days_30'] += commit['stats']['total'] * POINTS['commit']\n elif days_from_now(commit_date) <= 30:\n user['points']['days_30'] += commit['stats']['total'] * POINTS['commit']\n\n elif method == 'mr':\n\n mr = data\n mr_date = mr['created_at']\n mr_state = mr['state']\n user['activity']['merge_requests'] += 1\n\n if mr_state == 'merged':\n if days_from_now(mr_date) <= 1:\n user['points']['days_1'] += 
POINTS['closed_mr']\n user['points']['days_7'] += POINTS['closed_mr']\n user['points']['days_15'] += POINTS['closed_mr']\n user['points']['days_30'] += POINTS['closed_mr']\n elif days_from_now(mr_date) <= 7:\n user['points']['days_7'] += POINTS['closed_mr']\n user['points']['days_15'] += POINTS['closed_mr']\n user['points']['days_30'] += POINTS['closed_mr']\n elif days_from_now(mr_date) <= 15:\n user['points']['days_15'] += POINTS['closed_mr']\n user['points']['days_30'] += POINTS['closed_mr']\n elif days_from_now(mr_date) <= 30:\n user['points']['days_30'] += POINTS['closed_mr']\n\n elif mr_state == 'opened':\n if days_from_now(mr_date) <= 1:\n user['points']['days_1'] += POINTS['opened_mr']\n user['points']['days_7'] += POINTS['opened_mr']\n user['points']['days_15'] += POINTS['opened_mr']\n user['points']['days_30'] += POINTS['opened_mr']\n elif days_from_now(mr_date) <= 7:\n user['points']['days_7'] += POINTS['opened_mr']\n user['points']['days_15'] += POINTS['opened_mr']\n user['points']['days_30'] += POINTS['opened_mr']\n elif days_from_now(mr_date) <= 15:\n user['points']['days_15'] += POINTS['opened_mr']\n user['points']['days_30'] += POINTS['opened_mr']\n elif days_from_now(mr_date) <= 30:\n user['points']['days_30'] += POINTS['opened_mr']\n\n elif method == 'issue':\n\n issue = data\n issue_date = issue['created_at']\n issue_state = issue['state']\n user['activity']['issues'] += 1\n\n if issue_state == 'opened':\n if days_from_now(issue_date) <= 1:\n user['points']['days_1'] += POINTS['opened_issue']\n user['points']['days_7'] += POINTS['opened_issue']\n user['points']['days_15'] += POINTS['opened_issue']\n user['points']['days_30'] += POINTS['opened_issue']\n elif days_from_now(issue_date) <= 7:\n user['points']['days_7'] += POINTS['opened_issue']\n user['points']['days_15'] += POINTS['opened_issue']\n user['points']['days_30'] += POINTS['opened_issue']\n elif days_from_now(issue_date) <= 15:\n user['points']['days_15'] += POINTS['opened_issue']\n user['points']['days_30'] += POINTS['opened_issue']\n elif days_from_now(issue_date) <= 30:\n user['points']['days_30'] += POINTS['opened_issue']\n\n elif issue_state == 'closed':\n if days_from_now(issue_date) <= 1:\n user['points']['days_1'] += POINTS['closed_issue']\n user['points']['days_7'] += POINTS['closed_issue']\n user['points']['days_15'] += POINTS['closed_issue']\n user['points']['days_30'] += POINTS['closed_issue']\n elif days_from_now(issue_date) <= 7:\n user['points']['days_7'] += POINTS['closed_issue']\n user['points']['days_15'] += POINTS['closed_issue']\n user['points']['days_30'] += POINTS['closed_issue']\n elif days_from_now(issue_date) <= 15:\n user['points']['days_15'] += POINTS['closed_issue']\n user['points']['days_30'] += POINTS['closed_issue']\n elif days_from_now(issue_date) <= 30:\n user['points']['days_30'] += POINTS['closed_issue']\n \n user['points']['days_1'] = round(user['points']['days_1'])\n user['points']['days_7'] = round(user['points']['days_7'])\n user['points']['days_15'] = round(user['points']['days_15'])\n user['points']['days_30'] = round(user['points']['days_30'])\n\n return user\n\n\ndef compress_image(image_name):\n '''\n Reduce the image file size by reducing the image dimensions to 80x80. \n '''\n\n image = Image.open(f'static/img/users/{image_name}.png')\n x = min(48, image.size[0])\n image = image.resize((x, x), Image.LANCZOS)\n image.save(f'static/img/users/{image_name}_small.png', optimize=True, quality=95)\n"
},
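update_user_points above repeats the same four-tier window arithmetic for every event type and state. A sketch (not the repository's code) of the equivalent loop, relying only on the fact that an event `days` old scores in every window at least that long:

```python
# Sketch: collapse the repeated days_1/7/15/30 branches into one loop.
def add_points(points, days, value):
    # points is the user's dict: {'days_1': 0, 'days_7': 0, 'days_15': 0, 'days_30': 0}
    for window in (1, 7, 15, 30):
        if days <= window:
            points[f'days_{window}'] += value
    return points

# e.g. a merged MR from 5 days ago:
# add_points(user['points'], 5, POINTS['closed_mr'])
```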
{
"alpha_fraction": 0.5063334703445435,
"alphanum_fraction": 0.5118110179901123,
"avg_line_length": 28.654821395874023,
"blob_id": "d7dfebeb36ff2c435d943651f0f8a0cabd9baec9",
"content_id": "65e642cd2392a6ec8e85b3b8cd4c5c42adf0d806",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5842,
"license_type": "permissive",
"max_line_length": 130,
"num_lines": 197,
"path": "/src/process.py",
"repo_name": "ravgeetdhillon/gnome-hackers",
"src_encoding": "UTF-8",
"text": "from helpers import load_data, create_user, save_data, days_from_now, update_user_points, update_user_info\nfrom fetch import fetch_images\nimport json\nimport requests\n\n\ndef process_commits(users, commits):\n '''\n Process the commits and award users points.\n '''\n\n for commit in commits:\n for user in users:\n if set(commit['author_name'].split()).issubset( set(user['name'].split()) ):\n user = update_user_points(user, commit, 'commit')\n break\n else:\n new_user = create_user(commit, 'commit')\n users.append(new_user)\n\n return users\n\n\ndef process_merge_requests(users, merge_requests):\n '''\n Process the merge requests and award users points.\n '''\n\n for mr in merge_requests:\n for user in users:\n if set(user['name'].split()).issubset( set(mr['author']['name'].split()) ):\n user = update_user_points(user, mr, 'mr')\n break\n else:\n new_user = create_user(mr, 'mr')\n users.append(new_user)\n\n return users\n\n\ndef process_issues(users, issues):\n '''\n Process the issues and award users points.\n '''\n\n for index, issue in enumerate(issues):\n\n if issue['state'] == 'closed':\n \n for user in users:\n if issue['closed_by'] is not None:\n if set(user['name'].split()).issubset( set(issue['closed_by']['name'].split()) ):\n user = update_user_points(user, issue, 'issue')\n break\n else:\n break\n else:\n new_user = create_user(issue, 'issue')\n users.append(new_user)\n\n elif issue['state'] == 'opened':\n\n for user in users:\n if issue['author'] is not None:\n if set(user['name'].split()).issubset( set(issue['author']['name'].split()) ):\n user = update_user_points(user, issue, 'issue')\n break\n else:\n break\n else:\n new_user = create_user(issue, 'issue')\n users.append(new_user)\n\n return users\n\n\ndef process_users(users, all_users):\n '''\n Process the users and update their Gitlab related information.\n '''\n\n for user in users:\n for user_data in all_users:\n if set(user['name'].split()).issubset( set(user_data['name'].split()) ):\n user = update_user_info(user, user_data)\n\n return users\n\n\ndef process_awards():\n '''\n Process the awards for top 10 users.\n '''\n\n try:\n # awards = load_data('awards.json')\n awards = requests.get('https://raw.githubusercontent.com/ravgeetdhillon/gnome-hackers/website/artifacts/data/awards.json')\n awards = json.loads(awards.text)\n except:\n awards = []\n \n # sort the data for each criteria and save them in their respective json files\n criteria = ['days_1', 'days_7', 'days_15', 'days_30']\n for key in criteria:\n \n users = load_data('processed_users.json')\n users = sorted(users, key=lambda k: k['points'][key], reverse=True)[:10]\n\n for user in users:\n for u in awards:\n if user['id'] == u['id']:\n break\n else:\n awards.append(\n {\n 'id': user['id'],\n 'awards': {\n 'gold': 0,\n 'silver': 0,\n 'bronze': 0,\n 'top10': 0,\n }\n }\n )\n\n for u in awards:\n for index, user in enumerate(users, start=1):\n if u['id'] == user['id']:\n \n if index == 1:\n u['awards']['gold'] += 1\n elif index == 2:\n u['awards']['silver'] += 1\n elif index == 3:\n u['awards']['bronze'] += 1\n u['awards']['top10'] += 1\n\n break\n \n save_data(awards, 'awards.json')\n\n\ndef add_awards():\n '''\n Add the processed awards to the processed users.\n '''\n\n awards = load_data('awards.json')\n users = load_data('processed_users.json')\n \n for user in users:\n for u in awards:\n if u['id'] == user['id']:\n user['awards'] = u['awards']\n break\n \n save_data(users, 'processed_users.json')\n\n return users\n\n\ndef main():\n '''\n Main 
function for the process.py.\n '''\n\n # initialize the users array to store the data about the users contributing to the GNOME\n users = []\n\n # load the commits, merge requests and issues\n commits = load_data('commits.json')\n merge_requests = load_data('merge_requests.json')\n issues = load_data('issues.json')\n all_users = load_data('users.json')\n\n # process the commits, merge requests and issues and generate points for the users\n users = process_issues(users, issues)\n users = process_merge_requests(users, merge_requests)\n users = process_commits(users, commits)\n users = process_users(users, all_users)\n\n # download the avatar image from each user\n fetch_images(users)\n\n save_data(users, 'processed_users.json')\n\n\nif __name__ == '__main__':\n main()\n process_awards()\n users = add_awards()\n\n # sort the data for each criteria and save them in their respective json files\n criteria = ['days_1', 'days_7', 'days_15', 'days_30']\n for key in criteria:\n users = sorted(users, key=lambda k: k['points'][key], reverse=True)\n save_data(users, f'sorted_users_acc_{key}.json')\n"
},
{
"alpha_fraction": 0.6503340601921082,
"alphanum_fraction": 0.7082405090332031,
"avg_line_length": 22.63157844543457,
"blob_id": "7959a7a7174bf3b9c8b47223075346878be916f4",
"content_id": "98c4b3f4c6c368a4539398aeb2fbdf7378d5a1a9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 449,
"license_type": "permissive",
"max_line_length": 87,
"num_lines": 19,
"path": "/src/freeze.py",
"repo_name": "ravgeetdhillon/gnome-hackers",
"src_encoding": "UTF-8",
"text": "from flask_frozen import Freezer\nfrom app import app\nfrom shutil import copyfile\nfrom helpers import load_data\n\n\nfreezer = Freezer(app)\n\n\[email protected]_generator\ndef user_details():\n users = load_data('sorted_users_acc_days_30.json')\n for user in users:\n yield {'user_name': user['user_name']}\n\n\nif __name__ == '__main__':\n freezer.freeze()\n copyfile('static/google4e1a0869f2d05873.html', 'build/google4e1a0869f2d05873.html')\n"
},
{
"alpha_fraction": 0.43595263361930847,
"alphanum_fraction": 0.45425188541412354,
"avg_line_length": 24.452054977416992,
"blob_id": "8536c189f3bc3538b873523e22dac13eb2ad3075",
"content_id": "5118aabcd09d9a65cb8431f936a57ef396b704d9",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1858,
"license_type": "permissive",
"max_line_length": 156,
"num_lines": 73,
"path": "/src/app.py",
"repo_name": "ravgeetdhillon/gnome-hackers",
"src_encoding": "UTF-8",
"text": "from flask import Flask, render_template\nfrom variables import SITE_CONFIG\nfrom helpers import load_data\n\n\napp = Flask(__name__)\n\n\[email protected]('/')\ndef index():\n\n users = load_data('sorted_users_acc_days_1.json')\n users_days_1 = users[:10]\n users = load_data('sorted_users_acc_days_7.json')\n users_days_7 = users[:10]\n users = load_data('sorted_users_acc_days_15.json')\n users_days_15 = users[:10]\n users = load_data('sorted_users_acc_days_30.json')\n users_days_30 = users[:10]\n \n data = {\n 'page': {\n 'stats': [\n {\n 'type': 'Today',\n 'key': 'days_1',\n 'users': users_days_1,\n },\n {\n 'type': 'Week',\n 'key': 'days_7',\n 'users': users_days_7,\n },\n {\n 'type': 'Fortnight',\n 'key': 'days_15',\n 'users': users_days_15,\n },\n {\n 'type': 'Month',\n 'key': 'days_30',\n 'users': users_days_30,\n },\n ]\n },\n 'site': SITE_CONFIG\n }\n\n return render_template('index.html', data=data)\n\n\[email protected]('/<user_name>/')\ndef user_details(user_name):\n\n users = load_data('sorted_users_acc_days_30.json')\n for user in users:\n if user['user_name'] == user_name:\n break\n else:\n return 'Not Found.<br> If you think this link is broken, please file an <a href=\"https://github.com/ravgeetdhillon/gnome-hackers/issues\">issue</a>.'\n\n data = {\n 'page': {\n 'user': user\n },\n 'site': SITE_CONFIG\n }\n\n return render_template('user.html', data=data)\n\n\nif __name__ == '__main__':\n app.run()\n"
},
{
"alpha_fraction": 0.5995085835456848,
"alphanum_fraction": 0.6093366146087646,
"avg_line_length": 28.071428298950195,
"blob_id": "c1b15902a8ca5ecf629f0a0e8862ec7913b34ddf",
"content_id": "2db0e95b5292531fce9833e4c3083cd2eced7195",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 814,
"license_type": "permissive",
"max_line_length": 101,
"num_lines": 28,
"path": "/src/variables.py",
"repo_name": "ravgeetdhillon/gnome-hackers",
"src_encoding": "UTF-8",
"text": "import os\n\nGITLAB_SERVER = 'https://gitlab.gnome.org'\nPRIVATE_TOKEN = os.environ.get('GITLAB_PRIVATE_TOKEN')\n\nPOINTS = {\n 'commit': 0.01,\n 'opened_mr': 5,\n 'closed_mr': 10,\n 'opened_issue': 1,\n 'closed_issue': 2,\n}\n\nSITE_CONFIG = {\n 'title': 'GNOME Hackers',\n 'tagline': 'Leaderboard for hackers contributing to the GNOME',\n 'description': 'A Leaderboard web app for hackers contributing to the projects hosted by GNOME.',\n 'author': {\n 'name': 'Ravgeet Dhillon',\n 'website': 'https://ravgeetdhillon.github.io/',\n 'facebook': 'https://facebook.com/ravgeet.dhillon/',\n 'twitter': 'https://twitter.com/ravgeetdhillon/',\n 'twitter_username': 'ravgeetdhillon',\n },\n 'github': 'https://github.com/ravgeetdhillon/gnome-hackers',\n 'url': '',\n 'img': '',\n}\n"
}
] | 8 |
tztz8888/DjangoProjects | https://github.com/tztz8888/DjangoProjects | b6f0f5dca493bdd090b4ec935f63d047c5c6cc45 | 60581f99981de2f6d65ec0caafeb335d8ee71b7b | 2979e91d4eca2134020ebfe92c86ffb903c800f7 | refs/heads/master | 2020-04-15T05:47:19.924327 | 2016-09-13T06:42:12 | 2016-09-13T06:42:12 | 68,078,766 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7655786275863647,
"alphanum_fraction": 0.7655786275863647,
"avg_line_length": 25,
"blob_id": "455c6eac0d056ccb4a80b9384b083ad584c4aa5b",
"content_id": "3a1f8811a727241a1d9b38cfbb09e35b4b0c1a82",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 337,
"license_type": "no_license",
"max_line_length": 57,
"num_lines": 13,
"path": "/mysite/polls/views.py",
"repo_name": "tztz8888/DjangoProjects",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render\nfrom django.http import HttpResponse,HttpResponseRedirect\nfrom django.urls import reverse\n\ndef index(request):\n return render(request, 'index.html')\n\ndef result(request):\n return render(request, 'result.html')\n\ndef good(request):\n return render(request, 'good.html')\n# Create your views here."
}
] | 1 |
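The record above contains only polls/views.py. A hypothetical polls/urls.py that would wire these three views, assuming Django 2.0+ path() syntax; the route paths and names are illustrative, not taken from the repository:

```python
# Sketch: URLconf for the index, result, and good views above.
from django.urls import path
from . import views

urlpatterns = [
    path('', views.index, name='index'),
    path('result/', views.result, name='result'),
    path('good/', views.good, name='good'),
]
```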
GL-Li/medical_notes | https://github.com/GL-Li/medical_notes | 93c731a27639255c799d70e1c7fcaf78fec7d9f0 | a64fc541eafe4047618b67367a615d6c2df8dd60 | ec70c63cff857270d20c0352fde1c1faf2956ba1 | refs/heads/master | 2020-12-26T15:43:48.683449 | 2020-03-04T16:07:45 | 2020-03-04T16:07:45 | 237,553,168 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6196857690811157,
"alphanum_fraction": 0.6326247453689575,
"avg_line_length": 25.072288513183594,
"blob_id": "2e503ef30c6e4d8c9f1e002c2ee4e3755825c101",
"content_id": "9edca89d2be42e1d324ddfb4c27ac63e65ba5013",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 2164,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 83,
"path": "/eda_analysis.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "library(data.table)\nlibrary(magrittr)\nlibrary(stringr)\nlibrary(ggplot2)\nlibrary(wordcloud)\nlibrary(RColorBrewer)\nsource(\"utilities.R\")\n\nmt <- fread(\"data/mtsamples_scraped.csv\", header = TRUE)\n\n# duplicated note\ndup_note <- mt[, .N, by = note]\ntable(as.integer(dup_note$N))\n\n# headers used in transcription ===============================================\n# all headers in individual word\nheaders <- mt$mt_headers %>%\n str_split(\", \") %>%\n unlist() %>%\n .[!. == \"\"]\n\n# count of each headers\nheader_count <- sort(table(headers), decreasing = TRUE)\n\n# header count distribution\nggplot() + \n geom_bar(aes(header_count)) +\n xlim(0, 100)\n\nheader_top <- header_count[1:50]\n\nggplot() + \n geom_col(aes(x = factor(names(header_top), levels = names(header_top)),\n y = as.integer(header_top))) +\n coord_flip()\n\n\nwordcloud(names(header_count), header_count)\nset.seed(1234)\nwordcloud(words = names(header_count), \n freq = header_count, \n scale = c(1.5, 0.2),\n min.freq = 5,\n rot.per = 0.2,\n random.order=FALSE, \n colors=brewer.pal(8, \"Dark2\"))\n\n# group the same headers but in different names ===============================\nnames(header_count)\n# ANESTHESIA\n# PROCEDURE\n# PREOPERATIVE DIAGNOSIS\n# POSTOPERATIVE DIAGNOSIS\n# PHYSICAL EXAMINATION\n# HISTORY OF PRESENT ILLNESS\n# IMPRESSION\n# ALLERGIES\n# PAST MEDICAL HISTORY\n# REVIEW OF SYSTEMS\n# SOCIAL HISTORY\n# PLAN\n# COMPLICATIONS\n# MEDICATIONS\n# FINDINGS\n# FAMILY HISTORY => social history?\n# ESTIMATED BLOOD LOSS\n# PROCEDURE PERFORMED\n# POSTOPERATIVE DIAGNOSES => POSTOPERATIVE DIAGNOSIS (SES -> SIS)\n# PREOPERATIVE DIAGNOSES => PREOPERATIVE DIAGNOSIS\n# CHIEF COMPLAINT\n# ASSESSMENT\n# HISTORY ?? many other histories above\n# DESCRIPTION OF PROCEDURE\n# \n\n\n# medical notes ===============================================================\n# select 100 from each of Gastroenterology and Neurology for machine learning\nnotes = mt[sample_type %in% c(\"Gastroenterology\", \"Neurology\"), \n .(sample_type, medical_transcription)] %>%\n .[, .SD[1:100], by = sample_type]\n\nfwrite(notes, file=\"selected_notes.csv\", row.names = FALSE)\n"
},
{
"alpha_fraction": 0.4425453543663025,
"alphanum_fraction": 0.47173285484313965,
"avg_line_length": 35.57692337036133,
"blob_id": "c6558c70a7156245146975f551793ea975752fdb",
"content_id": "3bcc8f19b0cb349f31b632123480133edde1a97b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 3803,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 104,
"path": "/shiny-apps/controllers/word_cloud.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "type_1 <- reactive(input$cloud_type_1)\ncol_1 <- reactive(input$cloud_method_1)\n\ntype_2 <- reactive(input$cloud_type_2)\ncol_2 <- reactive(input$cloud_method_2)\n\noutput$wordcloud_1 <- renderCachedPlot(\n {\n # type <- input$cloud_type_1\n # col < input$cloud_method_1\n word_count <- get_word_count(type_1(), col_1())\n #min_freq <- ifelse(col == \"Both\", 10, 3)\n \n # add a point to make the cloud look better\n if (type_1() == \"Neurology\" & col_1() == \"top_tfidf\"){\n tmp <- data.table(word = \"\", count = 30)\n word_count <- rbindlist(list(tmp, word_count))\n }\n \n set.seed(1234) # reproducible cloud\n par(mar = rep(0, 4)) # set wordcloud margin to 0\n wordcloud(words = word_count[, word], \n freq = word_count[, count], \n scale = c(3.5, 0.3), \n max.words = 300,\n min.freq = 2, \n rot.per = 0.2, \n random.order=FALSE, \n colors=brewer.pal(8, \"Dark2\"))\n \n },\n bg=\"transparent\",\n cacheKeyExpr = {list(input$cloud_type_1, input$cloud_method_1)}\n)\n\noutput$wordcloud_2 <- renderCachedPlot(\n {\n # type <- input$cloud_type_2\n # col = input$cloud_method_2\n word_count <- get_word_count(type_2(), col_2())\n #min_freq <- ifelse(col == \"Both\", 10, 3)\n \n # add a point to make the cloud look better. The maximum count was 9\n # too small compared to 33 in Gastroentoogy \n if (type_2() == \"Neurology\" & col_2() == \"top_tfidf\"){\n tmp <- data.table(word = \"\", count = 30)\n word_count <- rbindlist(list(tmp, word_count))\n }\n \n set.seed(1234) # reproducible cloud\n par(mar = rep(0, 4)) # set wordcloud margin to 0, base plot\n wordcloud(words = word_count[, word], \n freq = word_count[, count], \n scale = c(3.5, 0.3), \n max.words = 300,\n min.freq = 2, \n rot.per = 0.2, \n random.order=FALSE, \n colors=brewer.pal(8, \"Dark2\"))\n \n },\n bg=\"transparent\",\n cacheKeyExpr = {list(input$cloud_type_2, input$cloud_method_2)}\n)\n\noutput$bar_1 <- renderPlot(\n {\n word_count <- get_word_count(type_1(), col_1()) %>%\n .[1:10]\n ggplot(word_count, aes(word, count)) +\n geom_col(fill = \"grey80\") +\n geom_text(aes(word, count, label = count), hjust = 1, color = \"gray20\") +\n scale_y_continuous(expand = c(0, 0)) +\n coord_flip() +\n labs(x = NULL,\n y = NULL,\n title = \"Count of Top 10 Words\") +\n theme(panel.background = element_blank(),\n axis.ticks = element_blank(),\n axis.text.x = element_blank(),\n axis.text.y = element_text(size = 12))\n }, \n bg = \"transparent\"\n)\n\noutput$bar_2 <- renderPlot(\n {\n word_count <- get_word_count(type_2(), col_2()) %>%\n .[1:10]\n ggplot(word_count, aes(word, count)) +\n geom_col(fill = \"grey80\") +\n geom_text(aes(word, count, label = count), hjust = 1, color = \"gray20\") +\n scale_y_continuous(expand = c(0, 0)) +\n coord_flip() +\n labs(x = NULL,\n y = NULL,\n title = \"Count of Top 10 Words\") +\n theme(panel.background = element_blank(),\n axis.ticks = element_blank(),\n axis.text.x = element_blank(),\n axis.text.y = element_text(size = 12))\n }, \n bg = \"transparent\"\n)"
},
{
"alpha_fraction": 0.705456554889679,
"alphanum_fraction": 0.7082405090332031,
"avg_line_length": 31.071428298950195,
"blob_id": "92855e5b26bd9b0eac52d2d08252a6adf7bd0c8c",
"content_id": "e6c990be36b906a861fbab985c39766a98ba4761",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1796,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 56,
"path": "/python/extract_medical_named_entity_with_amazon_comprehend_medical.py",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "\"\"\"\nThis script is to extract medical entities using Amazon Comprehed Medical.\n\nWarning: keep in mind that the service is very expensive.\nThink carefully about the cost before using the service.\n\nYou need a AWS account and has enabled the AWS CLI.\nCheck for details here:\nhttps://docs.aws.amazon.com/cli/latest/userguide/install-cliv1.html\n\"\"\"\n#%% load modules and connect to Amazon Comprehend Medical\nimport boto3\nimport json\nfrom tqdm import tqdm\n\n# custom module\nfrom utilities import read_notes\n\n# use your own AWS account's region_name\nclient = boto3.client(service_name='comprehendmedical',\n region_name='us-east-1')\n\n# we will only extract medical entites (mes) of clinical notes of two\n# specialties: gastroenterology and neurology\n#dat = pd.read_csv(\"data/mtsample_gastroenterology_neurology.csv\")\n#notes = list(dat.note)\n\n#%% prepare data\ngas_neu_urol = read_notes(\"../data/mtsamples_scraped.csv\",\n specialties=[\"Gastroenterology\", \"Neurology\", \"Urology\"],\n randomize=False,\n clean=False)\nnotes = list(gas_neu_urol.note)\n\n\n#%% run comprehend medical and save result\n# Think thrice before ruiing, very expensive\n# save result after extraction\nconfirm = input(\"Type I am not drunk to run Amazon Comprehend Medical: \")\n\nif confirm == \"I am not drunk\":\n mes = []\n for note in tqdm(notes):\n me = client.detect_entities_v2(Text=note)\n me = me[\"Entities\"]\n mes.append(me)\n\n\n\n# save the extracted medical entities. Again, expensive\nwith open(\"comprehend_medical_gastroenterology_neurology_urology.txt\", \"w\") as f:\n json.dump(mes, f)\n\n## to use the extracted data, reload the saved json\n#with open(\"comprehend_medical_neurology_gastroenterology.txt\", \"r\") as f:\n# mes_loaded = json.load(f)\n"
},
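The script above dumps one list of entities per clinical note. A minimal sketch of flattening that saved output, assuming the detect_entities_v2 response shape in which each entity carries Text, Category, Type, and Score keys:

```python
# Sketch: inspect the saved Comprehend Medical entities.
import json

with open("comprehend_medical_gastroenterology_neurology_urology.txt") as f:
    mes = json.load(f)  # one list of entity dicts per note

for note_entities in mes[:1]:          # first note only, for illustration
    for ent in note_entities:
        print(ent["Text"], ent["Category"], ent["Type"], round(ent["Score"], 3))
```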
{
"alpha_fraction": 0.7629310488700867,
"alphanum_fraction": 0.7629310488700867,
"avg_line_length": 45.400001525878906,
"blob_id": "918498950a8d7a594d670dfa1769cb6a463c361f",
"content_id": "1bed421bd54d307e1a33294944617e5450948608",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "RMarkdown",
"length_bytes": 232,
"license_type": "no_license",
"max_line_length": 211,
"num_lines": 5,
"path": "/shiny-apps/Rmd/classification_prediction.Rmd",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "---\ntitle: \" \"\n---\n\nA radial kernel SVM model is deployed and ready to make predictions. Provide the clinical notes by uploading a plain text file or pasting into the text input area. You can view and download the prediction below.\n"
},
{
"alpha_fraction": 0.5825173854827881,
"alphanum_fraction": 0.6063734889030457,
"avg_line_length": 25.242990493774414,
"blob_id": "e6cc906b180e182d8bcd9a07d07ef580f08e3c2a",
"content_id": "e2aa387247dd1bd2525691473ee835dfc4de7afb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 5617,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 214,
"path": "/machine-learning/gas_neu_uro_classification_tfidf_svm.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "# Use support vector machine to train model \n# three specialties: Gastroenterology, Neurology, and Urology\n# Use clinical notes or Amazon Comprehend Medical entities\n\n\nlibrary(e1071)\nlibrary(progress)\nsource(\"utilities.R\")\n\n\n# prepare data =================================================================\nset.seed(12345)\ndat <- read_notes(\n \"data/amazon_medacy_mtsamples_gastr_neuro_urolo.csv\",\n duplicate_rm = T,\n cols_keep = \"all\",\n y_label = TRUE\n)\ny <- as.factor(dat$y) # svm requires y to be factor\n\n\n\n\n# find best n_pca ==============================================================\n# result: keep first 20 - 50 principle components have the best accuracy.\n# We will use n_pca = 25 considering accuracy and speed.\n# choose from note or amazon_me\ntfidf <- tfidf_tm(dat$amazon_me, sparsity = 0.95)\n# run only if need pca\ntfidf <- prcomp(tfidf)$x\nX <- tfidf\n\nset.seed(1111)\nin_train <- caret::createDataPartition(y, p = 0.7, list = FALSE)\n\nX_train <- X[in_train,]\nX_test <- X[-in_train,]\n\ny_train <- y[in_train]\ny_test <- y[-in_train]\n\n\nsvm_pca <- function(n_pc, X_train, y_train, n_split){\n # Calculate the accuracy for number of principle components used in SVM\n #\n # n_pc: int, number of principle components to keep\n # X_train, y_train: trainig data\n # n_split: int, times of repeating train-validation split\n \n # split train and validation data\n X_train <- X_train[, 1:n_pc]\n \n # create 10 split index\n intrains <- caret::createDataPartition(\n y_train, times = n_split, p = 0.6, list = FALSE\n )\n \n acc <- 0\n for (i in 1:n_split){\n intrain <- intrains[, i]\n Xtrain <- X_train[intrain,]\n ytrain <- y_train[intrain]\n Xval <- X_train[-intrain,]\n yval <- y_train[-intrain]\n mdl <- svm(Xtrain, ytrain)\n ypred <- predict(mdl, Xval)\n tb <- table(truth = yval, predict = ypred)\n accuracy <- sum(diag(tb)) / length(yval)\n acc <- acc + accuracy / n_split\n }\n \n return(c(n_pc = n_pc, acc = accuracy))\n}\n\n\nplot_n_pca <- function(n_pcas, X_train, y_train, n_split = 100){\n # Plot accuracy ~ n_pca to find best n_pca values\n #\n # n_pcas: int, vector of number of principle components to keep\n # n_split: times of repearing train-val split to get average accuracy\n \n n <- length(n_pcas)\n pc_acc <- data.frame(n_pc = rep(0, n), acc = rep(0, n))\n \n pb <- progress_bar$new()\n for (i in 1:n) {\n pb$tick()\n n_pc <- n_pcas[i]\n m <- svm_pca(n_pc, X_train, y_train, n_split)\n pc_acc[i, ] <- m\n }\n \n plot(pc_acc$n_pc, pc_acc$acc, type = \"p\")\n}\n\n\n# # find the best n_pca\nn_pcas <- c(2:50, 2 * (26:50), 5 * (21:45))\nplot_n_pca(n_pcas, X_train, y_train, 10)\n\n\n\n# one model pca====\ntfidf <- tfidf_tm(dat$amazon_me, sparsity = 0.992)\n# run only if need pca\nX <- prcomp(tfidf)$x\n\nset.seed(1111)\nin_train <- caret::createDataPartition(y, p = 0.7, list = FALSE)\n\nX_train <- X[in_train,]\nX_test <- X[-in_train,]\n\ny_train <- y[in_train]\ny_test <- y[-in_train]\n\n# visually pick 25 as the best n_pca to train model\nmdl <- svm(X_train[, 1:25], y_train)\ny_pred <- predict(mdl, X_test[, 1:25])\ntable(truth = y_test, predict = y_pred)\n\n\n\nplot_pc1_pc2(X_test, color = y_test)\nplot_pc1_pc2(X_test, color = y_pred)\nplot_pc1_pc2(X_test, color = y_test == y_pred, color_map = c(\"black\", \"gray\"))\n\nclasses_x <- c(\"Gastronenterology\", \"Neurology\", \"Urology\")\nclasses_y <- c(\"Gastro-\\nenterology\", \"Neurology\", \"Urology\")\nplot_confusion_matrix(y_test, y_pred, classes_x, classes_y)\n\n\n# one model tfidf ====\n# result: svm does 
not work in this case\ntfidf <- tfidf_tm(dat$amazon_me, sparsity = 0.992)\nX <- tfidf\n\nset.seed(1111)\nin_train <- caret::createDataPartition(y, p = 0.7, list = FALSE)\n\nX_train <- X[in_train,]\nX_test <- X[-in_train,]\n\ny_train <- y[in_train]\ny_test <- y[-in_train]\n\nmdl <- svm(X_train, y_train)\ny_pred <- predict(mdl, X_test)\ntable(truth = y_test, predict = y_pred)\n\nclasses_x <- c(\"Gastronenterology\", \"Neurology\", \"Urology\")\nclasses_y <- c(\"Gastro-\\nenterology\", \"Neurology\", \"Urology\")\nplot_confusion_matrix(y_test, y_pred, classes_x, classes_y, type = \"precision\")\n\n\n# average f1 score and accuracy ==================\n# for convenience, copy data praparation here\nsvm_metrics <- function(corpus){\n # Calculate accuracy and f1 score\n #\n # corpus: string, \"note\" or \"amazon_me\"\n \n # choose from note or amazon_me\n tfidf <- tfidf_tm(dat[, get(corpus)])\n # run only if need pca\n X <- prcomp(tfidf)$x[, 1:25]\n y <- as.factor(dat$y) # svm requires y to be factor\n \n n_rep <- 100\n df_acc_f1 <- data.frame(\n acc = numeric(n_rep),\n f1_gas = numeric(n_rep),\n f1_neu = numeric(n_rep),\n f1_uro = numeric(n_rep)\n )\n set.seed(6789)\n in_trains <- caret::createDataPartition(y, times = n_rep, p = 0.7)\n for (i in 1:100){\n cat(i)\n in_train <- in_trains[[i]]\n X_train <- X[in_train, ]\n y_train <- y[in_train]\n X_test <- X[-in_train,]\n y_test <- y[-in_train]\n \n mdl <- svm(X_train, y_train)\n y_pred <- predict(mdl, X_test)\n \n tb <- table(y_test, y_pred)\n acc <- sum(diag(tb)) / length(y_test)\n \n f1_score <- function(tb, k){\n recall <- diag(tb)[k] / sum(y_test == k - 1)\n precision <- diag(tb)[k] / sum(y_pred == k - 1)\n f1 <- 2 * (recall * precision) / (recall + precision)\n }\n \n f1_gas <- f1_score(tb, 1)\n f1_neu <- f1_score(tb, 2)\n f1_uro <- f1_score(tb, 3)\n \n df_acc_f1[i, ] <- c(acc, f1_gas, f1_neu, f1_uro)\n }\n \n return(df_acc_f1)\n}\n# mean and standard deviation of accuracy and f1 score\ndf_acc_f1 <- svm_metrics(\"note\")\nsapply(df_acc_f1, mean)\nsapply(df_acc_f1, sd)\n\ndf_acc_f1 <- svm_metrics(\"amazon_me\")\nsapply(df_acc_f1, mean)\nsapply(df_acc_f1, sd)\n\n"
},
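The R script above chains TFIDF, PCA down to about 25 components, and a radial-kernel SVM. For comparison, the same pipeline sketched with scikit-learn; TruncatedSVD stands in for prcomp (it skips mean-centering, the usual choice for sparse TFIDF), and all names here are illustrative:

```python
# Sketch: TFIDF -> 25-component SVD -> RBF SVM, mirroring the R pipeline.
from sklearn.pipeline import make_pipeline
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.decomposition import TruncatedSVD
from sklearn.svm import SVC

clf = make_pipeline(
    TfidfVectorizer(stop_words="english"),
    TruncatedSVD(n_components=25, random_state=0),
    SVC(kernel="rbf"),
)
# clf.fit(train_texts, train_labels)
# y_pred = clf.predict(test_texts)
```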
{
"alpha_fraction": 0.565606951713562,
"alphanum_fraction": 0.5906069278717041,
"avg_line_length": 26.90322494506836,
"blob_id": "002638411ce4e9723b561b16c026a58570195a2d",
"content_id": "7cbe0854dc57fd03efbb0b76737f0ca4c5086625",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 6920,
"license_type": "no_license",
"max_line_length": 135,
"num_lines": 248,
"path": "/machine-learning/multiclass_classification_embedding_neural_network.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "library(tensorflow)\nlibrary(keras)\nsource(\"utilities.R\")\n\n\n# prepare data =================================================================\nspecialties <- c(\n \"Gastroenterology\", \"Obstetrics / Gynecology\", \"Cardiovascular / Pulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\ncols <- c(\"specialty\", \"note\")\ndat <- read_notes(\n \"data/mtsamples_multi_class.csv\", \n duplicate_rm = TRUE,\n specialties = specialties,\n cols_keep = cols,\n id = TRUE,\n y_label = TRUE\n)\n\n# parameter tuning =============================================================\ncorpus <- dat[, note]\nlabels <- dat$y\ncnn_tune <- function(corpus,\n labels,\n max_words = 3000, \n seq_length = 500,\n dim_emb = 64, \n dropout = 0.2,\n n_filters = 16){\n # train and evaluate a cnn model using word embedding\n #\n # Arguments:\n # corpus: string vector, document to trained\n # y: integer, labels of documents in corpus\n # max_word: int, number of words to keep for text_tokenizer()\n # seq_length: int, length of each document after pad_sequence()\n # dim_emb: int, length of word embedding\n # droput: numeric, ratio to drop in dropout layer\n # n_filters: int, number of filters in layer_conv_1d()\n #\n # Return:\n # \n tk <- text_tokenizer(num_words = max_words)\n fit_text_tokenizer(tk, notes)\n X <- texts_to_sequences(tk, notes)\n X <- pad_sequences(X, seq_length)\n y_class <- labels\n n_class <- length(unique(y_class))\n y <- to_categorical(y_class, n_class)\n \n # split X and y into train and test\n in_train <- caret::createDataPartition(y_class, p = 0.7, list = FALSE)\n X_train <- X[in_train,]\n y_train <- y[in_train,] \n X_test <- X[-in_train,]\n y_test <- y[-in_train,]\n y_test_class <- y_class[-in_train]\n\n model <- keras_model_sequential() %>% \n layer_embedding(input_dim = max_words,\n output_dim = dim_emb,\n input_length = seq_length) %>%\n layer_dropout(dropout) %>%\n layer_conv_1d(filters = n_filters,\n kernel_size = 3,\n #activation = \"relu\",\n padding = \"valid\",\n strides = 1) %>%\n layer_dropout(dropout) %>%\n layer_global_average_pooling_1d() %>%\n layer_dense(units = 64, activation = \"relu\") %>%\n layer_dropout(dropout) %>%\n # output layer\n layer_dense(n_class, activation = \"softmax\")\n \n # summary(model)\n \n # compile, fit, and evaluate model in place\n compile(model,\n loss = \"categorical_crossentropy\",\n optimizer = \"adam\",\n metrics = \"accuracy\"\n )\n \n fit(model,\n x = X_train, \n y = y_train,\n epochs = 20,\n batch_size = 32,\n validation_split = 0.3,\n verbose = 3\n )\n \n eva <- evaluate(model, X_test, y_test, verbose = 0)\n \n return(eva$accuracy)\n}\n\n\n# tune the parameters\nplot_cnn <- function(){\n corpus <- dat[, note]\n labels <- dat$y\n \n n_rep <- 100\n param_df <- data.frame(\n max_words = integer(n_rep),\n seq_length = integer(n_rep),\n dim_emb = integer(n_rep), \n dropout = numeric(n_rep),\n n_filters = integer(n_rep),\n accuracy = numeric(n_rep)\n )\n \n for (i in 1:n_rep){\n cat(i)\n maxwords <- sample(3000:4000, 1)\n seqlength <- sample(300:400, 1)\n dimemb <- sample(100:200, 1)\n dropout <- sample(1:5/10, 1)\n nfilters <- sample(20:200, 1)\n \n acc <- cnn_tune(\n corpus,\n labels, \n max_words = maxwords,\n seq_length = seqlength,\n dim_emb = dimemb,\n dropout = dropout,\n n_filters = nfilters\n )\n \n param_df[i, ] <- c(maxwords, seqlength, dimemb, dropout, nfilters, acc)\n }\n return(param_df)\n}\n\n\n# eyeballing the best paramters\n# results: \n# first run\n# max_words: 3500 from range 2000:4000\n# seq_length: 350 from 
range 200:500\n# dim_enb: 120 from range 16:128\n# dropout: 0.4 from range 0.1:0.5\n# n_filters: 32 from range 8:64\n\n# second run\n# max_words: xxxx from range 3000:4000\n# seq_length: xxx from range 300:400\n# dim_enb: xx from range 100:200\n# dropout: xxx from range 0.1:0.5\n# n_filters: xx from range 20:100\n\ntry_1 <- plot_cnn()\ntry_2 <- plot_cnn()\n\nplot(param_df$max_words, param_df$accuracy)\nplot(param_df$seq_length, param_df$accuracy)\nplot(param_df$dim_emb, param_df$accuracy)\nplot(param_df$dropout, param_df$accuracy)\nplot(param_df$n_filters, param_df$accuracy)\n\n\n# one model ====================================================================\nmax_words = 3500\nseq_length = 350\ndim_emb = 120\ndropout = 0.4\nn_filters = 32\n\nnotes <- dat$note\ntk <- text_tokenizer(num_words = max_words)\nfit_text_tokenizer(tk, notes)\nX <- texts_to_sequences(tk, notes)\nX <- pad_sequences(X, seq_length)\ny_class <- dat$y\nn_class <- length(unique(y_class))\ny <- to_categorical(y_class, n_class)\n\n# split X and y into train and test\nset.seed(11111)\nin_train <- caret::createDataPartition(y_class, p = 0.7, list = FALSE)\nX_train <- X[in_train,]\ny_train <- y[in_train,] \nX_test <- X[-in_train,]\ny_test <- y[-in_train,]\ny_test_class <- y_class[-in_train]\n\nmodel <- keras_model_sequential() %>% \n layer_embedding(input_dim = max_words,\n output_dim = dim_emb,\n input_length = seq_length) %>%\n layer_dropout(dropout) %>%\n layer_conv_1d(filters = n_filters,\n kernel_size = 3,\n #activation = \"relu\",\n padding = \"valid\",\n strides = 1) %>%\n layer_dropout(dropout) %>%\n layer_global_average_pooling_1d() %>%\n layer_dense(units = 64, activation = \"relu\") %>%\n layer_dropout(dropout) %>%\n # output layer\n layer_dense(n_class, activation = \"softmax\")\n\n# summary(model)\n\n# compile, fit, and evaluate model in place\ncompile(model,\n loss = \"categorical_crossentropy\",\n optimizer = \"adam\",\n metrics = \"accuracy\"\n)\n\nfit(model,\n x = X_train, \n y = y_train,\n epochs = 20,\n batch_size = 32,\n validation_split = 0.3,\n verbose = 3\n)\n\n\ny_pred <- predict(model, X_test)\ny_pred_class <- predict_classes(model, X_test)\ntable(y_test_class, y_pred_class)\n\nclasses_x <- c(\n \"Gastroenterology\", \"Obstetrics\\nGynecology\", \"Cardiovascular\\nPulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\nclasses_y <- c(\n \"Gastro-\\nenterology\", \"Obstetrics\\nGynecology\", \"Cardiovascular\\nPulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\nggplot_multiclass_nn_embedding_recall <- plot_confusion_matrix(y_test_class, y_pred_class, classes_x, classes_y)\nggplot_multiclass_nn_embedding_precision <- plot_confusion_matrix(y_test_class, y_pred_class, classes_x, classes_y, type = \"precision\")\n\naccuracy_nn_embedding <- accuracy(y_test_class, y_pred_class)\n\n\nsave(ggplot_multiclass_nn_embedding_recall, \n ggplot_multiclass_nn_embedding_precision,\n accuracy_nn_embedding,\n file = \"shiny-apps/RData/ggplot_multiclass_nn_embedding.RData\")\n"
},
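For readers working in Python, the embedding plus Conv1D architecture above translates almost line for line to tf.keras. This sketch assumes TensorFlow 2.x and copies the script's chosen hyperparameters (max_words=3500, seq_length=350, dim_emb=120, dropout=0.4, n_filters=32, six classes); it is illustrative, not the repository's code:

```python
# Sketch: the same embedding + Conv1D text classifier in tf.keras.
import tensorflow as tf
from tensorflow.keras import layers

max_words, seq_length, dim_emb = 3500, 350, 120
dropout, n_filters, n_class = 0.4, 32, 6

model = tf.keras.Sequential([
    layers.Embedding(max_words, dim_emb, input_length=seq_length),
    layers.Dropout(dropout),
    layers.Conv1D(n_filters, 3, padding="valid", strides=1),
    layers.Dropout(dropout),
    layers.GlobalAveragePooling1D(),
    layers.Dense(64, activation="relu"),
    layers.Dropout(dropout),
    layers.Dense(n_class, activation="softmax"),
])
model.compile(loss="categorical_crossentropy", optimizer="adam",
              metrics=["accuracy"])
# model.fit(X_train, y_train, epochs=20, batch_size=32, validation_split=0.3)
```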
{
"alpha_fraction": 0.5157380700111389,
"alphanum_fraction": 0.5466715097427368,
"avg_line_length": 27.78645896911621,
"blob_id": "44f473db261215dd37358f1c8e40a00d4a919826",
"content_id": "4c6747d8109f73965b2740fae8d7868393330d59",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 5528,
"license_type": "no_license",
"max_line_length": 128,
"num_lines": 192,
"path": "/machine-learning/z_clustering_kmeans_three_classes.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "library(data.table)\nlibrary(magrittr)\nlibrary(stringr)\nlibrary(tm)\nlibrary(caret)\nlibrary(progress)\nlibrary(dendextend)\nlibrary(plotly)\n\nsource(\"utilities.R\")\n\n# Prepare data =================================================================\ndat <- read_notes(\"data/amazon_medacy_mtsamples_gastr_neuro_urolo.csv\",\n cols_keep = c(\"id\", \"amazon_me\", \"specialty\", \"note\"),\n clean = TRUE,\n y_label = TRUE)\n\n# tfidf matrix\ntfidf_note <- tfidf_tm(dat$note)\ntfidf_amazon <- tfidf_tm(dat$amazon_me)\ny_true <- dat$y\n\n\n# test how many clusters =======================================================\n# tsne analysis https://github.com/jkrijthe/Rtsne \ntfidf = tfidf_amazon\nK = 10\ninertia <- c()\nfor (k in 1:K){\n print(k)\n km <- kmeans(tfidf, k, iter.max = 100)\n inertia <- c(inertia, km$tot.withinss)\n}\nplot(1:K, inertia)\n\n\n# use amaxon_me, assume we know 3 clusters =====================================\ntfidf = tfidf_amazon\nset.seed(3721)\nkm <- kmeans(tfidf, 3, iter.max = 100)\n\ny_clusters = km$cluster\nme_1 <- dat$amazon_me[y_clusters == 1]\nme_2 <- dat$amazon_me[y_clusters == 2]\nme_3 <- dat$amazon_me[y_clusters == 3]\n\ncount_1 <- word_count(me_1)[1:100]\ncount_2 <- word_count(me_2)[1:100]\ncount_3 <- word_count(me_3)[1:100]\n\ncompare_count <- data.frame(me_1 = count_1,\n me_2 = count_2,\n me_3 = count_3)\n\n# how to match cluster to true class\ny_clusters <- km$cluster\ntable(y_true, y_clusters)\n# y_clusters\n# y 1 2 3\n# 0 76 87 67\n# 1 202 0 20\n# 2 51 0 103\n# best match would be\n# 1 --> 1, 2 --> 0, 3 --> 2\ny_clusters[y_clusters == 3] <- 0\ny_clusters[y_clusters == 1] <- 1\ny_clusters[y_clusters == 2] <- 2\n\ntable(y_true, y_clusters)\ncaret::confusionMatrix(as.factor(y_clusters), as.factor(y_true))\n\n# kmeans clustering ============================================================\n# https://uc-r.github.io/kmeans_clustering\nkmeans_metrics <- function(tfidf, iter=100, n_rep=1){\n # repeat kmeans to get average metrics based on known y label\n pred <- rep(0, nrow(dt))\n \n pb <- progress_bar$new(total = n_rep)\n for (i in 1:n_rep){\n pb$tick()\n k <- kmeans(dt, 2, iter.max = iter)\n pred_1 <- k$cluster\n pred_2 <- (3 - pred_1) %% 3 # 1 --> 2 and 2 --> 1\n # as kmeans randomly assign 1 and 2 to clusters, we will only take the \n # one with higher F1 value\n f <- function(pred){\n cm <- confusionMatrix(as.factor(pred), as.factor(target))\n cm$byClass[\"F1\"]\n } \n f1 <- f(pred_1)\n f2 <- f(pred_2)\n \n if (f1 > f2) {\n pred_0 <- pred_1\n } else {\n pred_0 <- pred_2\n }\n \n pred <- pred + pred_0 / n_rep\n }\n pred <- round(pred)\n confusionMatrix(as.factor(pred), as.factor(target))\n}\n\n\n\n# tsne ========================================================================\nset.seed(123)\ntsne <- Rtsne(tfidf, theta = 0.5)\nplot(tsne$Y, col = as.factor(y), asp = 1)\n\n\n# pca ==========================================================================\nset.seed(123)\npca <- prcomp(tfidf)\nplot(pca)\nplot(pca$x[, 1], pca$x[, 2], col = y + 1)\n\n# 3D plot\np <- plot_ly(as.data.frame(pca$x), x = ~PC1, y = ~PC2, z = ~PC3, color = ~as.factor(y), colors = c('red', 'green', \"black\")) %>%\n add_markers(size = 1) %>%\n layout(scene = list(xaxis = list(title = 'PC1'),\n yaxis = list(title = 'PC2'),\n zaxis = list(title = 'PC3')))\n\np\n\n\n\n\n# tfidf -- must normalize each sample to vector length 1\ntfidf_mtx <- df_tm[[\"tfidf_matrix\"]]\ntfidf_norm <- tfidf_mtx / sqrt(rowSums(tfidf_mtx * tfidf_mtx))\nget_kmeans(tfidf_norm)\n\n# tf\ntf_mtx <- 
df_tm[[\"tf_matrix\"]]\ntf_norm <- tf_mtx / sqrt(rowSums(tf_mtx * tf_mtx))\nget_kmeans(tf_norm)\n\n# hierarchical clustering ======================================================\n# https://cran.r-project.org/web/packages/textmineR/vignettes/b_document_clustering.html\n# https://uc-r.github.io/hc_clustering \nset.seed(1234)\ntfidf = tfidf_note\ncos_sim <- tfidf %*% t(tfidf)\npar(mar = rep(0, 4))\nimage(cos_sim * 256, col = gray(seq(0, 1, length = 256)))\nimage(cos_sim * 256, col = rgb(seq(0, 1, length = 256), 0, 0))\n\ndist <- as.dist(1 - cos_sim)\n# ward.D and ward.D2 are good for clustering, slight difference\nhc <- hclust(dist, \"ward.D\")\n#hc <- hclust(dist, \"ward.D2\") # one more correct\n# all below not good\n# hc <- hclust(dist, \"single\")\n# hc <- hclust(dist, \"complete\")\n# hc <- hclust(dist, \"average\")\n# hc <- hclust(dist, \"mcquitty\")\n# hc <- hclust(dist, \"median\")\n# hc <- hclust(dist, \"centroid\")\n\n\n# plot dendrogram\n# https://cran.r-project.org/web/packages/dendextend/vignettes/FAQ.html#introduction\ndend <- as.dendrogram(hc)\n\n# use true y to assign color\nsample_colors <- rep(character(0), nrow(tfidf))\nsample_colors[y == 0] <- \"red\"\nsample_colors[y == 1] <- \"blue\"\nsample_colors[y == 2] <- \"orange\"\n\ndend <- assign_values_to_leaves_edgePar(\n dend=dend, \n value = sample_colors[order.dendrogram(dend)], \n edgePar = \"col\"\n)\npar(mar = c(0, 0, 2, 0))\nplot(dend, main = \"Medical Notes Clustering\",\n leaflab = \"none\", yaxt = \"none\")\nrect.hclust(hc, 3, border = \"green\")\n\n\n\n# confusion matrix\nclustering <- cutree(hc, 2)\nconfusionMatrix(as.factor(clustering), as.factor(target))\n\n# save RData ==================================================================\nsave(tfidf_norm, cos_sim, dend, hc, clustering, target,\n file = \"./shiny-apps//RData/clustering.RData\")\n\n"
},
{
"alpha_fraction": 0.7849944233894348,
"alphanum_fraction": 0.7894737124443054,
"avg_line_length": 80,
"blob_id": "2ad389e1ed37600b0aace1caf28a2ded446d977c",
"content_id": "dd7812d6d706a64bc09f04ea52a4f3afb6c433a2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "RMarkdown",
"length_bytes": 893,
"license_type": "no_license",
"max_line_length": 382,
"num_lines": 11,
"path": "/shiny-apps/Rmd/clustering_kmeans.Rmd",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "---\ntitle: \" \"\n---\n\n### Method\nFor each corpus, we first generate a term frequency inverse document frequency (TFIDF) matrix for each corpus, in which each row is normalized to a vector of unit length. K-means clustering analysis is then performed on this matrix. For comparison, we also perform principle component (PCA) analysis on the TFIDF matrix to get a PCA matrix and run K-means clustering on this matrix.\n\nThe most challenge step for K-means clustering analysis is to determine the number of clusters. We take the liberty to assume that we already know the number to be three as the corpus has three medical specialties.\n\n### Results\nThe K-means clustering with and without PCA changes the identified specialty of a few samples but overall accuracy is almost the same. Using the medical named entities extracted with Amazon Comprehend Medical improves the accuracy from 64% to 70%. \n\n"
},
{
"alpha_fraction": 0.6496227979660034,
"alphanum_fraction": 0.6714165806770325,
"avg_line_length": 23.85416603088379,
"blob_id": "4ce81917db6cf67b45e8d9b4cf6fef3c5d709a2c",
"content_id": "e3a18760433a65fd300bc30f851e047333e058ed",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1193,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 48,
"path": "/python/extract_medical_named_entity_with_medacy.py",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "#%% load modules and model\nfrom medacy.model.model import Model\nimport pandas as pd\nfrom tqdm import tqdm\nimport json\n\nmdl = Model.load_external('medacy_model_clinical_notes')\n\n#%% prepare data\n\ndat = pd.read_csv(\"data/mtsamples_gastroenterology_neurology_urology.csv\")\nnotes = list(dat.note)\n\n#%% get annotaions\n# an annotation looks like:\n# [('Drug', 1405, 1413, 'peroxide'),\n# ('Drug', 2016, 2022, 'Vicryl'),\n# ('Route', 2023, 2035, 'subcutaneous')]\n# So the element of index 3 is the keyword\n\n\ndef get_annotations(notes):\n # notes: medical notes as a list of strings\n mes = []\n for note in tqdm(notes):\n annotation = mdl.predict(note)\n mes.append(annotation)\n return(mes)\n\n\nannotations = get_annotations(notes)\n\n#%% get medical entities from annotations\ndef get_medacy_me(annotations):\n # annotations: list of medaCy annotations\n mes = []\n for annotation in annotations:\n me = [ann[3].replace(\" \", \"-\") for ann in list(annotation)]\n me = \" \".join(me)\n mes.append(me)\n return(mes)\n\n\nmes = get_medacy_me(annotations)\n\n#%% save results\nwith open(\"data/medacy_gastroenterology_neurology_urology.txt\", \"w\") as f:\n json.dump(mes, f)\n"
},
{
"alpha_fraction": 0.5504148006439209,
"alphanum_fraction": 0.5575941205024719,
"avg_line_length": 30.17910385131836,
"blob_id": "e57f5977b1a0685ffda3f2d75a46b472afdd598b",
"content_id": "f13eba0d89d4e90651bd5d20d49c1791209f8793",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 6268,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 201,
"path": "/shiny-apps/global.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "library(data.table)\nlibrary(magrittr)\nlibrary(stringr)\nlibrary(dplyr)\nlibrary(ggplot2)\nlibrary(wordcloud)\nlibrary(RColorBrewer)\nlibrary(dendextend)\nlibrary(text2vec)\nlibrary(e1071)\n\n# clinical note ===============================================================\n# load all mtsamples as scraped for all specialties and then keep three columns \n# for shiny\nload(\"RData/mtsamples_all.RData\")\n\n# word_stats ==================================================================\n# load word statistics including n_documents, n_time, avg_tf, avg_tfidf for all\n# words in three selected specialites\nload(\"RData/word_stats.RData\")\n\n# note_bows ===================================================================\n# amazon_me, medacy_me, top_tf, top_tfidf for three selected specialties\nload(\"RData/note_bows.RData\")\n\n# get word count for wordcloud plot\nget_word_count <- function(type, col){\n bow <- note_bows[specialty == type, get(col)]\n if (type == \"All\"){\n bow = note_bows[, get(col)]\n }\n count <- tolower(bow) %>%\n str_split(\", | \") %>%\n unlist() %>%\n table() %>%\n as.data.table() %>%\n set_colnames(c(\"word\", \"count\")) %>%\n .[!word %in% tm::stopwords()] %>% # remove stopwords\n .[word != \"mg\"] %>% # unit of medication, too common for medacy \n .[word != \"\"] %>% # medaCy generate nothing from some notes\n .[order(-count)] %>%\n .[count > 1] %>% # delete useless info to save plotting time\n .[, word := factor(word, levels = word)]\n}\n\n# clustering ==============================================================\n# load dat, y_true, tfidf, pca for a particular read_notes randomnization to \n# keep data consistencey across pca, hclutering, and kmeans\nload(\"RData/pca_note_amazon_gas_neu_uro.RData\")\n\n# load pca results\nload(\"RData/pca_results.RData\")\n\nplot_pc1_pc2 <- function(pca, \n color = NULL, \n color_map = c(\"red\", \"blue\", \"cyan\"),\n pch = NULL,\n title = NULL){\n # Plot samples in PC1-PC2 space\n #\n # Arguments:\n # pca: matrix, pca of tfidf\n # color: int vector to mark the color of each sample. 
can be y_true, \n # y_clusters, y_pred, or other vector of the same length as pca\n # color_map: string, color to map color\n # pch: int vector, shape of data point\n # title: string, plot title\n \n PC1 <- pca[, 1]\n PC2 <- pca[, 2]\n \n sample_colors <- rep(\"black\", nrow(pca))\n if (!is.null(color)){\n sample_colors[color == 0] <- color_map[1]\n sample_colors[color == 1] <- color_map[2]\n sample_colors[color == 2] <- color_map[3]\n }\n \n if (!is.null(color) & !is.null(pch)){\n plot(PC1, PC2, col = sample_colors, pch = pch, main = title)\n } else if (!is.null(color)){\n plot(PC1, PC2, col = sample_colors, main = title)\n } else if (!is.null(pch)){\n plot(PC1, PC2, pch = pch, main = title)\n } else {\n plot(PC1, PC2, main = title)\n }\n}\n\n# h_cluster ====================================================================\nload(\"RData/hcluster_results.RData\")\n\nplot_dend <- function(hc, title = NULL){\n # plot dendgram of hierarchical clustering\n # hc: hclust created with function hc <- hclust(dist, \"ward.D\")\n dend <- as.dendrogram(hc)\n \n # use true y to assign color\n sample_colors <- rep(NA, length(hc$labels))\n sample_colors[y_true == 0] <- \"red\"\n sample_colors[y_true == 1] <- \"blue\"\n sample_colors[y_true == 2] <- \"cyan\"\n \n dend <- assign_values_to_leaves_edgePar(\n dend=dend, \n value = sample_colors[order.dendrogram(dend)], \n edgePar = \"col\"\n )\n \n par(mar = c(0, 0, 2, 0))\n plot(dend, main = title,\n leaflab = \"none\", yaxt = \"none\")\n legend(\"topleft\", \n legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n lty = 1,\n cex = 0.8,\n col = c(\"red\", \"blue\", \"cyan\"),\n bty = \"n\")\n rect.hclust(hc, 3, border = \"lightgreen\")\n}\n\n\n# kmeans =======================================================================\n# load kmeans results. pca model using first two PCs\n#y_pred_note_kmeans, y_pred_note_kmeans_pca,\n# y_pred_amazon_kmeans, y_pred_amazon_kmeans_pca\nload(\"RData/kmeans_results.RData\")\n\n\n# classification ===============================================================\n# .. multiclass ====\nload(\"RData/ggplot_multiclass_svm.RData\")\nload(\"RData/ggplot_multiclass_xgb.RData\")\nload(\"RData/ggplot_multiclass_nn.RData\")\nload(\"RData/ggplot_multiclass_nn_embedding.RData\")\n\n# .. load trained models ====\nload(\"RData/multiclass_classification_tfidf_pca_models.RData\")\nsvm_model_deploy <- readRDS(\"trained_models/svm_model_deploy.rds\")\n\n\n# .. 
text2vec functions ====\nget_iter <- function(corpus, ids = NULL, stem = TRUE){\n # create iterator for text2vec\n #\n # Arguments:\n # corpus: string vector\n # ids: id of corpus\n # stem: bool, use stem tokenizer if TRUE, word tokenizer if not\n #\n # Return:\n # a text2vec iterator\n #\n \n if (stem){\n tokenizer <- function(x) {\n word_tokenizer(x) %>% \n lapply( function(x) SnowballC::wordStem(x, language=\"en\"))\n }\n } else {\n tokenizer <- word_tokenizer\n }\n it <- itoken(corpus, tolower, tokenizer, ids = ids)\n}\n\n\nget_vocab <- function(corpus){\n # Create text2vec vocabularoy of a corpus\n it <- get_iter(corpus)\n vocab <- create_vocabulary(it, stopwords = tm::stopwords())\n}\n\n\nget_vectorizer <- function(corpus){\n # Create text2vec vectorizer from corpus for use in create_dtm\n vocab <- get_vocab(corpus)\n vocab_vectorizer(vocab)\n}\n\n\nget_dtm <- function(corpus, vectorizer){\n # Get dtm of a corpus using existing vectorizer\n it <- get_iter(corpus)\n dtm <- create_dtm(it, vectorizer)\n}\n\n\nfit_tfidf <- function(dtm){\n # create a tfidf model using dtm\n mdl <- TfIdf$new()\n fit_transform(dtm, mdl) # fit does not work\n return(mdl)\n}\n\n\ntransform_tfidf <- function(dtm, tfidf_model){\n # Get normalized tfidf matrix of dtm using tfidf_model\n tfidf <- transform(dtm, tfidf_model)\n tfidf <- as.matrix(tfidf)\n tfidf <- tfidf / sqrt(rowSums(tfidf * tfidf))\n}\n\n"
},
{
"alpha_fraction": 0.6569579243659973,
"alphanum_fraction": 0.6569579243659973,
"avg_line_length": 27.136363983154297,
"blob_id": "95adabfc4e6bdaa20dc4cbcdf49e7c4f9000c2ef",
"content_id": "43d894b15cef6c39045e9b6f181b17fa550e935d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 618,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 22,
"path": "/shiny-apps/server.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "library(shiny)\nlibrary(DT)\n\nserver <- function(input, output, session){\n # overview\n source(\"./controllers/clinical_notes.R\", local = TRUE)\n \n # bag of words and text analysis\n source(\"./controllers/bow_text.R\", local = TRUE)\n \n # word cloud\n source(\"./controllers/word_cloud.R\", local = TRUE)\n \n # clustering\n source(\"./controllers/clustering.R\", local = TRUE)\n \n # classification\n source(\"./controllers/classification_multiclass.R\", local = TRUE)\n \n # multiclass model deploy model \n source(\"./controllers/classification_multiclass_deploy_models.R\", local = TRUE)\n}"
},
{
"alpha_fraction": 0.4258793890476227,
"alphanum_fraction": 0.43592965602874756,
"avg_line_length": 26.929824829101562,
"blob_id": "387ed364bf7caf6592e07eb1479363202629ba4b",
"content_id": "0a598e0daf08ceba8f4df6acfecd579f97759eca",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 1592,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 57,
"path": "/shiny-apps/controllers/bow_text.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "# bag of words =================================================================\nbow_proxy <- dataTableProxy(\"bows\")\n\noutput$bows <- DT::renderDataTable({\n note_bows\n},\nrownames = FALSE,\n#filter = list(position = 'top', clear = FALSE),\noptions = list(\n columnDefs = list(list(className = 'dt-left', targets = \"_all\")),\n pageLength = 5,\n processing=FALSE,\n searchHighlight = TRUE\n))\n\nobserveEvent(input$word, {\n updateSearch(bow_proxy, keywords = list(global = input$word, columns = NULL))\n})\n\n# displace stats of a word =====================================================\noutput$word_stats <- renderText({\n wd <- input$word\n if (nchar(wd) > 0){\n word <- word_stats %>%\n filter(word == wd) \n \n n_doc <- word %>%\n pull(n_documents)\n n_time <- word %>%\n pull(n_times)\n avg_tf <- word %>%\n pull(avg_tf)\n avg_tfidf <- word %>%\n pull(avg_tfidf)\n if(purrr::is_empty(n_doc)){\n n_doc <- 0\n n_time <- 0\n avg_tf <- 0\n avg_tfidf <- 0\n }\n } else {\n n_doc <- 0\n n_time <- 0\n avg_tf <- 0\n avg_tfidf <- 0\n }\n \n #h2(str(count))\n paste0(\"<p>\",\n \"Appears in \", \n \"<font size='5'>\", n_doc, \"</font>\", \" documents \",\n \"<font size='5'>\", n_time, \"</font>\", \" times \",\n \"and has \",\n \"<font size='5'>\", avg_tf, \"</font>\", \" average term frequency \",\n \"<font size='5'>\", avg_tfidf, \"</font>\", \" average TFIDF\",\n \"</p>\")\n})\n"
},
{
"alpha_fraction": 0.5141907334327698,
"alphanum_fraction": 0.5221170783042908,
"avg_line_length": 40.59574508666992,
"blob_id": "b130a9fb32e4024f8dfc42626875a4bb7af986ad",
"content_id": "3321c3a179b2b88516da8d568b7f149f5dd86b1a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 3911,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 94,
"path": "/shiny-apps/controllers/clinical_notes.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "# The raw table of notes =======================================================\noutput$raw_table <- DT::renderDataTable({\n # only display selected columns\n dat_all <- mtsamples_all[, .(specialty, note, section_headers = sections)]\n \n # only display first sample for each specialty\n dt_first <- dat_all[, .SD[1], by = specialty]\n},\nrownames = FALSE,\n# filter = list(position = 'top', clear = FALSE),\noptions = list(\n pageLength = 5,\n processing=FALSE\n))\n\n\n# unique specialty count =======================================================\noutput$specialty_count <- renderPlot({\n # keep only selected medical specialties\n to_keep <- c(\"Allergy / Immunology\", \"Autopsy\", \"Bariatrics\",\n \"Cardiovascular / Pulmonary\", \n \"Chiropractic\", \"Cosmetic / Plastic Surgery\", \n \"Dentistry\", \"Dermatology\", \"Diets and Nutritions\", \n \"Endocrinology\", \"ENT - Otolaryngology\", \n \"Gastroenterology\", \"Hematology - Oncology\", \n \"Hospice - Palliative Care\", \n \"Nephrology\", \"Neurology\", \"Neurosurgery\", \"Obstetrics / Gynecology\", \n \"Ophthalmology\", \"Orthopedic\", \"Pain Management\", \n \"Pediatrics - Neonatal\", \"Physical Medicine - Rehab\", \"Podiatry\", \n \"Psychiatry / Psychology\", \"Rheumatology\", \"Sleep Medicine\", \n \"Speech - Language\", \"Urology\")\n dat_specialty <- mtsamples_all[specialty %in% to_keep]\n \n # count specialty include duplicates\n count <- sort(table(dat_specialty$specialty), decreasing = TRUE)\n count_top <- count[1:20]\n\n ggplot() + \n geom_col(aes(x = factor(names(count_top), levels = names(count_top)),\n y = as.integer(count_top)),\n fill = \"gray80\") +\n geom_text(aes(x = factor(names(count_top), levels = names(count_top)),\n y = as.integer(count_top),\n label = as.integer(count_top)),\n hjust = 1,\n color = \"gray20\") +\n scale_y_continuous(expand = c(0, 0)) +\n labs(x = NULL,\n y = NULL,\n title = \"Counts of Top 20 Medical Specialties\") +\n coord_flip() + \n theme(panel.background = element_blank(),\n axis.ticks = element_blank(),\n axis.text.x = element_blank())\n})\n\n\n# Section headers count =========================================================\noutput$section_count <- renderPlot({\n # remove duplicates but keep the first one\n rows_duplicated <- duplicated(mtsamples_all$note)\n dat_section <- mtsamples_all[!rows_duplicated, .(specialty, note, sections)]\n \n # failed to scrape section from some notes\n sections <- dat_section[sections != \"\", sections] %>%\n str_split(\", \") %>%\n unlist()\n \n # notes have sections\n N <- length(dat_section[sections != \"\", sections])\n \n # count of each sections\n count <- sort(table(sections), decreasing = TRUE)\n count_top <- count[1:20]\n \n ggplot() + \n geom_col(aes(x = factor(names(count_top), levels = names(count_top)),\n y = as.integer(count_top) / N),\n fill = \"gray80\") +\n geom_text(aes(x = factor(names(count_top), levels = names(count_top)),\n y = as.integer(count_top) / N,\n label = paste0(round(100 * as.integer(count_top) / N, 1), \"%\")),\n hjust = 1,\n color = \"gray20\") +\n scale_y_continuous(expand = c(0, 0), \n labels = scales::percent_format()) +\n labs(x = NULL,\n y = NULL,\n title = \"Frequencies of Top 20 Section Headers (As-Is)\") +\n coord_flip() + \n theme(panel.background = element_blank(),\n axis.ticks = element_blank(),\n axis.text.x = element_blank())\n})\n\n"
},
{
"alpha_fraction": 0.5844022631645203,
"alphanum_fraction": 0.619548499584198,
"avg_line_length": 35.064815521240234,
"blob_id": "e12afcab1a4941361444dddbc7ae1505c3daf438",
"content_id": "5bef137b4f4d435b154717b60364daf7e5b57b41",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 3898,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 108,
"path": "/machine-learning/clustering_pca_three_classes.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "# PCA clustering of clinical note in three specialties: Gastroenterology, \n# Neurology, and Urology. Plot with the first two principle components, we\n# are able to identify three clusters. As we know the specialty of each \n# sample, we compare the quality of the clustering. \n#\n# Results: \n# - Uisng tfidf generated from clinical notes, PCA clustering has 66% \n# accuracy.\n# - Using tfidf generagte from named entities extracted with Amazon Comprehend \n# Medical from clinical note, PCA clustering has 76% accuracy\n# - Both are better than base accuracy of 38% assuming all to be Gastroenterolgy\n\nsource(\"utilities.R\")\n\n# # Prepare data =================================================================\n# dat_gas_neu_uro <- read_notes(\"data/amazon_medacy_mtsamples_gastr_neuro_urolo.csv\",\n# cols_keep = c(\"id\", \"amazon_me\", \"specialty\", \"note\"),\n# clean = TRUE,\n# y_label = TRUE)\n# \n# # tfidf matrix, as sample order is randomized, so save all of them together with\n# # pcas\n# tfidf_note <- tfidf_tm(dat_gas_neu_uro$note)\n# tfidf_amazon <- tfidf_tm(dat_gas_neu_uro$amazon_me)\n# y_true <- dat_gas_neu_uro$y\n# \n# # the signs of pc1 and pc2 change randomly. For reproducibility in plot, save\n# # them as RData\n# pca_note <- prcomp(tfidf_note)\n# pca_amazon <- prcomp(tfidf_amazon)\n# save(dat_gas_neu_uro, tfidf_note, tfidf_amazon, y_true, pca_note, pca_amazon, \n# file = \"shiny-apps/RData/pca_note_amazon_gas_neu_uro.RData\")\n\n# load the same data for pca, kmeans, and hcluster\nload(\"shiny-apps/RData/pca_note_amazon_gas_neu_uro.RData\")\n\n# examine quality\nplot_pc1_pc2(pca_note$x, color = y_true)\nplot_pc1_pc2(pca_amazon$x, color = y_true)\n\n\n# pca using note ===============================================================\n# Plot samples in pc1 - pc2 space. Spot 3 clusters and add line boundary to \n# seperate the clusters. The boundaries only good for the saved pca\npca <- pca_note$x\nplot_pc1_pc2(pca)\na1 <- 8\na2 <- 0.8\na3 <- -2\ncurve(a1 * x, from = 0, to = 0.4, add = TRUE, lty = 2)\ncurve(a2 * x, from = 0, to = -0.4, add = TRUE, lty = 2)\ncurve(a3 * x, from = 0, to = 0.4, add = TRUE, lty = 2)\n\npc1 <- pca[, 1]\npc2 <- pca[, 2]\ncluster1 <- (pc2 >= a1 * pc1) & (pc2 > a2 * pc1)\ncluster2 <- (pc2 <= a2 * pc1) & (pc2 < a3 * pc1)\ncluster3 <- (pc2 >= a3 * pc1) & (pc2 < a1 * pc1)\n\n# assigne a label to each cluster\ny_clusters <- rep(999, length(y_true))\ny_clusters[cluster1] <- 1\ny_clusters[cluster2] <- 2\ny_clusters[cluster3] <- 3\n\ntable(y_true, y_clusters)\n# As we know the true labels y_true, we can use them to determine each cluster\n# identified by PCA\ny_pred_note_pca <- best_match(y_true, y_clusters)\ny_cluster_note_pca <- y_clusters\n\n\n# pca amazon_me ================================================================\n# PCA on tfidf generated with Amazon Comprehend Medical named entities. Keep \n# only PC1 and PC2 for clustering visualization and analysis\npca <- pca_amazon$x\n\n# Plot samples in pc1 - pc2 space. 
Spot 3 clusters and add line boundary to \n# seperate the clusters\nplot_pc1_pc2(pca)\na1 <- -0.8\na2 <- 3\na3 <- 0.8\ncurve(a1 * x, from = 0, to = 0.7, add = TRUE, lty = 2)\ncurve(a2 * x, from = 0, to = 0.6, add = TRUE, lty = 2)\ncurve(a3 * x, from = 0, to = -0.4, add = TRUE, lty = 2)\n\npc1 <- pca[, 1]\npc2 <- pca[, 2]\ncluster1 <- (pc2 >= a1 * pc1) & (pc2 < a2 * pc1)\ncluster2 <- (pc2 >= a2 * pc1) & (pc2 > a3 * pc1)\ncluster3 <- (pc2 <= a3 * pc1) & (pc2 < a1 * pc1)\n\n\ny_clusters <- rep(999, length(y_true))\ny_clusters[cluster1] <- 1\ny_clusters[cluster2] <- 2\ny_clusters[cluster3] <- 3\n\n# assigne a label to each cluster\ny_pred_amazon_pca <- best_match(y_true, y_clusters)\ny_cluster_amazon_pca <- y_clusters\n\n\n# save pca results for shiny ==================================================\nsave(y_pred_note_pca, y_cluster_note_pca, \n y_pred_amazon_pca, y_cluster_amazon_pca,\n file = \"shiny-apps/RData/pca_results.RData\")\n\n\n\n"
},
{
"alpha_fraction": 0.5633013844490051,
"alphanum_fraction": 0.5737656354904175,
"avg_line_length": 32.589107513427734,
"blob_id": "cd3a45d1e8d48ef949316cd474a9c16a504b82fd",
"content_id": "e4a158a6b05d2273c2ebadaa5e5e6658a15611fd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 6785,
"license_type": "no_license",
"max_line_length": 127,
"num_lines": 202,
"path": "/web_scraping_mtsample_com.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "# last reviewed: 20200211\n\n# To scrape all sample medical transcriptions at mtsamples.com.\n# Often scraping with the XPath and css selector returns nothing so we choose \n# to scrape all text and then use regular expression to extract the needed\n# data from the text.\n\nlibrary(rvest)\nlibrary(stringr)\n#library(progress)\n\n\n# scrape data of one sample\nscrape_one_sample <- function(sample_url){\n sample_page <- read_html(sample_url)\n \n # get string of section titles seperated by \",\". css selector works here\n sections <- sample_page %>% \n html_nodes(css = \"b\") %>%\n html_text() %>%\n str_extract(\"[A-Z][A-Z /]+[A-Z]\") %>%\n #str_remove(\":\") %>%\n .[!is.na(.)] %>%\n setdiff(\"NOTE\") %>%\n paste(collapse = \", \")\n \n # get all text of the sample. Other contents are extracted from sample_text\n sample_text <- sample_page %>%\n html_node(xpath = '//*[@id=\"sampletext\"]') %>%\n html_text() %>%\n # dotall = TRUE to match . to \\r and \\n\n str_remove(regex(\"^.+(?=Sample Type)\", dotall=TRUE))\n \n # extract everything between \"Medical Specialty: \" and \"Sample Name: \"\n sample_type <- str_extract(sample_text, \"(?<=Medical Specialty:).+(?=Sample Name:)\") %>%\n str_trim() %>% str_squish()\n sample_name <- str_extract(sample_text, \"(?<=Sample Name:).+(?=\\r\\n)\") %>%\n str_trim() %>% str_squish()\n \n sample_text_1 <- str_remove(sample_text, \"^.*\\r\\n\")\n description <- str_extract(sample_text_1, \"(?<=Description:).+(?=\\r\\n)\") %>%\n str_trim() %>% str_squish()\n \n sample_text_2 <- str_remove(sample_text_1, \"^.*\\r\\n\")\n transcription <- str_remove(sample_text_2, \"^.+Report\\\\)\") %>%\n str_remove(\"^[\\r\\n ]*\") %>%\n str_extract(\"^.*(?=\\r\\n)\") %>%\n str_remove_all(\"\\t\") %>%\n str_trim() %>% str_squish()\n \n keywords <- str_extract(sample_text, \"(?<=Keywords: \\r\\n).*(?=\\r\\n)\") %>%\n str_trim() %>% str_squish()\n \n # rename for easy use later\n df <- data.frame(\n specialty = sample_type, # sample type / medical specialty\n name = sample_name,\n description = description,\n note = transcription, # clinical note\n sections = sections,\n keywords = keywords,\n stringsAsFactors = FALSE\n )\n}\n# sample_url <- \"https://www.mtsamples.com/site/pages/sample.asp?Type=85-Surgery&Sample=1233-Adenocarcinoma%20&%20Mesothelioma\"\n# aaa <- scrape_one_sample(sample_url)\n\n\n# get url to each sample in one page\nget_sample_urls <- function(page_url){\n sample_urls <- read_html(page_url) %>%\n html_nodes(xpath = '//*[@id=\"Browse\"]') %>%\n html_nodes(\"a\") %>%\n html_attr(\"href\") %>%\n paste0(\"https://www.mtsamples.com\", .) 
%>%\n # replace \" \" with \"%20\" for legal url\n str_replace_all(\" \", \"%20\")\n return(sample_urls)\n}\n#page_url <- \"https://www.mtsamples.com/site/pages/browse.asp?type=21%2DEndocrinology&page=2\"\n# bbb <- get_sample_urls(page_url)\n\n\n# scrape all samples in one page\nscrape_one_page <- function(page_url){\n df_page <- data.frame(\n specialty = character(0),\n name = character(0),\n description = character(0),\n note = character(0),\n sections = character(0),\n keywords = character(0),\n stringsAsFactors = FALSE\n )\n for(sample_url in get_sample_urls(page_url)){\n df <- scrape_one_sample(sample_url)\n df_page <- rbind(df_page, df)\n }\n return(df_page)\n}\n# page_url <- \"https://www.mtsamples.com/site/pages/browse.asp?type=21%2DEndocrinology&page=2\"\n# ccc <- scrape_one_page(page_url)\n\n\n# get the number of pages of a sample type / medical specialty\nget_number_pages <- function(type_url){\n text <- read_html(type_url) %>%\n html_node(xpath = '//*[@id=\"wrapper\"]') %>%\n html_text() %>%\n str_remove_all(\"[\\r|\\n\\t]\")\n \n if(str_detect(text, \">\\\\s+>>\")){\n num <- str_extract(text, \"[0-9]+(?=\\\\s+>\\\\s+>>)\") %>%\n as.integer()\n return(num)\n } else {\n return(1)\n }\n}\n# type_url <- \"https://www.mtsamples.com/site/pages/browse.asp?type=85-Surgery\"\n# ddd <- get_number_pages(type_url)\n\n\n# get the url of each page of a Sample Type / Medical Specialty using the first \n# page url of a Sample Type Medical specialty\nget_page_urls <- function(type_url){\n number_pages <- get_number_pages(type_url)\n if (number_pages == 1){\n page_urls <- type_url\n } else {\n page_urls <- type_url\n for(i in 2:number_pages){\n url_i <- paste0(type_url, \"&page=\", i)\n page_urls <- c(page_urls, url_i)\n }\n }\n \n return(page_urls)\n}\n# type_url <- \"https://www.mtsamples.com/site/pages/browse.asp?type=85-Surgery\"\n# eee <- get_page_urls(type_url)\n\n\n# get the url for the first page of each Sample Type / Medical Specialty from\n# https://www.mtsamples.com/\nget_type_urls <- function(home_url){\n home_text <- read_html(home_url) %>%\n html_node(xpath = '//*[@id=\"MenuTypeLeft\"]') %>%\n html_nodes(\"a\") %>%\n html_attr(\"href\") %>%\n paste0(\"https://www.mtsamples.com\", .) %>%\n # replace \" \" with \"%20\" for legal url\n str_replace_all(\" \", \"%20\")\n return(home_text)\n}\n# home_url <- \"https://www.mtsamples.com/\"\n# fff <- get_type_urls(home_url)\n\n\n# scrape all samples from home site\nscrape_all_samples <- function(home_url){\n if (!dir.exists(\"data\")){\n cat(\"create a new directory data/ under current directory to save scraping.\")\n dir.create(\"data\")\n }\n \n page_count <- 0\n mt <- data.frame(\n specialty = character(0),\n name = character(0),\n description = character(0),\n note = character(0),\n sections = character(0),\n keywords = character(0),\n stringsAsFactors = FALSE\n )\n type_urls <- get_type_urls(home_url)\n \n # give an estimated number of total pages to scrape, used to track the \n # scraping progress. 
It should be larger than the real number.\n total_pages <- 510\n #pb <- progress_bar$new(total = total_pages)\n for (type_url in type_urls){\n page_urls <- get_page_urls(type_url)\n for (page_url in page_urls){\n #pb$tick()\n page_count <- page_count + 1\n cat(paste0(\"Scraping page \", page_count, \" of ~\", total_pages,\n \" --- \", page_url, \"\\n\"))\n df <- scrape_one_page(page_url)\n mt <- rbind(mt, df)\n }\n }\n \n # save the scraped data\n csv_file <- paste0(\"./data/mtsamples_\", str_remove_all(Sys.Date(), \"-\"), \".csv\")\n write.csv(mt, file=csv_file, row.names = FALSE)\n cat(paste0(\"Scraped data saved successfully to \", csv_file))\n}\n\nhome_url <- \"https://www.mtsamples.com/\"\nscrape_all_samples(home_url)\n"
},
{
"alpha_fraction": 0.4990328848361969,
"alphanum_fraction": 0.5232108235359192,
"avg_line_length": 26.062828063964844,
"blob_id": "fcea04bb17ca91a19b6818a066f3f694133a7dae",
"content_id": "4572c94d0f3964180a2764515c5843f76552b5f0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 5170,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 191,
"path": "/machine-learning/gas_neu_uro_classification_tfidf_xgb.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "library(xgboost)\nsource(\"utilities.R\")\n\n\n# prepare data =================================================================\nset.seed(12345)\ndat <- read_notes(\n \"data/amazon_medacy_mtsamples_gastr_neuro_urolo.csv\",\n duplicate_rm = T,\n cols_keep = \"all\",\n y_label = TRUE\n)\n\n# hyper-parameters tuning ======================================================\n# results: optimal paramters:\n# max_depth = 4\n# eta = 0.28\n# nround = 25\n\ntfidf <- tfidf_tm(dat$amazon_me)\n# run only if need pca\n#tfidf <- prcomp(tfidf)$x\nX <- tfidf\ny <- dat$y\n\nset.seed(12345)\nin_train <- caret::createDataPartition(y, p = 0.7, list = FALSE)\n\nX_train <- X[in_train,]\nX_test <- X[-in_train,]\n\ny_train <- y[in_train]\ny_test <- y[-in_train]\n\nn_rep <- 100\n# placeholder for hyperparamters and metrics\nparam_df = data.frame(max_depth = numeric(n_rep),\n eta = numeric(n_rep),\n metrics = numeric(n_rep),\n nround = integer(n_rep))\nmetrics_cv <- vector(\"list\", n_rep)\n\nn_class <- length(unique(y_train))\npb <- progress_bar$new(total = n_rep)\nfor (i in 1:n_rep) {\n pb$tick()\n maxdepth <- sample(2:10, 1)\n eta <- runif(1, 0.1, 0.5)\n \n param <- list(objective = \"multi:softmax\",\n num_class = n_class,\n eval_metric = \"merror\", # check for metric for multiclass\n max_depth = maxdepth,\n eta = eta\n )\n cv_nround = 30\n cv_nfold = 3\n xgb_cv <- xgb.cv(data=X_train, label = y_train, params = param, \n nthread=3, nfold=cv_nfold, nrounds=cv_nround,\n verbose = FALSE, early_stop_round=10, maximize=FALSE)\n \n cv_metrics <- xgb_cv$evaluation_log %>% \n set_colnames(c(\"iter\", \"train_mean\", \"train_std\", \"test_mean\", \"test_std\"))\n \n if(as.data.frame(param)[1, \"eval_metric\"] %in% c(\"auc\")){\n best_metric = max(cv_metrics[, test_mean])\n best_metric_round = which.max(cv_metrics[, test_mean])\n } else {\n best_metric = min(cv_metrics[, test_mean])\n best_metric_round = which.min(cv_metrics[, test_mean])\n }\n \n param_df[i, ] <- c(maxdepth, eta, best_metric, best_metric_round)\n metrics_cv[[i]] <- cv_metrics\n}\n\n# plot to find best parameters\nplot(param_df$max_depth, param_df$metrics)\nplot(param_df$eta, param_df$metrics)\nplot(param_df$nround, param_df$metrics)\n\n\n\n\n# one model ====================================================================\nparam <- list(objective = \"multi:softmax\",\n num_class = n_class,\n eval_metric = \"merror\",\n max_depth = 4,\n eta = 0.28\n)\nxgb <- xgboost(data = X_train, \n label = y_train, \n params = param,\n nthread = 3, \n nrounds = 25)\n\ny_pred <- predict(xgb, X_test)\ntable(y_test, y_pred)\n\n\n# repeat 100 times to get average metrics =====================================\nxgb_metrics <- function(corpus, pca = FALSE){\n # calculate average accuracy and f1 score out of 100 repeat\n # \n # Arguments:\n # corpus: string, \"note\" or \"amazon_me\"\n # pca: bool, if TRUE, process with pca\n #\n # Return:\n # numeric vector \n #\n tfidf <- tfidf_tm(dat[, get(corpus)])\n # run only if need pca\n if (pca){\n tfidf <- prcomp(tfidf)$x[, 1:25]\n }\n X <- tfidf\n y <- dat$y\n \n n_rep <- 100\n df_acc_f1 <- data.frame(\n acc = numeric(n_rep),\n f1_gas = numeric(n_rep),\n f1_neu = numeric(n_rep),\n f1_uro = numeric(n_rep)\n )\n set.seed(6789)\n in_trains <- caret::createDataPartition(y, times = n_rep, p = 0.7)\n \n for (i in 1:100){\n cat(i)\n in_train <- in_trains[[i]]\n X_train <- X[in_train, ]\n y_train <- y[in_train]\n X_test <- X[-in_train,]\n y_test <- y[-in_train]\n \n mdl <- xgboost(data = X_train, \n label = y_train, \n params = 
list(\n objective = \"multi:softmax\",\n num_class = 3,\n eval_metric = \"merror\",\n max_depth = 4,\n eta = 0.28\n ),\n nthread = 3, \n nrounds = 25,\n verbose = FALSE)\n y_pred <- predict(mdl, X_test)\n \n tb <- table(y_test, y_pred)\n acc <- sum(diag(tb)) / length(y_test)\n \n f1_score <- function(tb, k){\n recall <- diag(tb)[k] / sum(y_test == k - 1)\n precision <- diag(tb)[k] / sum(y_pred == k - 1)\n f1 <- 2 * (recall * precision) / (recall + precision)\n }\n \n f1_gas <- f1_score(tb, 1)\n f1_neu <- f1_score(tb, 2)\n f1_uro <- f1_score(tb, 3)\n \n # assert correct calculation\n stopifnot(acc <= 1 & f1_gas <= 1 & f1_neu <= 1 & f1_uro <= 1)\n stopifnot(acc >= 0 & f1_gas >= 0 & f1_neu >= 0 & f1_uro >= 0)\n \n df_acc_f1[i, ] <- c(acc, f1_gas, f1_neu, f1_uro)\n }\n \n return(df_acc_f1)\n}\n\n# mean and standard deviation\ndf_acc_f1 <- xgb_metrics(\"note\")\nsapply(df_acc_f1, mean)\nsapply(df_acc_f1, sd)\n\ndf_acc_f1 <- xgb_metrics(\"amazon_me\")\nsapply(df_acc_f1, mean)\nsapply(df_acc_f1, sd)\n\ndf_acc_f1 <- xgb_metrics(\"note\", pca = TRUE)\nsapply(df_acc_f1, mean)\nsapply(df_acc_f1, sd)\n\ndf_acc_f1 <- xgb_metrics(\"amazon_me\", pca = TRUE)\nsapply(df_acc_f1, mean)\nsapply(df_acc_f1, sd)\n\n"
},
{
"alpha_fraction": 0.8036175966262817,
"alphanum_fraction": 0.8036175966262817,
"avg_line_length": 75.80000305175781,
"blob_id": "ad74cc38a33b6378a4232f91eec10f24b521e811",
"content_id": "276ad90d29aeb6b01d28bded8fbe1fde4d9d729e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "RMarkdown",
"length_bytes": 387,
"license_type": "no_license",
"max_line_length": 363,
"num_lines": 5,
"path": "/shiny-apps/Rmd/clustering_intro.Rmd",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "---\ntitle: \" \"\n---\n\nWe use three methods to cluster the clinical notes: principle component analysis, hierarchical clustering, and K-means clustering. The corpora used are the original clinical notes and medical named entities extracted with Amazon Comprehend Medical. We will only include three medical specialties: gastroenterology, neurology, and urology for better visualization.\n \n"
},
{
"alpha_fraction": 0.7768924236297607,
"alphanum_fraction": 0.7768924236297607,
"avg_line_length": 49,
"blob_id": "4089cd8d4aacee4cfb8db33e53ed3a9f096b7353",
"content_id": "89f2fd1f9cf8b5be7e7b3f47d65e602a39429595",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "RMarkdown",
"length_bytes": 251,
"license_type": "no_license",
"max_line_length": 229,
"num_lines": 5,
"path": "/shiny-apps/Rmd/classification_intro.Rmd",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "---\ntitle: \" \"\n---\n\nIn this section we build supervised learning models to predict the medical specialties of clinical notes. The best model, a radial kernel support vector machine model, is deployed to predict the specialties of new clinical notes. \n"
},
{
"alpha_fraction": 0.5842271447181702,
"alphanum_fraction": 0.5962145328521729,
"avg_line_length": 27.303571701049805,
"blob_id": "3dcb3cd75ae7299397293af256bc9c92fd025069",
"content_id": "c90887ed12ba135e09bb382df24cba6efe49084e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1585,
"license_type": "no_license",
"max_line_length": 81,
"num_lines": 56,
"path": "/python/add_amazon_medacy_mes_to_mtsamples.py",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri Feb 14 20:58:49 2020\n\n@author: gl\n\"\"\"\n#%% load data\nimport pandas as pd\nimport json\n\nmt = pd.read_csv(\"data/mtsamples_gastroenterology_neurology_urology.csv\")\n\nwith open(\"data/comprehend_medical_gastroenterology_neurology_urology.txt\") as f:\n amazon = json.load(f)\n \nwith open(\"data/medacy_gastroenterology_neurology_urology.txt\") as f:\n medacy_bow = json.load(f)\n \n#%% get medical terms from one Comprehend Medical entity\ndef get_amazon_bow(me):\n # me: medical entities extracted from Amazon Comprehend Medical \n # like aaa[\"Entities\"]\n text = [dic[\"Text\"] for dic in me]\n traits = [dic[\"Traits\"] for dic in me] # trait including negation\n \n negation = []\n for ele in traits:\n if len(ele) == 0:\n negation.append(\"\")\n else:\n count = 0\n for dic in ele:\n if dic[\"Name\"] == \"NEGATION\":\n count += 1\n if count == 0:\n negation.append(\"\")\n else:\n negation.append(\"not \")\n \n # attache \"-1\" to the text if it is negative\n bow = [n + t for t, n in zip(text, negation)]\n \n return(bow)\n\n\n#%% get amazon bag of words\namazon_bow = []\nfor me in amazon:\n amazon_bow.append(\", \".join(get_amazon_bow(me)))\n \n#%% add amazon_bow and medacy_bow to notes\nmt[\"amazon_me\"] = amazon_bow\nmt[\"medacy_me\"] = medacy_bow\nmt = mt[['id', 'amazon_me', 'medacy_me', 'specialty', 'note']]\nmt.to_csv(\"data/amazon_medacy_mtsamples_gastr_neuro_urolo.csv\", index=False)\n"
},
{
"alpha_fraction": 0.5023879408836365,
"alphanum_fraction": 0.5193344354629517,
"avg_line_length": 29.050926208496094,
"blob_id": "61bbfee3ffb31a412190dd7872f02b098389ac56",
"content_id": "3be870dd8631805cb5f64a8552172ac0df3e59c9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 6491,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 216,
"path": "/machine-learning/clustering_kmeans_three_classes.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "source(\"utilities.R\")\n\n# define functions ============================================================\nbest_kmeans <- function(tfidf, k = 3, pca = TRUE, n_pca = NULL, iter = 100,\n n_rep = 100){\n # Get best kmeans clustering of a tfidf matrix which has smallest total \n # withinss\n #\n # Arguments:\n # tfidf, matrix, normalized tfidf matrix of corpus\n # k: int, number of clusters\n # pca: bool, whether or not preprocess tfidf with pca\n # n_pca: int, first n_pca components used for clustering, default all\n # iter: int, maximum iteration of kmeans\n # n_rep: number of repeat time to search for best kmeans model\n # Return:\n # a kmeans model\n \n if (isTRUE(pca)){\n tfidf <- prcomp(tfidf)$x\n }\n if (!is.null(n_pca)){\n stopifnot(isTRUE(pca))\n stopifnot(is.integer(as.integer(n_pca)))\n tfidf <- tfidf[, 1:n_pca]\n }\n \n set.seed(12345)\n # # km_para to record seed number and withinss of each try\n # km_para <- data.frame(rand_seed = rep(0, n_rep),\n # withinss = rep(0, n_rep))\n rand_seeds <- sample(1:10000, n_rep)\n best_withinss <- Inf # total withinss\n \n pb <- progress_bar$new()\n for (i in 1:n_rep){\n pb$tick()\n rand_seed <- rand_seeds[i]\n set.seed(rand_seed)\n km <- kmeans(tfidf, k, iter.max = iter)\n withinss <- km$tot.withinss\n # km_para[i, ] <- c(rand_seed, withinss)\n \n if (best_withinss > withinss){\n best_withinss <- withinss\n best_rand_seed <- rand_seed\n }\n }\n set.seed(best_rand_seed)\n km <- kmeans(tfidf, k, iter.max = iter)\n return(km)\n}\n\n# best km functions with pca ===================================================\nbest_km <- function(corpus_name, n_pca = NULL){\n # Identify specialties with best kmeans\n # \n # Arguments:\n # corpus_name: string, \"note\" or \"amazon\", for clinical notes and \n # Amazone medical entities\n # n_pca: int, use the first n_pca components for kmeans\n # Return:\n # int vector of labels like c(0, 2, 1, 1, 0, ...)\n \n if (is.null(n_pca)){\n if (corpus_name == \"note\"){\n mtx <- tfidf_note\n } else if (corpus_name == \"amazon\"){\n mtx <- tfidf_amazon\n }\n } else {\n if (corpus_name == \"note\"){\n mtx <- pca_note$x[, 1:n_pca]\n } else if (corpus_name == \"amazon\") {\n mtx <- pca_amazon$x[, 1:n_pca]\n }\n }\n \n km <- best_kmeans(mtx)\n y_clusters <- km$cluster\n \n y_pred <- best_match(y_true, y_clusters)\n}\n\n# prepare data ================================================================\n# \n# dat <- read_notes(\"data/amazon_medacy_mtsamples_gastr_neuro_urolo.csv\",\n# cols_keep = c(\"id\", \"amazon_me\", \"specialty\", \"note\"),\n# clean = TRUE,\n# y_label = TRUE)\n# \n# # tfidf matrix\n# tfidf_note <- tfidf_tm(dat$note)\n# tfidf_amazon <- tfidf_tm(dat$amazon_me)\n# y_true <- dat$y\n\n# load the same data for pca, kmeans, and hcluster\nload(\"shiny-apps/RData/pca_note_amazon_gas_neu_uro.RData\")\n\n\n# # select the best number of clusters ===========================================\n# # results: no clear elbow to decide number of clusters. 
use prior knowledge\n# # as we know it is three\n# K = 10\n# inertia <- c()\n# for (k in 1:K){\n# print(k)\n# km <- kmeans(tfidf_amazon, k, iter.max = 100)\n# inertia <- c(inertia, km$tot.withinss)\n# }\n# plot(1:K, inertia)\n\n# kmeans with clinical notes ==================================================\n# no pca, results: accuracy 0.64\ny_pred_note_kmeans <- best_km(\"note\")\ny_pred_note_kmeans_pca <- best_km(\"note\", n_pca = 25)\n\npar(mfrow = c(2, 2))\n\nplot_pc1_pc2(pca_note$x, \n pch = y_true, \n title = \"K-means Clusters\")\nlegend(\"topright\", \n legend = c(\"cluster 1\", \"cluster 2\", \"cluster 3\"), \n pch = c(1, 2, 3),\n cex = 0.8)\n\nplot_pc1_pc2(pca_note$x, \n color = y_true, \n title = \"True Specialties\")\nlegend(\"topright\", \n legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n col = c(\"red\", \"blue\", \"cyan\"), \n pch = 1,\n cex = 0.8)\n\nplot_pc1_pc2(pca_note$x, \n y_pred_note_kmeans, \n title = \"Identified Specialties\")\nlegend(\"topright\", \n legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n col = c(\"red\", \"blue\", \"cyan\"), \n pch = 1,\n cex = 0.8)\n\nplot_pc1_pc2(prcomp(tfidf_note)$x, \n color = y_true == y_pred_note_kmeans,\n pch = y_true,\n title = \"Compare prediction to true specialties\")\nlegend(\"topleft\", \n legend = c(\"Correct\", \"Wrong\"),\n col = c(\"blue\", \"red\"),\n pch = 16,\n cex = 0.8)\nlegend(\"topright\", legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n pch = 0:2,\n cex = 0.8)\n\ndev.off()\n\n\n# kmeans with amazon entities ==================================================\n# no pca, results: accuracy 0.64\ny_pred_amazon_kmeans <- best_km(\"amazon\")\ny_pred_amazon_kmeans_pca <- best_km(\"amazon\", 25)\n\npar(mfrow = c(2, 2))\n\nplot_pc1_pc2(pca_amazon$x, \n pch = y_clusters, \n title = \"K-means Clusters\")\nlegend(\"topright\", \n legend = c(\"cluster 1\", \"cluster 2\", \"cluster 3\"), \n pch = c(1, 2, 3),\n cex = 0.8)\n\nplot_pc1_pc2(pca_amazon$x, \n color = y_true, \n title = \"True Specialties\")\nlegend(\"topright\", \n legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n col = c(\"red\", \"blue\", \"cyan\"), \n pch = 1,\n cex = 0.8)\n\nplot_pc1_pc2(pca_amazon$x, \n y_pred_amazon_kmeans_pca, \n title = \"Identified Specialties\")\nlegend(\"topright\", \n legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n col = c(\"red\", \"blue\", \"cyan\"), \n pch = 1,\n cex = 0.8)\n\nplot_pc1_pc2(pca_amazon$x, \n color = y_true == y_pred_amazon_kmeans,\n pch = y_true,\n title = \"Compare prediction to true specialties\")\nlegend(\"topleft\", \n legend = c(\"Correct\", \"Wrong\"),\n col = c(\"blue\", \"red\"),\n pch = 16,\n cex = 0.8)\nlegend(\"topright\", legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n pch = 0:2,\n cex = 0.8)\n\ndev.off()\n\n\n\n\n\nsave(y_pred_note_kmeans, y_pred_note_kmeans_pca,\n y_pred_amazon_kmeans, y_pred_amazon_kmeans_pca,\n file = \"shiny-apps/RData/kmeans_results.RData\")\n"
},
{
"alpha_fraction": 0.3711228668689728,
"alphanum_fraction": 0.3885549306869507,
"avg_line_length": 37.49536895751953,
"blob_id": "c54a025e46030e82e659e50ea3aae85f22e89205",
"content_id": "1b147fa5f2d2176460890c3c3f1f4270d770fd16",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 8318,
"license_type": "no_license",
"max_line_length": 85,
"num_lines": 216,
"path": "/shiny-apps/controllers/clustering.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "# render plots and texts for clustering sidebar\n\n# pca ==========================================================================\noutput$pca_plot <- renderPlot(\n {\n par(mfrow = c(2, 2))\n if (input$pca == \"clinical notes\"){\n pca <- pca_note$x\n \n plot_pc1_pc2(pca, title = \"PCA Clustering\")\n a1 <- 8\n a2 <- 0.8\n a3 <- -2\n curve(a1 * x, from = 0, to = 0.4, add = TRUE, lty = 2)\n curve(a2 * x, from = 0, to = -0.4, add = TRUE, lty = 2)\n curve(a3 * x, from = 0, to = 0.4, add = TRUE, lty = 2)\n\n plot_pc1_pc2(pca, color = y_pred_note_pca, \n title = \"Identified Specialties\")\n legend(\"bottomright\", \n legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n col = c(\"red\", \"blue\", \"cyan\"), \n pch = 1,\n cex = 0.8)\n \n plot_pc1_pc2(pca, color = y_true, title = \"True Specialties\")\n legend(\"bottomright\", \n legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n col = c(\"red\", \"blue\", \"cyan\"), \n pch = 1,\n cex = 0.8)\n \n plot_pc1_pc2(pca, \n color = y_true == y_pred_note_pca, \n color_map = c(\"orange\", \"gray\"),\n pch = y_true,\n title = \"Compare Identified to True Specialties\")\n legend(\"bottomleft\", \n legend = c(\"Correct\", \"Wrong\"),\n col = c(\"gray\", \"orange\"),\n pch = 16,\n cex = 0.8)\n legend(\"bottomright\", \n legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n pch = 0:2,\n cex = 0.8)\n \n } else if (input$pca == \"amazon medical entities\"){\n pca <- pca_amazon$x\n plot_pc1_pc2(pca, title = \"PCA Clustering\")\n a1 <- -0.8\n a2 <- 3\n a3 <- 0.8\n curve(a1 * x, from = 0, to = 0.7, add = TRUE, lty = 2)\n curve(a2 * x, from = 0, to = 0.6, add = TRUE, lty = 2)\n curve(a3 * x, from = 0, to = -0.4, add = TRUE, lty = 2)\n\n plot_pc1_pc2(pca, color = y_pred_amazon_pca, \n title = \"Identified Specialties\")\n legend(\"bottomright\", \n legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n col = c(\"red\", \"blue\", \"cyan\"), \n pch = 1,\n cex = 0.8)\n\n plot_pc1_pc2(pca, color = y_true, title = \"True Specialties\")\n legend(\"bottomright\", \n legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n col = c(\"red\", \"blue\", \"cyan\"), \n pch = 1,\n cex = 0.8)\n \n plot_pc1_pc2(pca, \n color = y_true == y_pred_amazon_pca, \n color_map = c(\"orange\", \"gray\"),\n pch = y_true,\n title = \"Compare Identified Prediction to True Specialties\")\n legend(\"bottomleft\", \n legend = c(\"Correct\", \"Wrong\"),\n col = c(\"gray\", \"orange\"),\n pch = 16,\n cex = 0.8)\n legend(\"bottomright\", \n legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n pch = 0:2,\n cex = 0.8)\n }\n \n }\n)\n\n\n\n# hierarchical clustering =====================================================\n\noutput$dend_plot <- renderPlot(\n {\n if (input$dend == \"clinical notes\"){\n plot_dend(hc_note, title = \"Dendrogram colored with true specialties\")\n } else if (input$dend == \"amazon medical entities\"){\n plot_dend(hc_amazon, title = \"Dendrogram colored with true specialties\")\n }\n }\n)\n\noutput$hcluster_plot <- renderPlot(\n {\n par(mfrow = c(1, 2))\n if (input$dend == \"clinical notes\"){\n pca <- pca_note$x\n plot_pc1_pc2(pca, color = y_true, title = \"True Specialties\")\n legend(\"bottomright\", \n legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n col = c(\"red\", \"blue\", \"cyan\"), \n pch = 1,\n cex = 0.8)\n \n plot_pc1_pc2(pca, \n color = y_pred_note_hcluster, \n title = \"Identified Specialties\")\n legend(\"bottomright\", \n legend = c(\"Gastroenterology\", \"Neurology\", 
\"Urology\"),\n col = c(\"red\", \"blue\", \"cyan\"), \n pch = 1,\n cex = 0.8)\n \n # plot_pc1_pc2(pca, \n # color = y_true == y_pred_note_hcluster, \n # color_map = c(\"orange\", \"gray\"),\n # pch = y_true,\n # title = \"Compare Prediction to True Specialties\")\n # legend(\"bottomleft\", \n # legend = c(\"Correct\", \"Wrong\"),\n # col = c(\"gray\", \"orange\"),\n # pch = 16,\n # cex = 0.8)\n # legend(\"bottomright\", \n # legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n # pch = 0:2,\n # cex = 0.8)\n } else if (input$dend == \"amazon medical entities\"){\n pca <- pca_amazon$x\n plot_pc1_pc2(pca, color = y_true, title = \"True Specialties\")\n legend(\"bottomright\", \n legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n col = c(\"red\", \"blue\", \"cyan\"), \n pch = 1,\n cex = 0.8)\n \n plot_pc1_pc2(pca, \n color = y_pred_amazon_hcluster, \n title = \"Identified Specialties\")\n legend(\"bottomright\", \n legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n col = c(\"red\", \"blue\", \"cyan\"), \n pch = 1,\n cex = 0.8)\n }\n }\n)\n\n\n# kmeans =======================================================================\noutput$kmeans_plot <- renderPlot(\n {\n par(mfrow = c(1, 2))\n if (input$kmeans == \"clinical notes\"){\n pca <- pca_note$x\n plot_pc1_pc2(pca, color = y_true, title = \"True Specialties\")\n legend(\"bottomright\", \n legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n col = c(\"red\", \"blue\", \"cyan\"), \n pch = 1,\n cex = 0.8)\n \n if (input$kmeans_pca == \"yes\"){\n y_pred <- y_pred_note_kmeans_pca\n } else {\n y_pred <- y_pred_note_kmeans\n }\n \n plot_pc1_pc2(pca, \n color = y_pred, \n title = \"Identified Specialties\")\n legend(\"bottomright\", \n legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n col = c(\"red\", \"blue\", \"cyan\"), \n pch = 1,\n cex = 0.8)\n \n } else {\n pca <- pca_amazon$x\n plot_pc1_pc2(pca, color = y_true, title = \"True Specialties\")\n legend(\"bottomright\", \n legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n col = c(\"red\", \"blue\", \"cyan\"), \n pch = 1,\n cex = 0.8)\n \n if (input$kmeans_pca == \"yes\"){\n y_pred <- y_pred_amazon_kmeans_pca\n } else {\n y_pred <- y_pred_amazon_kmeans\n }\n \n plot_pc1_pc2(pca, \n color = y_pred, \n title = \"Identified Specialties\")\n legend(\"bottomright\", \n legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n col = c(\"red\", \"blue\", \"cyan\"), \n pch = 1,\n cex = 0.8)\n }\n }\n)\n\n\n\n"
},
{
"alpha_fraction": 0.6474423408508301,
"alphanum_fraction": 0.6644935011863708,
"avg_line_length": 25.945945739746094,
"blob_id": "6fc9958804b002ffb916b3970832d456f4552581",
"content_id": "ece131897aa5f45425de1574eca8c65e2bd4ae03",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 1994,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 74,
"path": "/machine-learning/binary_classification_tfidf_neural_network.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "library(tensorflow)\nlibrary(keras)\nlibrary(data.table)\nlibrary(magrittr)\nlibrary(stringr)\nsource(\"utilities.R\")\n\n# prepare data starting from medical note text\ndat <- fread(\"data/mtsample_gastroenterology_neurology.csv\") %>%\n .[, note := str_replace_all(note, \"\\\\.\", \"\\\\. \")] %>%\n .[, y := as.integer(factor(specialty)) - 1]\n\nnotes <- dat[, note]\n# initialize tokenizer specifing maximum words\ntk <- text_tokenizer(num_words = 3000)\n# update tk in place with a vector or list of documents\nfit_text_tokenizer(tk, notes)\n# convert the documents into a matrix of tfidf\nX <- texts_to_matrix(tk, notes, mode = \"tfidf\")\n# normalize the matrix so that length of each row vector is 1\nX <- X / sqrt(rowSums(X * X))\ny <- dat[, y]\n\n# split X and y into train and test\nset.seed(1234)\nin_train <- sample(1:nrow(X), round(0.7 * nrow(X)))\nin_test <- setdiff(1:nrow(X), in_train) %>%\n sample() # to shuffle the row numbers\nX_train <- X[in_train,]\ny_train <- y[in_train] \nX_test <- X[in_test,]\ny_test <- y[in_test]\n\n\nmodel <- keras_model_sequential() %>% \n # input layer\n layer_dense(256, input_shape = dim(X_train)[2], activation = \"relu\") %>%\n layer_dropout(0.2) %>% \n # second layer\n layer_dense(units = 128, activation = \"relu\") %>% \n layer_dropout(0.2) %>% \n # output layer\n layer_dense(1, activation = \"sigmoid\")\n\nsummary(model)\n\n# compile, fit, and evaluate model in place\ncompile(model,\n loss = \"binary_crossentropy\",\n optimizer = \"adam\",\n metrics = \"accuracy\"\n)\n\nfit(model,\n x = X_train, y = y_train,\n epochs = 10,\n batch_size = 32,\n validation_split = 0.3,\n verbose = 2\n)\n\nevaluate(model, X_test, y_test, verbose = 0)\n\n\n# get model metrics use custum defined function\npred <- predict(model, X_test)\nmetrics_binary(y_test, pred)\n\n\n\nsave_model_tf(object = model, filepath = \"trained_models/binary_tfidf_neural_network\")\n\nreloaded_model <- load_model_tf(\"trained_models/binary_tfidf_neural_network\")\nall.equal(predict(model,X), predict(reloaded_model, X))\n"
},
{
"alpha_fraction": 0.6385470628738403,
"alphanum_fraction": 0.6512972712516785,
"avg_line_length": 28.064655303955078,
"blob_id": "a37fb13766f0a579751c9de219895059e1783acc",
"content_id": "06d398a2ebb92c1c94f3dd10940c039fd90e63c0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 6745,
"license_type": "no_license",
"max_line_length": 131,
"num_lines": 232,
"path": "/machine-learning/multiclass_classification_tfidf_neural_network.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "library(tensorflow)\nlibrary(keras)\nsource(\"utilities.R\")\n\n# prepare data =================================================================\nspecialties <- c(\n \"Gastroenterology\", \"Obstetrics / Gynecology\", \"Cardiovascular / Pulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\ncols <- c(\"specialty\", \"note\")\ndat <- read_notes(\n \"data/mtsamples_multi_class.csv\", \n duplicate_rm = TRUE,\n specialties = specialties,\n cols_keep = cols,\n id = TRUE,\n y_label = TRUE\n)\nnotes <- dat$note\n\n\n# try out different nn construction ============================================\n# result:\n# simple two dense layer with drop out regularization works just fine\n\n# initialize tokenizer specifing maximum words\ntk <- text_tokenizer(num_words = 3000)\n# update tk in place with a vector or list of documents\nfit_text_tokenizer(tk, notes)\n# convert the documents into a matrix of tfidf\nX <- texts_to_matrix(tk, notes, mode = \"tfidf\")\n# normalize the matrix so that length of each row vector is 1\nX <- X / sqrt(rowSums(X * X))\n\n# for multiclass, y should be converted to a matrix \ny_class <- dat$y\nn_class <- length(unique(y_class))\ny <- to_categorical(y_class, n_class)\n\n# split X and y into train and test\nset.seed(1234)\nin_train <- caret::createDataPartition(y_class, p = 0.7, list = FALSE)\nX_train <- X[in_train,]\ny_train <- y[in_train,] \nX_test <- X[-in_train,]\ny_test <- y[-in_train]\ny_test_class <- y_class[-in_train]\n\n\nmodel <- keras_model_sequential() %>% \n # input layer\n layer_dense(32, input_shape = dim(X_train)[2], activation = \"relu\") %>%\n layer_dropout(0.2) %>% \n # second layer\n layer_dense(units = 16, activation = \"relu\") %>% \n layer_dropout(0.2) %>% \n # output layer\n layer_dense(n_class, activation = \"softmax\")\n\n#summary(model)\n\n# compile, fit, and evaluate model in place\ncompile(model,\n loss = \"categorical_crossentropy\",\n optimizer = \"adam\",\n metrics = \"accuracy\"\n)\n\nfit(model,\n x = X_train, y = y_train,\n epochs = 20,\n batch_size = 32,\n validation_split = 0.3,\n verbose = 3\n)\n\ny_pred <- predict(model, X_test)\ny_pred_class <- predict_classes(model, X_test)\ntable(y_test_class, y_pred_class)\n\n# one model tfidf ==============================================================\n# initialize tokenizer specifing maximum words\ntk <- text_tokenizer(num_words = 3000)\n# update tk in place with a vector or list of documents\nfit_text_tokenizer(tk, notes)\n# convert the documents into a matrix of tfidf\nX <- texts_to_matrix(tk, notes, mode = \"tfidf\")\n# normalize the matrix so that length of each row vector is 1\nX <- X / sqrt(rowSums(X * X))\n\n# for multiclass, y should be converted to a matrix \ny_class <- dat$y\nn_class <- length(unique(y_class))\ny <- to_categorical(y_class, n_class)\n\n# split X and y into train and test\nset.seed(11112)\nin_train <- caret::createDataPartition(y_class, p = 0.7, list = FALSE)\nX_train <- X[in_train,]\ny_train <- y[in_train,] \nX_test <- X[-in_train,]\ny_test <- y[-in_train]\ny_test_class <- y_class[-in_train]\n\n\nmodel <- keras_model_sequential() %>% \n # input layer\n layer_dense(32, input_shape = dim(X_train)[2], activation = \"relu\") %>%\n layer_dropout(0.2) %>% \n # second layer\n layer_dense(units = 16, activation = \"relu\") %>% \n layer_dropout(0.2) %>% \n # output layer\n layer_dense(n_class, activation = \"softmax\")\n\n#summary(model)\n\n# compile, fit, and evaluate model in place\ncompile(model,\n loss = \"categorical_crossentropy\",\n optimizer = \"adam\",\n 
metrics = \"accuracy\"\n)\n\nfit(model,\n x = X_train, y = y_train,\n epochs = 20,\n batch_size = 32,\n validation_split = 0.3,\n verbose = 3\n)\n\ny_pred <- predict(model, X_test)\ny_pred_class <- predict_classes(model, X_test)\ntable(y_test_class, y_pred_class)\n\nclasses_x <- c(\n \"Gastroenterology\", \"Obstetrics\\nGynecology\", \"Cardiovascular\\nPulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\nclasses_y <- c(\n \"Gastro-\\nenterology\", \"Obstetrics\\nGynecology\", \"Cardiovascular\\nPulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\nggplot_multiclass_nn_tfidf_recall <- plot_confusion_matrix(y_test_class, y_pred_class, classes_x, classes_y)\nggplot_multiclass_nn_tfidf_precision <- plot_confusion_matrix(y_test_class, y_pred_class, classes_x, classes_y, type = \"precision\")\n\naccuracy_nn_tfidf <- accuracy(y_test_class, y_pred_class)\n\n\n\n\n# one model pca ==============================================================\n\n# initialize tokenizer specifing maximum words\ntk <- text_tokenizer(num_words = 3000)\n# update tk in place with a vector or list of documents\nfit_text_tokenizer(tk, notes)\n# convert the documents into a matrix of tfidf\nX <- texts_to_matrix(tk, notes, mode = \"tfidf\")\n# normalize the matrix so that length of each row vector is 1\nX <- X / sqrt(rowSums(X * X))\n\nX <- prcomp(X)$x[, 1:25]\n\n# for multiclass, y should be converted to a matrix \ny_class <- dat$y\nn_class <- length(unique(y_class))\ny <- to_categorical(y_class, n_class)\n\n# split X and y into train and test\nset.seed(11111)\nin_train <- caret::createDataPartition(y_class, p = 0.7, list = FALSE)\nX_train <- X[in_train,]\ny_train <- y[in_train,] \nX_test <- X[-in_train,]\ny_test <- y[-in_train]\ny_test_class <- y_class[-in_train]\n\n\nmodel <- keras_model_sequential() %>% \n # input layer\n layer_dense(32, input_shape = dim(X_train)[2], activation = \"relu\") %>%\n layer_dropout(0.2) %>% \n # second layer\n layer_dense(units = 16, activation = \"relu\") %>% \n layer_dropout(0.2) %>% \n # output layer\n layer_dense(n_class, activation = \"softmax\")\n\nsummary(model)\n\n# compile, fit, and evaluate model in place\ncompile(model,\n loss = \"categorical_crossentropy\",\n optimizer = \"adam\",\n metrics = \"accuracy\"\n)\n\nfit(model,\n x = X_train, y = y_train,\n epochs = 30,\n batch_size = 32,\n validation_split = 0.3,\n verbose = 3\n)\n\ny_pred <- predict(model, X_test)\ny_pred_class <- predict_classes(model, X_test)\ntable(y_test_class, y_pred_class)\n\nclasses_x <- c(\n \"Gastroenterology\", \"Obstetrics\\nGynecology\", \"Cardiovascular\\nPulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\nclasses_y <- c(\n \"Gastro-\\nenterology\", \"Obstetrics\\nGynecology\", \"Cardiovascular\\nPulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\nggplot_multiclass_nn_pca_recall <- plot_confusion_matrix(y_test_class, y_pred_class, classes_x, classes_y)\nggplot_multiclass_nn_pca_precision <- plot_confusion_matrix(y_test_class, y_pred_class, classes_x, classes_y, type = \"precision\")\n\naccuracy_nn_pca <- accuracy(y_test_class, y_pred_class)\n\n\n\n\n# save for shiny\nsave(ggplot_multiclass_nn_pca_recall, ggplot_multiclass_nn_pca_precision,\n ggplot_multiclass_nn_tfidf_recall, ggplot_multiclass_nn_tfidf_precision,\n accuracy_nn_tfidf, accuracy_nn_pca,\n file = \"shiny-apps/RData/ggplot_multiclass_nn.RData\")\n\n\n"
},
{
"alpha_fraction": 0.5719777345657349,
"alphanum_fraction": 0.5854314565658569,
"avg_line_length": 27.740331649780273,
"blob_id": "822c449cb3cc6dc8c071d5d2b6b14a936cd3a3b5",
"content_id": "57252d5bbba53d72f86643ca1b7904df9f6bce9a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 5203,
"license_type": "no_license",
"max_line_length": 120,
"num_lines": 181,
"path": "/machine-learning/multiclass_classification_tfidf_xgboost.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "library(xgboost)\nsource(\"utilities.R\")\n\n# prepare data =================================================================\nspecialties <- c(\n \"Gastroenterology\", \"Obstetrics / Gynecology\", \"Cardiovascular / Pulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\ncols <- c(\"specialty\", \"note\")\nset.seed(1234)\ndat <- read_notes(\n \"data/mtsamples_multi_class.csv\", \n duplicate_rm = TRUE,\n specialties = specialties,\n cols_keep = cols,\n id = TRUE,\n y_label = TRUE\n)\ntfidf <- tfidf_tm(dat$note)\ny <- dat$y\nn_class <- length(unique(y))\n\n\n\n# hyper-parameters tuning ======================================================\n# results: optimal paramters for both tfidf and pca25:\n# max_depth = 5\n# eta = 0.4\n# nround = 25\n\n# run only if need pca\ntfidf <- prcomp(tfidf)$x[, 1:25]\nX <- tfidf\n\nset.seed(12345)\nin_train <- caret::createDataPartition(y, p = 0.7, list = FALSE)\n\nX_train <- X[in_train,]\nX_test <- X[-in_train,]\n\ny_train <- y[in_train]\ny_test <- y[-in_train]\n\nn_rep <- 100\n# placeholder for hyperparamters and metrics\nparam_df = data.frame(max_depth = numeric(n_rep),\n eta = numeric(n_rep),\n metrics = numeric(n_rep),\n nround = integer(n_rep))\nmetrics_cv <- vector(\"list\", n_rep)\n\npb <- progress_bar$new(total = n_rep)\nfor (i in 1:n_rep) {\n pb$tick()\n maxdepth <- sample(2:10, 1)\n eta <- runif(1, 0.1, 0.5)\n \n param <- list(objective = \"multi:softmax\",\n num_class = n_class,\n eval_metric = \"merror\", # check for metric for multiclass\n max_depth = maxdepth,\n eta = eta\n )\n cv_nround = 30\n cv_nfold = 3\n xgb_cv <- xgb.cv(data=X_train, label = y_train, params = param, \n nthread=3, nfold=cv_nfold, nrounds=cv_nround,\n verbose = TRUE, early_stop_round=10, maximize=FALSE)\n \n cv_metrics <- xgb_cv$evaluation_log %>% \n set_colnames(c(\"iter\", \"train_mean\", \"train_std\", \"test_mean\", \"test_std\"))\n \n if(as.data.frame(param)[1, \"eval_metric\"] %in% c(\"auc\")){\n best_metric = max(cv_metrics[, test_mean])\n best_metric_round = which.max(cv_metrics[, test_mean])\n } else {\n best_metric = min(cv_metrics[, test_mean])\n best_metric_round = which.min(cv_metrics[, test_mean])\n }\n \n param_df[i, ] <- c(maxdepth, eta, best_metric, best_metric_round)\n metrics_cv[[i]] <- cv_metrics\n}\n\n# plot to find best parameters\nplot(param_df$max_depth, param_df$metrics)\nplot(param_df$eta, param_df$metrics)\nplot(param_df$nround, param_df$metrics)\n\n\n\n\n# one model: tfidf =============================================================\nX <- tfidf\n\nset.seed(11111)\nin_train <- caret::createDataPartition(y, p = 0.7, list = FALSE)\n\nX_train <- X[in_train,]\nX_test <- X[-in_train,]\n\ny_train <- y[in_train]\ny_test <- y[-in_train]\nparam <- list(objective = \"multi:softmax\",\n num_class = n_class,\n eval_metric = \"merror\",\n max_depth = 5,\n eta = 0.4\n)\nxgb <- xgboost(data = X_train, \n label = y_train, \n params = param,\n nthread = 3, \n nrounds = 25)\n\ny_pred <- predict(xgb, X_test)\ntable(y_test, y_pred)\n\nclasses_x <- c(\n \"Gastroenterology\", \"Obstetrics\\nGynecology\", \"Cardiovascular\\nPulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\nclasses_y <- c(\n \"Gastro-\\nenterology\", \"Obstetrics\\nGynecology\", \"Cardiovascular\\nPulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\nggplot_multiclass_xgb_tfidf_recall<- plot_confusion_matrix(y_test, y_pred, classes_x, classes_y)\nggplot_multiclass_xgb_tfidf_precision <- plot_confusion_matrix(y_test, y_pred, classes_x, classes_y, type = 
\"precision\")\n\naccuracy_xgb_tfidf <- accuracy(y_test, y_pred)\n\n\n\n\n# one model: pca =============================================================\nX <- prcomp(tfidf)$x[, 1:25]\n\nset.seed(11111)\nin_train <- caret::createDataPartition(y, p = 0.7, list = FALSE)\n\nX_train <- X[in_train,]\nX_test <- X[-in_train,]\n\ny_train <- y[in_train]\ny_test <- y[-in_train]\nparam <- list(objective = \"multi:softmax\",\n num_class = n_class,\n eval_metric = \"merror\",\n max_depth = 5,\n eta = 0.4\n)\nxgb <- xgboost(data = X_train, \n label = y_train, \n params = param,\n nthread = 3, \n nrounds = 25)\n\ny_pred <- predict(xgb, X_test)\ntable(y_test, y_pred)\n\nclasses_x <- c(\n \"Gastroenterology\", \"Obstetrics\\nGynecology\", \"Cardiovascular\\nPulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\nclasses_y <- c(\n \"Gastro-\\nenterology\", \"Obstetrics\\nGynecology\", \"Cardiovascular\\nPulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\nggplot_multiclass_xgb_pca_recall <- plot_confusion_matrix(y_test, y_pred, classes_x, classes_y)\nggplot_multiclass_xgb_pca_precision <- plot_confusion_matrix(y_test, y_pred, classes_x, classes_y, type = \"precision\")\n\naccuracy_xgb_pca <- accuracy(y_test, y_pred)\n\n\n\n\n# save for shiny\nsave(ggplot_multiclass_xgb_pca_recall, ggplot_multiclass_xgb_pca_precision,\n ggplot_multiclass_xgb_tfidf_recall, ggplot_multiclass_xgb_tfidf_precision,\n accuracy_xgb_tfidf, accuracy_xgb_pca,\n file = \"shiny-apps/RData/ggplot_multiclass_xgb.RData\")\n\n"
},
{
"alpha_fraction": 0.6199203133583069,
"alphanum_fraction": 0.6398406624794006,
"avg_line_length": 27.202247619628906,
"blob_id": "518e50da574e4e708c4d43a6e59b076fe824df88",
"content_id": "6737ba5f315bab01cdb5aea154a5e2b829d416f8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 2510,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 89,
"path": "/machine-learning/binary_classification_embedding_neural_network.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "library(tensorflow)\nlibrary(keras)\nlibrary(data.table)\nlibrary(magrittr)\nlibrary(stringr)\nsource(\"utilities.R\")\n\n\n# prepare data starting from medical note text\ndat <- fread(\"data/mtsample_gastroenterology_neurology.csv\") %>%\n .[, note := str_replace_all(note, \"\\\\.\", \"\\\\. \")] %>%\n .[, y := as.integer(factor(specialty)) - 1]\n\nnotes <- dat[, note]\n# initialize tokenizer specifing maximum words\nmax_words <- 3000\ntk <- text_tokenizer(num_words = max_words)\n# update tk in place with a vector or list of documents\nfit_text_tokenizer(tk, notes)\n# convert the documents into a list of sequence\nX <- texts_to_sequences(tk, notes)\n# # examine sequence length, the longest is 2471, mean 430\n# len <- sapply(X, function(x) length(x))\n# summary(len)\n# pad the sequence to get a matrix\nseq_length <- 500\nX <- pad_sequences(X, seq_length)\ny <- dat[, y]\n\n# split X and y into train and test\nset.seed(1234)\nin_train <- sample(1:nrow(X), round(0.7 * nrow(X)))\nin_test <- setdiff(1:nrow(X), in_train) %>%\n sample() # to shuffle the row numbers\nX_train <- X[in_train,]\ny_train <- y[in_train] \nX_test <- X[in_test,]\ny_test <- y[in_test]\n\ndim_emb <- 32\n\nmodel <- keras_model_sequential() %>% \n # input layer\n layer_embedding(input_dim = max_words, \n output_dim = dim_emb, \n input_length = seq_length) %>%\n layer_dropout(0.2) %>%\n layer_conv_1d(filters = 256, \n kernel_size = 3,\n activation = \"relu\",\n padding = \"valid\",\n strides = 1) %>%\n layer_dropout(0.2) %>%\n layer_global_average_pooling_1d() %>%\n layer_dense(units = 128, activation = \"relu\") %>% \n layer_dropout(0.2) %>%\n # output layer\n layer_dense(1, activation = \"sigmoid\")\n\nsummary(model)\n\n# compile, fit, and evaluate model in place\ncompile(model,\n loss = \"binary_crossentropy\",\n optimizer = \"adam\",\n metrics = \"accuracy\"\n)\n\nfit(model,\n x = X_train, y = y_train,\n epochs = 20,\n batch_size = 32,\n validation_split = 0.3,\n verbose = 2\n)\n\nevaluate(model, X_test, y_test, verbose = 0)\n\n\n# get model metrics use custum defined function\npred <- predict(model, X_test)\nmetrics_binary(y_test, pred)\nplot(y_test, pred)\n\n\n# save tensorflow model, different from traditional ML mode ====================\nsave_model_tf(object = model, filepath = \"trained_models/binary_embedding_neural_network\")\nreloaded_model <- load_model_tf(\"trained_models/binary_embedding_neural_network\")\nall.equal(predict(model,X), predict(reloaded_model, X))\n"
},
{
"alpha_fraction": 0.8029850721359253,
"alphanum_fraction": 0.8029850721359253,
"avg_line_length": 66,
"blob_id": "cce174b539c14d8b4ea96cbdb34a4d03fb70b6d5",
"content_id": "f3b42b7d92aac8e8552084a7211f51ceee197141",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "RMarkdown",
"length_bytes": 335,
"license_type": "no_license",
"max_line_length": 314,
"num_lines": 5,
"path": "/shiny-apps/Rmd/medical_named_entity_intro.Rmd",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "---\ntitle: \" \"\n---\n\nOne central task of clinical text mining is medical named entity recognition. In this section, we use two methods to extract medical named entities from clinical notes and visually represent the extractions with wordcloud, alongside top words picked by term frequency and term frequency inverse document frequency.\n"
},
{
"alpha_fraction": 0.7379912734031677,
"alphanum_fraction": 0.7379912734031677,
"avg_line_length": 44.79999923706055,
"blob_id": "2bdd9cfde7977e108b6405639aae984dccdc8cdd",
"content_id": "ead929a4c6725f186003c2d827005db970957ba6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "RMarkdown",
"length_bytes": 229,
"license_type": "no_license",
"max_line_length": 208,
"num_lines": 5,
"path": "/shiny-apps/Rmd/clinical_note_intro.Rmd",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "---\ntitle: \" \"\n---\n\nThis section answers what clinical note is, why it is important, and what the challenges are. We will also look into the details of clinical notes using samples at [mtsamples.com](https://www.mtsamples.com/).\n"
},
{
"alpha_fraction": 0.6426193118095398,
"alphanum_fraction": 0.648168683052063,
"avg_line_length": 30.418603897094727,
"blob_id": "dfb549fc601dca49cba4d48848ad976f0737624b",
"content_id": "fb3442168e2c73f47b17665e3067ba7a3dd09209",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 2703,
"license_type": "no_license",
"max_line_length": 91,
"num_lines": 86,
"path": "/machine-learning/multiclass_classification_generate_train_test_tfidf_pca_model.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "library(text2vec)\nsource(\"utilities.R\")\n\n# prepare data =================================================================\nspecialties <- c(\n \"Gastroenterology\", \"Obstetrics / Gynecology\", \"Cardiovascular / Pulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\ncols <- c(\"specialty\", \"note\")\nset.seed(1234)\ndat <- read_notes(\n \"data/mtsamples_multi_class.csv\", \n duplicate_rm = T,\n specialties = specialties,\n cols_keep = cols,\n id = TRUE,\n y_label = TRUE\n)\ny <- dat$y \nnotes <- dat$note\n\n\n# train-test split =============================================================\n# check functions in utilities.R under text2vec\nin_train <- caret::createDataPartition(y, p = 0.7, list = FALSE)\n\ntrain <- notes[in_train]\ntest <- notes[-in_train]\ntrain_y <- y[in_train]\ntest_y <- y[-in_train]\n\ntrain_vocab <- get_vocab(train) \ntrain_vocab <- train_vocab[str_detect(train_vocab$term, \"^[a-z]{2,}$\"),]\n\ntrain_vectorizer <- get_vectorizer(train_vocab)\ntrain_dtm <- get_dtm(train, train_vectorizer)\ntest_dtm <- get_dtm(test, train_vectorizer)\n\ntfidf_model <- fit_tfidf(train_dtm)\ntrain_tfidf <- transform_tfidf(train_dtm, tfidf_model)\ntest_tfidf <- transform_tfidf(test_dtm, tfidf_model)\n\npca_model <- fit_pca(train_tfidf)\ntrain_pca <- predict(pca_model, train_tfidf)\ntest_pca <- predict(pca_model, test_tfidf)\n\n\n# save data for multiclass classification and model deployment =================\nsave(train_tfidf, test_tfidf,\n train_pca, test_pca,\n train_y, test_y,\n train_vectorizer, tfidf_model, pca_model,\n file = \"machine-learning/multiclass_classification_train_test_tfidf_pca_models.RData\")\n\nsave(train_vectorizer, tfidf_model, pca_model,\n file = \"shiny-apps/RData/multiclass_classification_tfidf_pca_models.RData\")\n\n\n# create and saved model for deployment ========================================\nload(\"machine-learning/multiclass_classification_train_test_tfidf_pca_models.RData\")\n\nX_train <- train_pca[, 1:25]\nX_test <- test_pca[, 1:25]\n\ny_train <- as.factor(train_y)\ny_test <- as.factor(test_y)\n\nsvm_model_deploy <- svm(X_train, y_train)\ny_pred <- predict(svm_model_deploy, X_test)\n\nclasses_x <- c(\n \"Gastroenterology\", \"Obstetrics\\nGynecology\", \"Cardiovascular\\nPulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\nclasses_y <- c(\n \"Gastro-\\nenterology\", \"Obstetrics\\nGynecology\", \"Cardiovascular\\nPulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\n\nplot_confusion_matrix(y_test, y_pred, classes_x, classes_y, type = \"recall\")\nplot_confusion_matrix(y_test, y_pred, classes_x, classes_y, type = \"precision\")\n\naccuracy_svm_tfidf <- accuracy(y_test, y_pred)\n\nsaveRDS(svm_model_deploy, \n file = \"shiny-apps/trained_models/svm_model_deploy.rds\")\n\n"
},
{
"alpha_fraction": 0.5911389589309692,
"alphanum_fraction": 0.599838137626648,
"avg_line_length": 30.88387107849121,
"blob_id": "46c179279cd7a71a05cfa3b7e575e9892e4f5075",
"content_id": "e17050f441c76b85f9c65616497857e10de2376a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 4943,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 155,
"path": "/machine-learning/clustering_hcluster_three_classes.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "library(dendextend)\n\nsource(\"utilities.R\")\n\n# # Prepare data =================================================================\n# dat <- read_notes(\"data/amazon_medacy_mtsamples_gastr_neuro_urolo.csv\",\n# cols_keep = c(\"id\", \"amazon_me\", \"specialty\", \"note\"),\n# clean = TRUE,\n# y_label = TRUE)\n# \n# # tfidf matrix\n# tfidf_note <- tfidf_tm(dat$note)\n# tfidf_amazon <- tfidf_tm(dat$amazon_me)\n# y_true <- dat$y\n\n# load the same data for pca, kmeans, and hcluster\nload(\"shiny-apps/RData/pca_note_amazon_gas_neu_uro.RData\")\n\n\n\n# hierarchical clustering note =================================================\n# https://cran.r-project.org/web/packages/textmineR/vignettes/b_document_clustering.html\n# https://uc-r.github.io/hc_clustering \ntfidf = tfidf_note\ncos_sim <- tfidf %*% t(tfidf)\ndist <- as.dist(1 - cos_sim)\n# ward.D and ward.D2 are good for clustering, slight difference\nhc_note <- hclust(dist, \"ward.D\")\n#hc <- hclust(dist, \"ward.D2\") # one more correct\n# all below not good\n# hc <- hclust(dist, \"single\")\n# hc <- hclust(dist, \"complete\")\n# hc <- hclust(dist, \"average\")\n# hc <- hclust(dist, \"mcquitty\")\n# hc <- hclust(dist, \"median\")\n# hc <- hclust(dist, \"centroid\")\n\n\n# plot dendrogram\n# https://cran.r-project.org/web/packages/dendextend/vignettes/FAQ.html#introduction\ndend <- as.dendrogram(hc_note)\n\n# use true y to assign color\nsample_colors <- rep(character(0), nrow(tfidf))\nsample_colors[y_true == 0] <- \"red\"\nsample_colors[y_true == 1] <- \"blue\"\nsample_colors[y_true == 2] <- \"cyan\"\n\ndend <- assign_values_to_leaves_edgePar(\n dend=dend, \n value = sample_colors[order.dendrogram(dend)], \n edgePar = \"col\"\n)\npar(mar = c(0, 0, 2, 0))\nplot(dend, main = \"Medical Notes Clustering\",\n leaflab = \"none\", yaxt = \"none\")\nlegend(\"topright\", \n legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n cex = 0.8,\n lty = 1,\n col = c(\"red\", \"blue\", \"orange\"),\n bty = \"n\")\nrect.hclust(hc_note, 3, border = \"lightgreen\")\ndev.off()\n\n# confusion matrix\ny_cluster_note_hcluster <- cutree(hc_note, 3)\n\ny_pred_note_hcluster <- best_match(y_true, y_cluster_note_hcluster)\n\n\n# hierarchical clustering amazon =================================================\n# https://cran.r-project.org/web/packages/textmineR/vignettes/b_document_clustering.html\n# https://uc-r.github.io/hc_clustering \ntfidf = tfidf_amazon\ncos_sim <- tfidf %*% t(tfidf)\ndist <- as.dist(1 - cos_sim)\n# ward.D and ward.D2 are good for clustering, slight difference\nhc_amazon <- hclust(dist, \"ward.D\")\n#hc <- hclust(dist, \"ward.D2\") # one more correct\n# all below not good\n# hc <- hclust(dist, \"single\")\n# hc <- hclust(dist, \"complete\")\n# hc <- hclust(dist, \"average\")\n# hc <- hclust(dist, \"mcquitty\")\n# hc <- hclust(dist, \"median\")\n# hc <- hclust(dist, \"centroid\")\n\n\n# plot dendrogram\n# https://cran.r-project.org/web/packages/dendextend/vignettes/FAQ.html#introduction\ndend <- as.dendrogram(hc_amazon)\n\n# use true y to assign color\nsample_colors <- rep(character(0), nrow(tfidf))\nsample_colors[y_true == 0] <- \"red\"\nsample_colors[y_true == 1] <- \"blue\"\nsample_colors[y_true == 2] <- \"cyan\"\n\ndend <- assign_values_to_leaves_edgePar(\n dend=dend, \n value = sample_colors[order.dendrogram(dend)], \n edgePar = \"col\"\n)\npar(mar = c(0, 0, 2, 0))\nplot(dend, main = \"Medical Notes Clustering\",\n leaflab = \"none\", yaxt = \"none\")\nlegend(\"topright\", \n legend = c(\"Gastroenterology\", \"Neurology\", 
\"Urology\"),\n cex = 0.8,\n lty = 1,\n col = c(\"red\", \"blue\", \"orange\"),\n bty = \"n\")\nrect.hclust(hc_amazon, 3, border = \"lightgreen\")\ndev.off()\n\n# confusion matrix\ny_cluster_amazon_hcluster <- cutree(hc_amazon, 3)\n\ny_pred_amazon_hcluster <- best_match(y_true, y_cluster_amazon_hcluster)\n\n\nsave(hc_note, y_cluster_note_hcluster, y_pred_note_hcluster,\n hc_amazon, y_cluster_amazon_hcluster, y_pred_amazon_hcluster,\n file = \"shiny-apps/RData/hcluster_results.RData\")\n\n# plot_dend ====================================================================\nplot_dend <- function(hc){\n # plot dendgram of hierarchical clustering\n # hc: hclust created with function hc <- hclust(dist, \"ward.D\")\n dend <- as.dendrogram(hc)\n \n # use true y to assign color\n sample_colors <- rep(NA, length(hc$labels))\n sample_colors[y_true == 0] <- \"red\"\n sample_colors[y_true == 1] <- \"blue\"\n sample_colors[y_true == 2] <- \"orange\"\n \n dend <- assign_values_to_leaves_edgePar(\n dend=dend, \n value = sample_colors[order.dendrogram(dend)], \n edgePar = \"col\"\n )\n \n par(mar = c(0, 0, 2, 0))\n plot(dend, main = \"Medical Notes Clustering\",\n leaflab = \"none\", yaxt = \"none\")\n legend(\"topright\", \n legend = c(\"Gastroenterology\", \"Neurology\", \"Urology\"),\n cex = 0.8,\n lty = 1,\n col = c(\"red\", \"blue\", \"orange\"),\n bty = \"n\")\n rect.hclust(hc, 3, border = \"lightgreen\")\n}\n\n"
},
{
"alpha_fraction": 0.7663755416870117,
"alphanum_fraction": 0.7729257345199585,
"avg_line_length": 49.77777862548828,
"blob_id": "26eb093543fcdf274ca8430c7f9cba7185e8b31a",
"content_id": "f3237dfeedad94d3ae0a3383ba8759cbce47a538",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "INI",
"length_bytes": 458,
"license_type": "no_license",
"max_line_length": 313,
"num_lines": 9,
"path": "/python/.spyproject/workspace.ini",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "[workspace]\nrestore_data_on_startup = True\nsave_data_on_exit = True\nsave_history = True\nsave_non_project_files = False\n\n[main]\nversion = 0.1.0\nrecent_files = ['/home/gl/Dropbox/work-with-health-data/clinical_notes/python/utilities.py', '/home/gl/Dropbox/work-with-health-data/clinical_notes/python/add_amazon_medacy_mes_to_mtsamples.py', '/home/gl/Dropbox/work-with-health-data/clinical_notes/python/embedding_matrix_of_clinical_notes_using_biowordvec.py']\n\n"
},
{
"alpha_fraction": 0.6502782702445984,
"alphanum_fraction": 0.6502782702445984,
"avg_line_length": 34.93333435058594,
"blob_id": "6a19700d3e63a238c929382fccd3158ab186c9a7",
"content_id": "29b05934f14a5bcc57f2d8314c09f7614f7a1af3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 1078,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 30,
"path": "/create_bog_of_words.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "source(\"utilities.R\")\n\n# all mt samples as scraped ====================================================\nmtsamples_all <- fread(\"./data/mtsamples_scraped.csv\")\n\nsave(mtsamples_all, file = \"./shiny-apps/RData/mtsamples_all.RData\")\n\n\n# word statisctics including n_documents, n_time, avg_tf, avg_tfidf ============\n# only for three selected specialties \nnotes <- fread(\"data/mtsamples_gastroenterology_neurology_urology.csv\")\ntfidf_list <- top_tfidf(notes, \"note\") \nword_stats <- tfidf_list[[\"word_stats\"]]\n\nsave(word_stats, file = \"shiny-apps/RData/word_stats.RData\")\n\n\n# add amazon_me, medacy_me, top_tf and top_tfidf ===============================\n# to the three selected specialties\ntfidf <- tfidf_list[[\"tfidf\"]] %>%\n setDT()\n\n# get medical entities created by Amazon Comprehend Medical and medaCy\namazon_medacy <- fread(\"data/amazon_medacy_mtsamples_gastr_neuro_urolo.csv\") %>%\n .[, .(id, amazon_me, medacy_me)]\n\n# combine all for shiny server to save to RData\nnote_bows <- amazon_medacy[tfidf, on = \"id\"]\n\nsave(note_bows, file = \"./shiny-apps/RData/note_bows.RData\")\n"
},
{
"alpha_fraction": 0.5015087723731995,
"alphanum_fraction": 0.513276994228363,
"avg_line_length": 31.17475700378418,
"blob_id": "b5dd3f4ba3ffb877af35b7b8b4b93c1798c64c24",
"content_id": "ba9b2c1c04c8b1aa1db041ffc7450066c35ca164",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 3314,
"license_type": "no_license",
"max_line_length": 83,
"num_lines": 103,
"path": "/machine-learning/binary_classification_tfidf_xgboost.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "library(xgboost)\nlibrary(data.table)\nlibrary(magrittr)\n#library(caret)\nlibrary(progress)\nlibrary(stringr)\nsource(\"utilities.R\")\n\n# prepare data =================================================================\n\ndat <- fread(\"data/mtsample_gastroenterology_neurology.csv\") %>%\n .[, note := str_replace_all(note, \"\\\\.\", \"\\\\. \")] %>%\n .[, y := as.integer(factor(specialty)) - 1]\nX <- tfidf_tm(dat$note)\ny <- dat$y\n\nset.seed(1234)\nin_train <- sample(1:nrow(X), round(0.7 * nrow(X)))\nin_test <- setdiff(1:nrow(X), in_train) %>%\n sample()\nX_train <- X[in_train,]\ny_train <- y[in_train]\nX_test <- X[in_test,]\ny_test <- y[in_test]\n\n# parameter tuning =============================================================\nn_rep <- 100\nparam_df = data.frame(max_depth = numeric(n_rep),\n eta = numeric(n_rep),\n seed_number = integer(n_rep),\n best_metric = numeric(n_rep),\n best_metric_round = integer(n_rep))\nmetrics_cv <- vector(\"list\", n_rep)\n\nbest_metric <- Inf # when metric is the smaller the better\npb <- progress_bar$new(total = n_rep)\n\nfor (i in 1:n_rep) {\n pb$tick()\n param <- list(objective = \"binary:logistic\",\n eval_metric = \"error\",\n max_depth = sample(2:10, 1),\n eta = runif(1, 0.1, 0.5)\n )\n cv_nround = 100\n cv_nfold = 3 # small sample size\n seed_number = sample.int(10000, 1)\n set.seed(seed_number)\n xgb_cv <- xgb.cv(data=X_train, label = y_train, params = param, \n nthread=3, nfold=cv_nfold, nrounds=cv_nround,\n verbose = FALSE, maximize=FALSE)\n \n cv_metrics <- xgb_cv$evaluation_log %>% \n set_colnames(c(\"iter\", \"train_mean\", \"train_std\", \"test_mean\", \"test_std\"))\n \n # When eval_metric is the larger the better\n if(as.data.frame(param)[1, \"eval_metric\"] %in% c(\"auc\", \"map\")){\n metric <- max(cv_metrics[, test_mean])\n metric_round <- which.max(cv_metrics[, test_mean])\n if (metric > best_metric) {\n best_metric <- metric\n best_metric_round <- metric_round\n best_seednumber <- seed_number\n best_param <- param\n }\n # when eval_matric is the smaller the better\n } else {\n metric <- min(cv_metrics[, test_mean])\n metric_round <- which.min(cv_metrics[, test_mean])\n if (metric < best_metric) {\n best_metric <- metric\n best_metric_round <- metric_round\n best_seednumber <- seed_number\n best_param <- param\n }\n }\n \n \n param_df[i, ] <- c(as.data.frame(param)[1, \"max_depth\"],\n as.data.frame(param)[1, \"eta\"],\n seed_number,\n metric,\n metric_round)\n metrics_cv[[i]] <- cv_metrics\n}\n\n\n\n# final training ===============================================================\nset.seed(best_seednumber)\nxgb <- xgboost(data = X_train, \n label = y_train, \n params = best_param,\n nthread = 3, \n nrounds = best_metric_round)\n\n\npred <- predict(xgb, X_test)\npred_class <- as.integer(pred > 0.5)\nmetrics_binary(y_test, pred)\n\n\nsaveRDS(xgb, file = \"trained_models/binary_classification_tfidf_xgboost.rda\")\n"
},
{
"alpha_fraction": 0.688753604888916,
"alphanum_fraction": 0.698782205581665,
"avg_line_length": 32.238094329833984,
"blob_id": "e5bb2332eaee9768a2624bb007dba080eacbc17c",
"content_id": "bada53dcb1b9409449e58137208ed751944efcad",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2792,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 84,
"path": "/python/embedding_matrix_of_clinical_notes_using_biowordvec.py",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "\"\"\"\nGenerate word embedding matrix of clinical notes using pre-trained BioWordVec.\n\nThe word embedding matrix is saved as csv file for transfer learning in R.\n\"\"\"\n#%% load packages\nfrom gensim.models import KeyedVectors\nimport numpy as np\nimport pandas as pd\nimport tensorflow as tf\nfrom tensorflow import keras\nfrom tensorflow.keras.preprocessing.text import Tokenizer\n\n\n#%% load biowordvec\n# use limit to set the amount of top words to read. Far exceed 16GB memory if read all.\n# loading the top 5M words takes 6.5G memory, comfortable for my 16GB laptop\ndir_biowordvec = \"/home/gl/data/pre-trained-word-embedding/bio-word-vector/\"\nmodel = KeyedVectors.load_word2vec_format(\n fname=dir_biowordvec + \"BioWordVec_PubMed_MIMICIII_d200.vec.bin\", \n binary=True, \n limit=int(5E6) \n)\n\n# check the words in the model\nmodel_words = model.vocab.keys()\n\n\n#%% get pretrained embedding matrix of tokens of gas-neu-uro\n# we will use keras to tokenize the note as later on they are used by keras\n\ndef get_embedding_matrix(corpus, max_words=5000, fname=None):\n \"\"\"\n Get embedding matrix of corpus using BioWordVec\n\n Parameters\n ----------\n corpus: string iterable\n max_words: int, number of top frequent words to keep\n fname: str, file path if want to save the martix to a text file\n\n Return\n ------\n No return but a text file may be saved\n \"\"\"\n tk = Tokenizer(5000) # keep top 5000 to save space\n tk.fit_on_texts(corpus)\n word_index = tk.word_index\n tokens = list(word_index.keys())\n\n # get the matrix of my own words\n # as np.zeros if a word is not in the model vocabulary\n\n token_embeddings = []\n not_in_model = []\n for wd in tokens:\n if wd in model_words:\n token_embeddings.append(model.get_vector(wd))\n else:\n token_embeddings.append(np.zeros(200))\n not_in_model.append(wd)\n print(f\"{len(not_in_model)} tokens are not in the model's vocab\")\n print(\"These tokens are asigned array of zeros. They are: \")\n print(not_in_model) \n \n token_df = pd.DataFrame(token_embeddings) \n token_df.index = tokens\n\n # save my_embeddings as text file\n if fname is not None:\n token_df.to_csv(fname, header=False)\n \n\nmt = pd.read_csv(\"data/amazon_medacy_mtsamples_gastr_neuro_urolo.csv\")\n# Get pretrained embedding matrix of original notes\n# clean to be consistent with R read_notes\nnotes = mt.note.str.replace(\".\", \". \").drop_duplicates(False)\nfile_name = \"../data/gas_neu_uro_token_embeddings_note.csv\"\nget_embedding_matrix(notes, fname=file_name)\n\n# get pretrained embedding matrix of Amazon medical entities\namzn = mt.amazon_me.drop_duplicates(False)\nfile_name = \"../data/gas_neu_uro_token_embeddings_amazon.csv\"\nget_embedding_matrix(amzn, fname=file_name)\n"
},
{
"alpha_fraction": 0.32850512862205505,
"alphanum_fraction": 0.3352552354335785,
"avg_line_length": 31.845266342163086,
"blob_id": "3708980865cfa9ce6f98872a332c063c8aa5c361",
"content_id": "48300b8783f61aecddfda7cbcdf167c7079b26ee",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 14222,
"license_type": "no_license",
"max_line_length": 78,
"num_lines": 433,
"path": "/shiny-apps/ui.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "library(shiny)\nlibrary(shinydashboard)\n\ndashboardPage(\n dashboardHeader(title = \"Clinical Note Analysis\"),\n dashboardSidebar(\n sidebarMenu(\n # sidebar menu =====================================================\n menuItem(\"Overview\", \n tabName = \"overview\", \n icon = icon(\"home\")),\n menuItem(\"Clinical Notes\", \n tabName = \"clinical_note\", \n icon = icon(\"table\")),\n menuItem(\"Medical Named Entities\", \n tabName = \"medical_entity\", \n icon = icon(\"table\")),\n menuItem(\"Clustering\", \n tabName = \"cluster\", \n icon = icon(\"brain\")),\n menuItem(\"Classification\", \n tabName = \"classification\", \n icon = icon(\"brain\"))\n )\n ),\n dashboardBody(\n # modify css ==========================================================\n tags$head(tags$style(includeCSS(\"asset/custom.css\"))),\n \n tabItems(\n # overview ====\n tabItem(\n \"overview\",\n includeMarkdown(\"Rmd/overview.Rmd\")\n ),\n \n # clinical note ====================================================\n tabItem(\n \"clinical_note\",\n h1(\"What are Clinical Notes\"),\n includeMarkdown(\"Rmd/clinical_note_intro.Rmd\"),\n br(),\n \n tabsetPanel(\n # .. clinical notes ====\n tabPanel(\n \"Introduction\", \n includeMarkdown(\"./Rmd/clinical_note_introduction.Rmd\"),\n \n br(),\n tags$a(\n href=\"#top\", \"Go to Top\",\n class = \"go-to-top\"\n )\n ),\n \n # .. mtsamples ====\n tabPanel(\n \"mtsamples.com\", \n includeMarkdown(\"./Rmd/clinical_note_mtsamples.Rmd\"),\n br(),\n \n fluidPage(\n column(\n 6,\n plotOutput(\"specialty_count\"),\n style = \"padding-left: 0;\"\n ),\n column(\n 6,\n plotOutput(\"section_count\"),\n style = \"padding-right: 0;\"\n )\n ),\n \n br(),\n h3(\"Clinical note examples\"),\n p(\"In the table below, we list one example note for each \n category, as well as the section headers for each note.\"),\n DT::dataTableOutput(\"raw_table\"),\n \n br(),\n tags$a(\n href=\"#top\", \"Go to Top\",\n class = \"go-to-top\"\n )\n )\n )\n ),\n \n # medical_entities =====================================================\n \n tabItem(\n \"medical_entity\",\n \n h1(\"Medical Named Entities\"),\n includeMarkdown(\"Rmd/medical_named_entity_intro.Rmd\"),\n \n tabsetPanel(\n # .. extraction ====\n tabPanel(\n \"Extraction\",\n includeMarkdown(\"Rmd/medical_named_entity_extraction.Rmd\"),\n br(),\n \n textInput(\"word\", \n \"Search word in the table\",\n width = \"200px\",\n placeholder = \"type in a word\"),\n htmlOutput(\"word_stats\"),\n \n br(),\n DT::dataTableOutput(\"bows\"),\n \n br(),\n tags$a(\n href=\"#top\", \"Go to Top\",\n class = \"go-to-top\"\n )\n ),\n \n # .. 
wordcloud ====\n tabPanel(\n \"Wordcloud\",\n includeMarkdown(\"Rmd/medical_named_entity_wordcloud.Rmd\"),\n br(),\n \n fluidRow(\n # word cloud 1\n column(\n 6,\n fluidRow(\n column(\n 6,\n selectInput(\"cloud_type_1\", \n \"Select specialty\",\n choices = c(\"All\", \n \"Gastroenterology\", \n \"Neurology\",\n \"Urology\"),\n selected = \"Gastroenterology\")\n ),\n column(\n 5,\n selectInput(\"cloud_method_1\",\n \"Select bag of words\",\n choices = c(\"amazon_me\", \n \"medacy_me\", \n \"top_tf\", \n \"top_tfidf\"),\n selected = \"top_tfidf\")\n )\n ),\n plotOutput(\"wordcloud_1\", width = \"90%\", height = \"300px\"),\n plotOutput(\"bar_1\", width = \"90%\", height = \"200px\")\n ),\n \n # word cloud 2\n column(\n 6,\n fluidRow(\n column(\n 6,\n selectInput(\"cloud_type_2\", \n \"Select specialty\",\n choices = c(\"All\", \n \"Gastroenterology\", \n \"Neurology\",\n \"Urology\"),\n selected = \"Gastroenterology\")\n ),\n column(\n 5,\n selectInput(\"cloud_method_2\",\n \"Select bag of words\",\n choices = c(\"amazon_me\", \n \"medacy_me\", \n \"top_tf\", \n \"top_tfidf\"),\n selected = \"amazon_me\")\n )\n ),\n plotOutput(\"wordcloud_2\", width = \"90%\", height = \"300px\"),\n plotOutput(\"bar_2\", width = \"90%\", height = \"200px\")\n )\n ),\n \n br(),\n tags$a(\n href=\"#top\", \"Go to Top\",\n class = \"go-to-top\"\n )\n )\n )\n \n \n ),\n \n # clustering ======================================================\n tabItem(\n \"cluster\",\n h1(\"Identify Medical Subdomains\"),\n includeMarkdown(\"Rmd/clustering_intro.Rmd\"),\n \n tabsetPanel(\n # .. pca ====\n tabPanel(\n \"Principal Components\",\n includeMarkdown(\"Rmd/clustering_pca.Rmd\"),\n br(),\n radioButtons(\"pca\",\n label = \"Select corpus\",\n choices = c(\"clinical notes\",\n \"amazon medical entities\"),\n # selected = \"amazon medical entities\",\n inline = TRUE),\n plotOutput(\"pca_plot\", height = \"800px\"),\n \n br(),\n tags$a(\n href=\"#top\", \"Go to Top\",\n class = \"go-to-top\"\n )\n ),\n \n # .. hcluster ====\n tabPanel(\n \"Hierarchical Clustering\",\n includeMarkdown(\"Rmd/clustering_hcluster.Rmd\"),\n br(),\n \n radioButtons(\"dend\",\n label = \"Select corpus\",\n choices = c(\"clinical notes\",\n \"amazon medical entities\"),\n inline = TRUE),\n plotOutput(\"dend_plot\"),\n br(),\n plotOutput(\"hcluster_plot\"),\n \n br(),\n tags$a(\n href=\"#top\", \"Go to Top\",\n class = \"go-to-top\"\n )\n ),\n \n # .. kmeans ====\n tabPanel(\n \"K-means Clustering\",\n includeMarkdown(\"Rmd/clustering_kmeans.Rmd\"),\n br(),\n \n fluidRow(\n column(\n 5,\n radioButtons(\"kmeans\",\n label = \"Select corpus\",\n choices = c(\"clinical notes\",\n \"amazon medical entities\"),\n inline = TRUE)\n ),\n column(\n 5,\n radioButtons(\"kmeans_pca\",\n label = \"Use principal components?\",\n choices = c(\"yes\", \"no\"),\n selected = \"no\",\n inline = TRUE)\n )\n ),\n plotOutput(\"kmeans_plot\", height = \"400px\"),\n \n br(),\n tags$a(\n href=\"#top\", \"Go to Top\",\n class = \"go-to-top\"\n )\n )\n )\n ),\n \n # classification ===================================================\n tabItem(\n \"classification\",\n h1(\"Medical Specialty Classification\"),\n includeMarkdown(\"Rmd/classification_intro.Rmd\"),\n tabsetPanel(\n # .. gas-neu-uro ====\n tabPanel(\n \"Gastro-Neuro-Urol\",\n includeMarkdown(\"Rmd/classification_gas_neu_uro.Rmd\"),\n # the html is generated from Rmd; we then delete \n # everything outside of <body> ... 
</body>.\n # The js and css in header mess up with shiny's js & css\n includeHTML(\"Rmd/classification_gas_neu_uro_table.html\"),\n \n br(),\n tags$a(\n href=\"#top\", \"Go to Top\",\n class = \"go-to-top\"\n )\n ),\n \n # .. multiclass ====\n tabPanel(\n \"Multiclass Classification\",\n includeMarkdown(\"Rmd/classification_multiclass.Rmd\"),\n \n \n fluidRow(\n # plot confusion matrix 1\n column(\n 6,\n fluidRow(\n column(1),\n column(\n 5,\n selectInput(\"cm_method_1\",\n \"Select algorithm and method\",\n choices = c(\"svm + tfidf\", \n \"svm + tfidf + pca\", \n \"neural network + tfidf\", \n \"neural network + tfidf + pca\"),\n selected = \"svm + tfidf\"),\n \n htmlOutput(\"acc_1\")\n ),\n column(\n 4,\n radioButtons(\"cm_type_1\",\n \"Select type\",\n choices = c(\"recall\", \"precision\"),\n selected = \"recall\",\n inline = TRUE)\n )\n ),\n plotOutput(\"multiclass_1\", width = \"90%\", height = \"400px\")\n ),\n \n # plot confusion matrix 2\n column(\n 6,\n fluidRow(\n column(1),\n column(\n 5,\n selectInput(\"cm_method_2\",\n \"Select algorithm and method\",\n choices = c(\"svm + tfidf\", \n \"svm + tfidf + pca\", \n \"neural network + tfidf\", \n \"neural network + tfidf + pca\"),\n selected = \"svm + tfidf + pca\"),\n \n htmlOutput(\"acc_2\")\n ),\n column(\n 4,\n radioButtons(\"cm_type_2\",\n \"Select type\",\n choices = c(\"recall\", \"precision\"),\n selected = \"recall\",\n inline = TRUE)\n )\n ),\n plotOutput(\"multiclass_2\", width = \"90%\", height = \"400px\")\n )\n ),\n \n br(),\n tags$a(\n href=\"#top\", \"Go to Top\",\n class = \"go-to-top\"\n )\n ),\n \n # .. prediction ====\n tabPanel(\n \"Deployed Model\",\n includeMarkdown(\"Rmd/classification_prediction.Rmd\"),\n br(),\n \n fluidRow(\n column(\n 3,\n fileInput(\"file_upload\", \"Choose a file\",\n multiple = FALSE,\n accept = c(\"text/csv\",\n \"text/comma-separated-values,text/plain\",\n \".csv\")),\n includeText(\"Rmd/file_upload.txt\")\n ),\n \n column(\n 9,\n # input from text field\n textAreaInput(\n \"text_input\",\n \"Input clinical notes\",\n placeholder = \"One paragraph per note\",\n height = \"200px\"\n ) %>% \n # bug in width when set at \"100%\"\n shiny::tagAppendAttributes(style = 'width: 100%;')\n \n ## do not use submitButton, it control all input\n #submitButton(\"Submit\"),\n #actionButton(\"submit_text_input\", \"submit\")\n )\n ),\n br(),\n \n \n h4(\"Prediction:\"),\n verbatimTextOutput(\"print_prediction\"),\n br(),\n downloadButton(\"download\", \"Download prediciton\"),\n \n # input using uploaded data\n br(),\n br(),\n br(),\n tags$a(\n href=\"#top\", \"Go to Top\",\n class = \"go-to-top\"\n )\n )\n )\n )\n )\n )\n)\n"
},
{
"alpha_fraction": 0.5912008285522461,
"alphanum_fraction": 0.6085851788520813,
"avg_line_length": 28.325490951538086,
"blob_id": "74582586ab2c257c76485958af9145eff4785791",
"content_id": "19eaea1ddce85f62437031cd551c9c8f9551ac1c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 7478,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 255,
"path": "/machine-learning/gas_neu_uro_classification_embedding_neural_netword.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "library(tensorflow)\nlibrary(keras)\nsource(\"utilities.R\")\nlibrary(data.table)\nlibrary(progress)\n\nget_pretrained_matrix <- function(tokens, pretrained_file){\n # Calculate pretrained matrix for the top max_words tokens in a corpus\n #\n # Arguments:\n # tokens: string vector, tokens tokenized using keras\n # pretrained_file: str, csv file of saved pretrained matrix, from which to\n # subset and get the pretrained matrix of tokens\n # max_words: int, number of top frequent token to keep\n #\n # Return:\n # a matrix\n \n # load the saved pretrained matrix and keep only those in tokens\n # make sure all tokens are included in pretrained vacob, usually the case\n pretrained_dt <- fread(pretrained_file)\n stopifnot(length(setdiff(tokens, pretrained_dt$V1)) == 0) \n pretrained_dt <- pretrained_dt[V1 %in% tokens] \n \n # reorder the pretrained matrix so the row order agrees with words order\n cat(\"Calculate pretrained matrix:\\n\")\n order_factor <- factor(tokens, levels = tokens)\n pb <- progress_bar$new(total = length(tokens))\n for (wd in tokens){\n pb$tick()\n pretrained_dt[V1 == wd, levels := which(levels(order_factor) == wd)]\n }\n \n # only return the matrix \n pretrained_matrix <- pretrained_dt[order(levels)] %>%\n .[, V1 := NULL] %>%\n .[, levels := NULL] %>%\n as.matrix()\n}\n\n\ncnn_metrics <- function(corpus, pretrained_file, seq_length = 500){\n # calculate average accuracy and f1 score out of 100 repeat\n # \n # Arguments:\n # corpus: string vector\n # pretrained_file: str, csv file of saved pretrained matrix, from which to\n # subset and get the pretrained matrix of tokens\n #\n # Return:\n # numeric vector \n #\n \n # initialize tokenizer specifing maximum words\n max_words <- 3000\n tk <- text_tokenizer(num_words = max_words)\n # update tk in place with a vector or list of documents\n fit_text_tokenizer(tk, corpus)\n \n token_index <- tk$word_index\n tokens <- names(token_index)[1:max_words]\n pretrained_matrix <- get_pretrained_matrix(tokens, pretrained_file)\n \n # convert the documents into a list of sequence\n X <- texts_to_sequences(tk, corpus)\n \n # pad the sequence to get a matrix\n seq_length <- seq_length\n X <- pad_sequences(X, seq_length)\n y_class <- dat$y\n n_class <- length(unique(y_class))\n y <- to_categorical(y_class, n_class)\n \n \n n_rep <- 100\n df_acc_f1 <- data.frame(\n acc = numeric(n_rep),\n f1_gas = numeric(n_rep),\n f1_neu = numeric(n_rep),\n f1_uro = numeric(n_rep)\n )\n set.seed(6789)\n in_trains <- caret::createDataPartition(y_class, times = n_rep, p = 0.7)\n \n cat(\"Train 100 models:\\n\")\n pb <- progress_bar$new(total = 100)\n pb$tick(0) # display progress bar right away. 
\n for (i in 1:100){\n pb$tick()\n in_train <- in_trains[[i]]\n X_train <- X[in_train,]\n y_train <- y[in_train,] \n X_test <- X[-in_train,]\n y_test <- y[-in_train,]\n y_test_class <- y_class[-in_train]\n \n dim_emb <- 200\n dropout <- 0.3\n model <- keras_model_sequential() %>% \n layer_embedding(input_dim = max_words,\n output_dim = dim_emb,\n input_length = seq_length) %>%\n layer_dropout(dropout) %>%\n layer_conv_1d(filters = 16,\n kernel_size = 3,\n #activation = \"relu\",\n padding = \"valid\",\n strides = 1) %>%\n layer_dropout(dropout) %>%\n layer_global_average_pooling_1d() %>%\n layer_dense(units = 64, activation = \"relu\") %>%\n layer_dropout(dropout) %>%\n # output layer\n layer_dense(n_class, activation = \"softmax\")\n \n # use pretrained embedding\n get_layer(model, index = 1) %>% \n set_weights(list(pretrained_matrix))\n \n # compile, fit, and evaluate model in place\n compile(model,\n loss = \"categorical_crossentropy\",\n optimizer = \"adam\",\n metrics = \"accuracy\"\n )\n \n fit(model,\n x = X_train, y = y_train,\n epochs = 50,\n batch_size = 32,\n validation_split = 0.3,\n verbose = 3\n )\n \n \n y_pred <- predict(model, X_test)\n y_pred_class <- predict_classes(model, X_test)\n tb <- table(y_test_class, y_pred_class)\n acc <- sum(diag(tb)) / length(y_test_class)\n \n f1 <- function(tb, k){\n recall <- diag(tb)[k] / sum(y_test_class == k - 1)\n precision <- diag(tb)[k] / sum(y_pred_class == k - 1)\n f1 <- 2 * (recall * precision) / (recall + precision)\n }\n \n f1_gas <- f1(tb, 1)\n f1_neu <- f1(tb, 2)\n f1_uro <- f1(tb, 3)\n \n df_acc_f1[i, ] <- c(acc, f1_gas, f1_neu, f1_uro)\n }\n return(df_acc_f1)\n}\n\n\n\n# prepare data =================================================================\nset.seed(12345)\ndat <- read_notes(\n \"data/amazon_medacy_mtsamples_gastr_neuro_urolo.csv\",\n duplicate_rm = T,\n cols_keep = \"all\",\n y_label = TRUE\n)\n\n\n# try out cnn construction using clinical notes ================================\nnotes <- dat$note\n# initialize tokenizer specifing maximum words\nmax_words <- 3000\ntk <- text_tokenizer(num_words = max_words)\n# update tk in place with a vector or list of documents\nfit_text_tokenizer(tk, notes)\n\nclinical_word_index <- tk$word_index\nclinical_words <- names(clinical_word_index)[1:max_words]\n\n# convert the documents into a list of sequence\nX <- texts_to_sequences(tk, notes)\n\n# pad the sequence to get a matrix\nseq_length <- 500\nX <- pad_sequences(X, seq_length)\ny_class <- dat$y\nn_class <- length(unique(y_class))\ny <- to_categorical(y_class, n_class)\n\n# split X and y into train and test\nset.seed(1234)\nin_train <- caret::createDataPartition(y_class, p = 0.7, list = FALSE)\nX_train <- X[in_train,]\ny_train <- y[in_train,] \nX_test <- X[-in_train,]\ny_test <- y[-in_train,]\ny_test_class <- y_class[-in_train]\n\n\n# build model =================================================================\ndim_emb <- 200 # vector length of pretrained embedding\ndropout <- 0.3\nmodel <- keras_model_sequential() %>% \n layer_embedding(input_dim = max_words,\n output_dim = dim_emb,\n input_length = seq_length) %>%\n layer_dropout(dropout) %>%\n layer_conv_1d(filters = 16,\n kernel_size = 3,\n #activation = \"relu\",\n padding = \"valid\",\n strides = 1) %>%\n layer_dropout(dropout) %>%\n layer_global_average_pooling_1d() %>%\n layer_dense(units = 64, activation = \"relu\") %>%\n layer_dropout(dropout) %>%\n # output layer\n layer_dense(n_class, activation = \"softmax\")\n\nsummary(model)\n\n\n# set the embedding layer to 
pretrained matrix\npretrained_file <- \"data/gas_neu_uro_token_embeddings_note.csv\"\npretrained_matrix <- get_pretrained_matrix(clinical_words, pretrained_file)\nget_layer(model, index = 1) %>% \n set_weights(list(pretrained_matrix))\n\n# compile, fit, and evaluate model in place\ncompile(model,\n loss = \"categorical_crossentropy\",\n optimizer = \"adam\",\n metrics = \"accuracy\"\n)\n\nfit(model,\n x = X_train, y = y_train,\n epochs = 50,\n batch_size = 32,\n validation_split = 0.3,\n verbose = 3\n)\n\n\n# repeat 100 times to get average metrics =======\n\n# mean and standard deviation\npretrained_file <- \"data/gas_neu_uro_token_embeddings_note.csv\"\ndf_acc_f1 <- cnn_metrics(dat$note, pretrained_file)\nsapply(df_acc_f1, mean)\nsapply(df_acc_f1, sd)\n\npretrained_file <- \"data/gas_neu_uro_token_embeddings_amazon.csv\"\ndf_acc_f1 <- cnn_metrics(dat$amazon_me, pretrained_file)\nsapply(df_acc_f1, mean)\nsapply(df_acc_f1, sd)\n"
},
{
"alpha_fraction": 0.5125378966331482,
"alphanum_fraction": 0.5233948826789856,
"avg_line_length": 31.171985626220703,
"blob_id": "edc15987a46200b51201deba84cd6df9d5b11936",
"content_id": "270412c14c6c214d1efa271f6d62d1be9cca4947",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 18145,
"license_type": "no_license",
"max_line_length": 110,
"num_lines": 564,
"path": "/utilities.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "###\n### collection of functions used by multiple files in this project\n###\n\nlibrary(tm)\nlibrary(magrittr)\nlibrary(ggplot2)\nlibrary(grid)\nlibrary(gridExtra)\nlibrary(data.table)\nlibrary(tidyverse)\nlibrary(tidytext)\nlibrary(progress)\n\n\nread_notes <- function(csv_file, \n specialties = NULL,\n y_label = NULL, \n cols_keep = c(\"specialty\", \"note\"),\n randomize = TRUE,\n duplicate_rm = TRUE, \n clean = TRUE,\n id = TRUE,\n random_state = 1234){\n # read clinical notes and add label y to the original data\n #\n # Arguments:\n # csv_file: string, path to the the data file\n # specialties: string vector, selected specialties such as\n # c(\"Gastroenterology\", \"Neurology\")\n # cols_keep: string, columns in the orignial data to keep, \"all\" to keep\n # all columns.\n # randomize: boolean, randomize row (sample) orders to break grouping\n # y_label: boolean, if TRUE, add a class label 0, 1, 2, ... to each note\n # duplicate_rm: boolean, remove rows if duplicated in column note\n # clean: boolean, if TRUE add missing space after \".\", for example,\n # \"abscess.PROCEDURE\".\n # id: boolean, add id to each sample in the original data. Randomize and \n # remove duplicates does not change note id.\n # random_state: int, set random state if randomize is TRUE\n #\n # Return:\n # a data.table\n \n set.seed(random_state) # only useful when randomize = TRUE\n \n dat <- fread(csv_file)\n if (!is.null(specialties)){\n dat <- dat[specialty %in% specialties]\n }\n if (!identical(cols_keep, \"all\")){\n dat <- dat[, ..cols_keep] # ..var select columns by variable\n }\n if (isTRUE(clean)){\n # missing space after \".\", for example \"abscess.PROCEDURE\"\n dat[, note := str_replace_all(note, \"\\\\.\", \"\\\\. \")]\n }\n if (isTRUE(y_label)){\n dat[, y := as.integer(factor(specialty)) - 1]\n }\n if (id){\n dat[, id := 1:nrow(dat)]\n setcolorder(dat, c(\"id\", setdiff(names(dat), \"id\")))\n }\n if (duplicate_rm){\n # all duplicated including first one\n rows_duplicated <- duplicated(dat$note) | duplicated(dat$note, fromLast = TRUE)\n dat <- dat[!rows_duplicated]\n message(paste(\"Deleted\", sum(rows_duplicated), \n \"rows with duplicated notes.\",\n \"Set duplicate_rm = FALSE if you want to keep duplicates.\"))\n }\n if (randomize){\n dat <- dat[sample(nrow(dat))]\n }\n}\n\n\n\nword_count <- function(doc_vector){\n # count word in corpus\n #\n # Arguments:\n # doc_vector: a vector of documents\n #\n # Return:\n # data.table\n \n count <- tolower(doc_vector) %>%\n str_split(\", | \") %>%\n unlist() %>%\n table() %>%\n as.data.table() %>%\n set_colnames(c(\"word\", \"count\")) %>%\n .[!word %in% tm::stopwords()] %>% # remove stopwords\n .[word != \"\"] %>% # medaCy generate nothing from some notes\n .[order(-count)] %>%\n .[count > 1] %>% # delete useless info to save plotting time\n .[, word := factor(word, levels = word)]\n}\n\n\n\ntop_tfidf <- function(df, col){\n # Add tfidf columns to a dataframe containing a column of documents and \n # count the words in documents using package tidytext\n #\n # Arguments:\n # df: dataframe containing a column of corpus\n # col: string, column name of the corpus selected for tfidf \n # return:\n # list of two data frames:\n # tfidf: include tf, tfidf, top_tf, top_tfidf\n \n # count of each word in each document\n tokens <- as_tibble(df) %>%\n select(id, !!col) %>%\n # list all tokens in each document\n unnest_tokens(word, !!col) %>%\n # remove stop words. 
do NOT use tidytext's stop_words, too broad\n    filter(!word %in% tm::stopwords()) %>% \n    # keep words with letters and \"'\" only, then remove 's\n    filter(str_detect(word, \"^[a-z']+$\")) %>%\n    mutate(word = str_remove_all(word, \"'s\")) %>%\n    # count grouped by id and word\n    count(id, word, sort = TRUE)\n  \n  \n  # times of appearance of each word in all documents\n  n_times <- tokens %>%\n    group_by(word) %>%\n    summarise(n_times = sum(n))\n  \n  # words that only show up one time in all documents\n  words_1 <- n_times %>%\n    filter(n_times == 1) %>%\n    select(word) %>%\n    pull()\n  \n  # remove the one-time words, which are not representative\n  tokens <- tokens %>%\n    filter(!word %in% words_1)\n  \n  # number of words in each document \n  total_words <- tokens %>% \n    group_by(id) %>%\n    summarise(total = sum(n))\n  \n  # calculate tfidf and combine with the total number of words in each document\n  df_tfidf <- bind_tf_idf(tokens, word, id, n) %>%\n    left_join(total_words)\n\n  # for each word: number of documents, total count, average tf and tfidf\n  word_stats <- df_tfidf %>%\n    group_by(word) %>%\n    summarise(n_documents = n(),\n              n_times = sum(n),\n              avg_tf = round(mean(tf),4),\n              avg_tfidf = round(mean(tf_idf), 4))\n  \n  # top 10 words by term frequency in each document\n  top_tf <- df_tfidf %>%\n    arrange(desc(tf)) %>%\n    group_by(id) %>%\n    slice(1:10) %>%\n    select(id, word) %>%\n    group_by(id) %>%\n    summarise(top_tf = paste(word, collapse = \", \"))\n  \n  # top 10 words by tfidf in each document\n  top_tfidf <- df_tfidf %>%\n    arrange(desc(tf_idf)) %>%\n    group_by(id) %>%\n    slice(1:10) %>%\n    select(id, word) %>%\n    group_by(id) %>%\n    summarise(top_tfidf = paste(word, collapse = \", \"))\n  \n  \n  tfidf <- top_tf %>%\n    left_join(top_tfidf) %>%\n    right_join(df) %>%\n    arrange(id)\n  \n  return(list(tfidf = tfidf, word_stats = word_stats))\n}\n\n\ntfidf_tm <- function(corpus, sparsity = 0.992){\n  # Calculate normalized tfidf matrix of a corpus using the tm package\n  #\n  # Arguments:\n  #   corpus: a vector of text documents\n  #   sparsity: maximum sparsity allowed for a term to be kept\n  #\n  # Return:\n  #   matrix, each row is the normalized tfidf vector of a document\n  \n  corpus <- Corpus(VectorSource(corpus)) %>%\n    tm_map(tolower) %>%\n    tm_map(stripWhitespace) %>%\n    # remove stopwords before removing punctuation so that stopwords like \n    # it's and i'll can be removed\n    tm_map(removeWords, stopwords(\"english\")) %>%\n    tm_map(removePunctuation) %>%\n    tm_map(stemDocument)\n  \n  tfidf <- DocumentTermMatrix(\n    corpus,\n    control = list(weighting = function(x) weightTfIdf(x, normalize = FALSE))\n  ) %>%\n    removeSparseTerms(sparsity) %>%\n    as.matrix() \n  \n  # normalize so that each row vector has length of 1\n  tfidf <- tfidf / sqrt(rowSums(tfidf * tfidf))\n  \n  return(tfidf)\n}\n\n\nplot_cv <- function(rep_id = NULL, dat = metrics_cv){\n  # Plot the train and validation metrics over iterations of \n  # the cross validation that was used to find the best parameters\n  #\n  # Arguments:\n  #   rep_id: one of the many repeats. If NULL, plot each repeat\n  #   dat: list of data frames generated by the cross validation run\n  \n  if (is.null(rep_id)){\n    for (i in 1:length(dat)){\n      p <- ggplot(dat[[i]]) +\n        geom_line(aes(iter, train_mean), color = \"blue\") +\n        geom_line(aes(iter, test_mean), color = \"red\") +\n        labs(title = paste(\"repeat\", i),\n             x = \"Iteration / Epoch\",\n             y = \"Average Metrics\")\n      print(p)\n      \n      key <- readline(\n        prompt = paste0(\"Press [Enter] to view next repeat\", \n                        \" [Esc] to exit: \")\n      )\n    }\n  } else {\n    ggplot(dat[[rep_id]]) +\n      geom_line(aes(iter, train_mean), color = \"blue\") +\n      geom_line(aes(iter, test_mean), color = \"red\") +\n      labs(title = paste(rep_id, \"repeat\"),\n           x = \"Iteration / Epoch\",\n           y = \"Average Metrics\")\n  }\n}\n\n\nmetrics_binary <- function(y_true, y_pred, cutoff = 0.5){\n  # Get key metrics of binary classification\n  #\n  # Arguments:\n  #   y_true: integer, true class\n  #   y_pred: numeric, predicted probability\n  #   cutoff: numeric in 0 - 1, cutoff probability\n  # Return:\n  #   numeric vector, model metrics auc, f1, sensitivity, and specificity\n  \n  y_pred_class <- round(y_pred)\n  auc <- ModelMetrics::auc(y_true, y_pred)\n  f1 <- ModelMetrics::f1Score(y_true, y_pred, cutoff)\n  sensitivity <- ModelMetrics::sensitivity(y_true, y_pred, cutoff)\n  specificity <- ModelMetrics::specificity(y_true, y_pred, cutoff)\n  \n  cat(\"confusion matrix:\\n          y_true\\n\")\n  print(ModelMetrics::confusionMatrix(y_true, y_pred))\n  cat(\" \\n\")\n  \n  return(c(auc = auc, \n           f1 = f1, \n           sensitivity = sensitivity, \n           specificity = specificity))\n}\n\n\naccuracy <- function(y_true, y_pred){\n  # Calculate classification accuracy\n  #\n  # Arguments:\n  #   y_true: integer, true class\n  #   y_pred: numeric, predicted probability\n  # Return:\n  #   numeric \n  tb <- table(y_true, y_pred)\n  acc <- sum(diag(tb)) / length(y_true)\n  return(acc)\n}\n\n\nbest_match <- function(y_true, y_clusters){\n  # For 3 clusters, match each cluster to y_true to get the best accuracy\n  #\n  # Arguments:\n  #   y_true: int vector, true classes of each sample, c(0, 2, 1, 1, ...)\n  #   y_clusters: int vector, kmeans cluster number, c(1, 3, 2, 1, ...)\n  # Return:\n  #   int vector, y_pred with best match\n  \n  y_pred <- rep(999, length(y_true))\n  matches <- subset(expand.grid(rep(list(0:2), 3)), \n                    Var1 != Var2 & Var1 != Var3 & Var2 != Var3)\n  \n  best_diag_sum <- 0   # total number of correct samples\n  for (i in 1:nrow(matches)){\n    match <- as.numeric(matches[i, ])\n    y_pred[y_clusters == 1] <- match[1]\n    y_pred[y_clusters == 2] <- match[2]\n    y_pred[y_clusters == 3] <- match[3]\n    \n    tb <- table(y_true, y_pred)\n    diag_sum <- sum(diag(tb))\n    if (best_diag_sum < diag_sum){\n      best_diag_sum <- diag_sum\n      best_match <- match\n      best_accuracy <- diag_sum / length(y_true)\n      best_y_pred <- y_pred\n    }\n  }\n  print(table(y_true, best_y_pred))\n  cat(paste(\"accuracy: \", best_accuracy))\n  return(best_y_pred)\n}\n\n\nplot_pc1_pc2 <- function(pca, \n                         color = NULL, \n                         color_map = c(\"red\", \"blue\", \"cyan\"),\n                         pch = NULL,\n                         title = NULL){\n  # Plot samples in PC1-PC2 space\n  #\n  # Arguments:\n  #   pca: matrix, pca of tfidf\n  #   color: int vector to mark the color of each sample. Can be y_true, \n  #     y_clusters, y_pred, or another vector of the same length as pca\n  #   color_map: string vector, colors that the classes are mapped to\n  #   pch: int vector, shape of data point\n  #   title: string, plot title\n  \n  PC1 <- pca[, 1]\n  PC2 <- pca[, 2]\n  \n  sample_colors <- rep(\"black\", nrow(pca))\n  if (!is.null(color)){\n    sample_colors[color == 0] <- color_map[1]\n    sample_colors[color == 1] <- color_map[2]\n    sample_colors[color == 2] <- color_map[3]\n  }\n  \n  if (!is.null(color) & !is.null(pch)){\n    plot(PC1, PC2, col = sample_colors, pch = pch, main = title)\n  } else if (!is.null(color)){\n    plot(PC1, PC2, col = sample_colors, main = title)\n  } else if (!is.null(pch)){\n    plot(PC1, PC2, pch = pch, main = title)\n  } else {\n    plot(PC1, PC2, main = title)\n  }\n}\n\n\nplot_confusion_matrix <- function(y_true, y_pred,\n                                  lab_x = NULL, lab_y = NULL,\n                                  type = \"recall\",\n                                  grob = FALSE){\n  # plot confusion matrix of classification prediction\n  #\n  # Arguments:\n  #   y_true, y_pred: int, true and predicted classes\n  #   type: string, percent as \"recall\" or \"precision\"\n  #   lab_x, lab_y: string, axis labels of the classes\n  #   grob: bool, if TRUE, add colored title to the plot\n  #\n  # Return:\n  #   a ggplot\n  \n  stopifnot(type %in% c(\"recall\", \"precision\"))\n  \n  if (type == \"recall\"){\n    x <- y_true\n    y <- y_pred\n    x_axis <- \"True\"\n    y_axis <- \"Predicted\"\n  } else if (type == \"precision\"){\n    x <- y_pred\n    y <- y_true\n    x_axis <- \"Predicted\"\n    y_axis <- \"True\"\n  }\n  \n  if (is.factor(x)){\n    x <- as.numeric(as.character(x))\n    y <- as.numeric(as.character(y))\n  }\n  \n  n <- length(unique(x))\n  if (is.null(lab_x) & is.null(lab_y)){\n    lab_x <- 1:n\n    lab_y <- 1:n\n  } else if (is.null(lab_x)) {\n    lab_x <- lab_y\n  } else if (is.null(lab_y)){\n    lab_y <- lab_x\n  }\n\n  cm <- table(x, y)   # confusion matrix\n  # percent\n  cm_pct <- cm / rowSums(cm) \n  pct_dt <- as.data.table(matrix(unlist(cm_pct), ncol = 1)) %>%\n    .[, x := rep(0:(n-1), n)] %>%\n    .[, y := rep(0:(n-1), each = n)]\n  \n  # count\n  cm_dt <- as.data.table(matrix(unlist(cm), ncol = 1)) %>%\n    .[, x := rep(0:(n-1), n)] %>%\n    .[, y := rep(0:(n-1), each = n)]\n\n  main_plot <- ggplot() + \n    geom_jitter(aes(x, y), color = \"blue\", size = 1,\n                width = 0.1, height = 0.1, alpha = 0.3) +\n    geom_text(data = cm_dt, \n              # aes(x + 0.03, y + 0.2, label = V1), hjust = 0,\n              aes(x, y - 0.2, label = V1),\n              hjust = 0.5,\n              color = \"purple\") +\n    geom_text(data = pct_dt, \n              #aes(x - 0.03, y + 0.2, label = paste0(round(100 * V1, 1), \"%\")), \n              aes(x, y + 0.2, label = paste0(round(100 * V1, 1), \"%\")),\n              hjust = 0.5,\n              color = \"red\") +\n    scale_x_continuous(breaks = 0:(n-1), labels = lab_x) +\n    scale_y_continuous(breaks = 0:(n-1), labels = lab_y) +\n    labs(x = x_axis,\n         y = y_axis) +\n    theme(panel.background = element_rect(fill = NA, color = \"gray20\"),\n          panel.grid.major = element_line(color = \"gray95\"),\n          axis.ticks = element_blank())\n  \n  if (grob){\n    grobs <- grobTree(\n      gp = gpar(fontsize = 12, fontface = \"bold\"), \n      textGrob(label = \" Percent\", \n               name = \"title1\",\n               x = unit(0.2, \"lines\"), \n               y = unit(1.4, \"lines\"), \n               hjust = 0, \n               vjust = 1, \n               gp = gpar(col = \"red\")),\n      textGrob(label = \" and \", \n               name = \"title2\",\n               x = grobWidth(\"title1\") + unit(0.2, \"lines\"),\n               y = unit(1.4, \"lines\"),\n               hjust = 0, \n               vjust = 1),\n      textGrob(label = \"Number\", \n               name = \"title3\",\n               x = grobWidth(\"title1\") + grobWidth(\"title2\") + unit(0.2, \"lines\"),\n               y = unit(1.4, \"lines\"),\n               gp = gpar(col = \"purple\"),\n               hjust = 0, \n               vjust = 1),\n      textGrob(label = \" of True Specialties Being Predicted as Others\",\n               x = grobWidth(\"title1\") + grobWidth(\"title2\") + grobWidth(\"title3\") + unit(0.2, \"lines\"),\n               y = unit(1.4, \"lines\"),\n               hjust = 0, \n               vjust = 1)\n    )\n    \n    gg <- arrangeGrob(main_plot, top=grobs, padding = unit(2.6, \"line\"))\n    \n    grid.arrange(gg)\n  } else {\n    main_plot\n  }\n}\n\n\n# text2vec functions ===========================================================\nget_iter <- function(corpus, ids = NULL, stem = TRUE){\n  # create iterator for text2vec\n  #\n  # Arguments:\n  #   corpus: string vector\n  #   ids: id of corpus\n  #   stem: bool, use stem tokenizer if TRUE, word tokenizer if not\n  #\n  # Return:\n  #   a text2vec iterator\n  #\n  \n  if (stem){\n    tokenizer <- function(x) {\n      word_tokenizer(x) %>% \n        lapply( function(x) SnowballC::wordStem(x, language=\"en\"))\n    }\n  } else {\n    tokenizer <- word_tokenizer\n  }\n  it <- itoken(corpus, tolower, tokenizer, ids = ids)\n}\n\n\nget_vocab <- function(corpus, min_count = 5, min_doc = 5){\n  # Create a text2vec vocabulary of a corpus and \n  # keep words with term count of at least min_count and document count of at least min_doc\n  it <- get_iter(corpus)\n  vocab <- create_vocabulary(it, stopwords = tm::stopwords())\n  vocab <- prune_vocabulary(\n    vocab, \n    term_count_min = min_count,\n    doc_count_min = min_doc\n  )\n}\n\n\nget_vectorizer <- function(vocab){\n  # Create text2vec vectorizer from vocab for use in create_dtm\n  vocab_vectorizer(vocab)\n}\n#train_vectorizer <- get_vectorizer(train)\n\n\nget_dtm <- function(corpus, vectorizer){\n  # Get dtm of a corpus using an existing vectorizer\n  it <- get_iter(corpus)\n  dtm <- create_dtm(it, vectorizer)\n}\n# train_dtm <- get_dtm(train, train_vectorizer)\n# test_dtm <- get_dtm(test, train_vectorizer)\n\n\nfit_tfidf <- function(dtm){\n  # create a tfidf model using dtm\n  mdl <- TfIdf$new()\n  fit_transform(dtm, mdl)   # fit does not work\n  return(mdl)\n}\n# tfidf_model <- fit_tfidf(train_dtm)\n\n\ntransform_tfidf <- function(dtm, tfidf_model){\n  # Get normalized tfidf matrix of dtm using tfidf_model\n  tfidf <- transform(dtm, tfidf_model)\n  tfidf <- as.matrix(tfidf)\n  tfidf <- tfidf / sqrt(rowSums(tfidf * tfidf))\n}\n# train_tfidf <- transform_tfidf(train_dtm, tfidf_model)\n# test_tfidf <- transform_tfidf(test_dtm, tfidf_model)\n\n\nfit_pca <- function(tfidf, ...){\n  # create a pca model\n  prcomp(tfidf, ...)\n}\n# pca_model <- fit_pca(train_tfidf)\n\n# train_pca <- predict(pca_model, train_tfidf)\n# test_pca <- predict(pca_model, test_tfidf)\n"
},
{
"alpha_fraction": 0.6021978259086609,
"alphanum_fraction": 0.6104395389556885,
"avg_line_length": 23.917808532714844,
"blob_id": "61c62691d8084150d74c3c64f85fe648aef12169",
"content_id": "5a5ff76f8776dd95af785718e3bcad8764f839a3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 1820,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 73,
"path": "/shiny-apps/controllers/classification_multiclass_deploy_models.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "# medical specialties in the order of class 0, 1, 2, 3, 4, 5 for convert numbers\n# back to strings\nlookup_table <- c(\n \"Cardiovascular / Pulmonary\",\n \"Gastroenterology\",\n \"Obstetrics / Gynecology\",\n \"Neurology\",\n \"Orthopedic\",\n \"Urology\"\n)\n\n# take input from text area or file upload, whichever updated =================\ninput_val <- reactiveValues()\nobserveEvent(input$text_input, {\n input_val$notes <- input$text_input %>%\n str_split(\"\\n\") %>%\n unlist()\n})\nobserveEvent(input$file_upload, {\n dat <- read.csv(\n input$file_upload$datapath, \n sep = \"\\n\", \n header = FALSE,\n stringsAsFactors = FALSE\n )\n \n input_val$notes <- dat$V1\n})\n\n# # use the reactive value inside reactive environment\n# new_tfidf <- reactive({\n# texts <- input_val$notes\n# tfidf <- texts %>%\n# get_dtm(train_vectorizer) %>%\n# transform_tfidf(tfidf_model)\n# \n# stopifnot(length(texts) == dim(tfidf)[1])\n# \n# tfidf\n# })\n\n\nnew_pca <- reactive({\n texts <- input_val$notes\n dtm <- get_dtm(texts, train_vectorizer)\n tfidf <- transform_tfidf(dtm, tfidf_model)\n # use drop = FALSE so one row is still a matrix not a vector\n pca <- predict(pca_model, tfidf)[, 1:25, drop = FALSE]\n})\n\n\nprediction <- reactive({\n predict(svm_model_deploy, new_pca())\n})\n\n\noutput$print_prediction <- renderPrint({\n # + 1 as class start from 0, complicated coversion in case of factor\n lookup_table[as.numeric(as.character(prediction())) + 1]\n})\n\n\n# .. download prediction ====\ny_download <- reactive({\n pred <- as.numeric(as.character(prediction())) + 1\n lookup_table[pred]\n})\noutput$download <- downloadHandler(\n filename = function() {\"prediction.txt\"},\n content = function(file) {\n write(y_download(), file)\n }\n)\n\n"
},
{
"alpha_fraction": 0.6034858226776123,
"alphanum_fraction": 0.6175480484962463,
"avg_line_length": 28.68235206604004,
"blob_id": "1b88a561ffb59ee075e45259c08b0be3138fda51",
"content_id": "59a4bbe21a62073b84c4320e42950f5d11e68923",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 5049,
"license_type": "no_license",
"max_line_length": 120,
"num_lines": 170,
"path": "/machine-learning/multiclass_classification_tfidf_svm.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "# Only use well-defined medical specialties which have more than 150 samples \n# removing duplicates\n\nlibrary(e1071)\nlibrary(progress)\nsource(\"utilities.R\")\n\n# prepare data =================================================================\nspecialties <- c(\n \"Gastroenterology\", \"Obstetrics / Gynecology\", \"Cardiovascular / Pulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\ncols <- c(\"specialty\", \"note\")\nset.seed(1234)\ndat <- read_notes(\n \"data/mtsamples_multi_class.csv\", \n duplicate_rm = T,\n specialties = specialties,\n cols_keep = cols,\n id = TRUE,\n y_label = TRUE\n)\ntfidf <- tfidf_tm(dat$note)\ny <- as.factor(dat$y) # svm requires y to be factor\n\n\n# find best n_pca ==============================================================\n# result: keep first 20 - 50 principle components have the best accuracy.\n# We will use n_pca = 25 considering accuracy and speed.\n# run only if need pca\nX <- prcomp(tfidf)$x\n\nset.seed(1111)\nin_train <- caret::createDataPartition(y, p = 0.7, list = FALSE)\n\nX_train <- X[in_train,]\nX_test <- X[-in_train,]\n\ny_train <- y[in_train]\ny_test <- y[-in_train]\n\n\nsvm_pca <- function(n_pc, X_train, y_train, n_split){\n # Calculate the accuracy for number of principle components used in SVM\n #\n # n_pc: int, number of principle components to keep\n # X_train, y_train: trainig data\n # n_split: int, times of repeating train-validation split\n \n # split train and validation data\n X_train <- X_train[, 1:n_pc]\n \n # create 10 split index\n intrains <- caret::createDataPartition(\n y_train, times = n_split, p = 0.6, list = FALSE\n )\n \n acc <- 0\n for (i in 1:n_split){\n intrain <- intrains[, i]\n Xtrain <- X_train[intrain,]\n ytrain <- y_train[intrain]\n Xval <- X_train[-intrain,]\n yval <- y_train[-intrain]\n mdl <- svm(Xtrain, ytrain)\n ypred <- predict(mdl, Xval)\n tb <- table(truth = yval, predict = ypred)\n accuracy <- sum(diag(tb)) / length(yval)\n acc <- acc + accuracy / n_split\n }\n \n return(c(n_pc = n_pc, acc = accuracy))\n}\n\n\nplot_n_pca <- function(n_pcas, X_train, y_train, n_split = 100){\n # Plot accuracy ~ n_pca to find best n_pca values\n #\n # n_pcas: int, vector of number of principle components to keep\n # n_split: times of repearing train-val split to get average accuracy\n \n n <- length(n_pcas)\n pc_acc <- data.frame(n_pc = rep(0, n), acc = rep(0, n))\n \n pb <- progress_bar$new()\n for (i in 1:n) {\n pb$tick()\n n_pc <- n_pcas[i]\n m <- svm_pca(n_pc, X_train, y_train, n_split)\n pc_acc[i, ] <- m\n \n plot(pc_acc$n_pc, pc_acc$acc, type = \"p\", xlim = c(0, max(n_pcas)))\n }\n \n # plot(pc_acc$n_pc, pc_acc$acc, type = \"p\")\n}\n\n\n# # find the best n_pca\nn_pcas <- c(2:50, 2 * (26:50), 5 * (21:45))\nplot_n_pca(n_pcas, X_train, y_train, 10)\n\n# one model tfidf ================================================================\n# results: poor recall and precision\n\nX <- tfidf\n\nset.seed(11111)\nin_train <- caret::createDataPartition(y, p = 0.7, list = FALSE)\n\nX_train <- X[in_train,]\nX_test <- X[-in_train,]\n\ny_train <- y[in_train]\ny_test <- y[-in_train]\n\nmdl <- svm(X_train, y_train, kernel = \"linear\")\ny_pred <- predict(mdl, X_test)\n\nclasses_x <- c(\n \"Gastroenterology\", \"Obstetrics\\nGynecology\", \"Cardiovascular\\nPulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\nclasses_y <- c(\n \"Gastro-\\nenterology\", \"Obstetrics\\nGynecology\", \"Cardiovascular\\nPulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\n\nggplot_multiclass_svm_tfidf_recall <- 
plot_confusion_matrix(y_test, y_pred, classes_x, classes_y, type = \"recall\")\nggplot_multiclass_svm_tfidf_precision <- plot_confusion_matrix(y_test, y_pred, classes_x, classes_y, type = \"precision\")\n\naccuracy_svm_tfidf <- accuracy(y_test, y_pred)\n\n\n# one model pca ================================================================\n# visually pick 25 as the best n_pca to train model\n# results: exellent recall and precision\nX <- prcomp(tfidf)$x[, 1:25]\n\nset.seed(11111)\nin_train <- caret::createDataPartition(y, p = 0.7, list = FALSE)\n\nX_train <- X[in_train,]\nX_test <- X[-in_train,]\n\ny_train <- y[in_train]\ny_test <- y[-in_train]\n\nmdl <- svm(X_train, y_train, kernel = \"linear\")\ny_pred <- predict(mdl, X_test)\n\nclasses_x <- c(\n \"Gastroenterology\", \"Obstetrics\\nGynecology\", \"Cardiovascular\\nPulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\nclasses_y <- c(\n \"Gastro-\\nenterology\", \"Obstetrics\\nGynecology\", \"Cardiovascular\\nPulmonary\", \n \"Neurology\", \"Urology\", \"Orthopedic\"\n)\nggplot_multiclass_svm_pca_recall <- plot_confusion_matrix(y_test, y_pred, classes_x, classes_y)\nggplot_multiclass_svm_pca_precision <- plot_confusion_matrix(y_test, y_pred, classes_x, classes_y, type = \"precision\")\n\naccuracy_svm_pca <- accuracy(y_test, y_pred)\n\n\n# save for shiny ===============================================================\nsave(ggplot_multiclass_svm_pca_recall, ggplot_multiclass_svm_pca_precision,\n ggplot_multiclass_svm_tfidf_recall, ggplot_multiclass_svm_tfidf_precision,\n accuracy_svm_tfidf, accuracy_svm_pca,\n file = \"shiny-apps/RData/ggplot_multiclass_svm.RData\")\n\n\n\n"
},
{
"alpha_fraction": 0.7982456088066101,
"alphanum_fraction": 0.7982456088066101,
"avg_line_length": 113,
"blob_id": "a992bf23a2f02f8abd8194b949ae7c83dac08b86",
"content_id": "def1341ff67ec7f7ff0581eeb6e8c73d368a674a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 114,
"license_type": "no_license",
"max_line_length": 113,
"num_lines": 1,
"path": "/README.md",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "This repo houses the codes that generate the Shiny Web application at https://gl-li.shinyapps.io/Clinical_notes/.\n"
},
{
"alpha_fraction": 0.4505196511745453,
"alphanum_fraction": 0.4667871594429016,
"avg_line_length": 34.99187088012695,
"blob_id": "5e1ffc03e1c2425aa4f1a2d3972ac58a5114a4e3",
"content_id": "570d4fcb8052abff17148c602607832d274b953b",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 4426,
"license_type": "no_license",
"max_line_length": 72,
"num_lines": 123,
"path": "/shiny-apps/controllers/classification_multiclass.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "output$multiclass_1 <- renderPlot(\n {\n if (input$cm_method_1 == \"svm + tfidf\"){\n g <- get(paste0(\n \"ggplot_multiclass_svm_tfidf_\", input$cm_type_1\n ))\n } else if (input$cm_method_1 == \"svm + tfidf + pca\"){\n g <- get(paste0(\n \"ggplot_multiclass_svm_pca_\", input$cm_type_1\n ))\n } else if (input$cm_method_1 == \"xgboost + tfidf\"){\n g <- get(paste0(\n \"ggplot_multiclass_xgb_tfidf_\", input$cm_type_1\n ))\n } else if (input$cm_method_1 == \"xgboost + tfidf + pca\"){\n g <- get(paste0(\n \"ggplot_multiclass_xgb_pca_\", input$cm_type_1\n ))\n } else if (input$cm_method_1 == \"neural network + tfidf\"){\n g <- get(paste0(\n \"ggplot_multiclass_nn_tfidf_\", input$cm_type_1\n ))\n } else if (input$cm_method_1 == \"neural network + tfidf + pca\"){\n g <- get(paste0(\n \"ggplot_multiclass_nn_pca_\", input$cm_type_1\n ))\n } else if (input$cm_method_1 == \"neural network + embedding\"){\n g <- get(paste0(\n \"ggplot_multiclass_nn_embedding_\", input$cm_type_1\n ))\n }\n g\n }\n)\n\noutput$multiclass_2 <- renderPlot(\n {\n if (input$cm_method_2 == \"svm + tfidf\"){\n g <- get(paste0(\n \"ggplot_multiclass_svm_tfidf_\", input$cm_type_2\n ))\n } else if (input$cm_method_2 == \"svm + tfidf + pca\"){\n g <- get(paste0(\n \"ggplot_multiclass_svm_pca_\", input$cm_type_2\n ))\n } else if (input$cm_method_2 == \"xgboost + tfidf\"){\n g <- get(paste0(\n \"ggplot_multiclass_xgb_tfidf_\", input$cm_type_2\n ))\n } else if (input$cm_method_2 == \"xgboost + tfidf + pca\"){\n g <- get(paste0(\n \"ggplot_multiclass_xgb_pca_\", input$cm_type_2\n ))\n } else if (input$cm_method_2 == \"neural network + tfidf\"){\n g <- get(paste0(\n \"ggplot_multiclass_nn_tfidf_\", input$cm_type_2\n ))\n } else if (input$cm_method_2 == \"neural network + tfidf + pca\"){\n g <- get(paste0(\n \"ggplot_multiclass_nn_pca_\", input$cm_type_2\n ))\n } else if (input$cm_method_2 == \"neural network + embedding\"){\n g <- get(paste0(\n \"ggplot_multiclass_nn_embedding_\", input$cm_type_2\n ))\n }\n g\n }\n)\n\n\noutput$acc_1 <- renderText(\n {\n if (input$cm_method_1 == \"svm + tfidf\"){\n acc <- accuracy_svm_tfidf\n } else if (input$cm_method_1 == \"svm + tfidf + pca\"){\n acc <- accuracy_svm_pca\n } else if (input$cm_method_1 == \"xgboost + tfidf\"){\n acc <- accuracy_xgb_tfidf\n } else if (input$cm_method_1 == \"xgboost + tfidf + pca\"){\n acc <- accuracy_xgb_pca\n } else if (input$cm_method_1 == \"neural network + tfidf\"){\n acc <- accuracy_nn_tfidf\n } else if (input$cm_method_1 == \"neural network + tfidf + pca\"){\n acc <- accuracy_nn_pca\n } else if (input$cm_method_1 == \"neural network + embedding\"){\n acc <- accuracy_nn_embedding\n }\n \n acc <- round(acc * 100, 1)\n paste0(\"<p>\",\n \"Overall accuracy: \", \n \"<font size='5'>\", acc, \"%\", \"</font>\",\n \"</p>\")\n }\n)\n\n\noutput$acc_2 <- renderText(\n {\n if (input$cm_method_2 == \"svm + tfidf\"){\n acc <- accuracy_svm_tfidf\n } else if (input$cm_method_2 == \"svm + tfidf + pca\"){\n acc <- accuracy_svm_pca\n } else if (input$cm_method_2 == \"xgboost + tfidf\"){\n acc <- accuracy_xgb_tfidf\n } else if (input$cm_method_2 == \"xgboost + tfidf + pca\"){\n acc <- accuracy_xgb_pca\n } else if (input$cm_method_2 == \"neural network + tfidf\"){\n acc <- accuracy_nn_tfidf\n } else if (input$cm_method_2 == \"neural network + tfidf + pca\"){\n acc <- accuracy_nn_pca\n } else if (input$cm_method_2 == \"neural network + embedding\"){\n acc <- accuracy_nn_embedding\n }\n \n acc <- round(acc * 100, 1)\n paste0(\"<p>\",\n \"Overall accuracy: \", \n \"<font 
size='5'>\", acc, \"%\", \"</font>\",\n \"</p>\")\n }\n)"
},
{
"alpha_fraction": 0.5633165836334229,
"alphanum_fraction": 0.5693467259407043,
"avg_line_length": 32.72881317138672,
"blob_id": "f89fcd66551727ed1ec458ed753dbfe44e4db5f9",
"content_id": "2cd064e32eecf776ca482f48f07e11fdcfef8e1d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1990,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 59,
"path": "/python/utilities.py",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "# -*- coding: utf-8 -*-\n\"\"\"\nCommon functions\n\"\"\"\n\nimport pandas as pd\n\ndef read_notes(csv_file, \n specialties = None,\n y_label = None, \n cols_keep = [\"specialty\", \"note\"],\n randomize = True,\n duplicate_rm = True, \n clean = True,\n id = True):\n \"\"\"\n read clinical notes and add label y to the original data\n \n Arguments\n ---------\n csv_file: string, path to the the data file\n specialties: string vector, selected specialties such as\n c(\"Gastroenterology\", \"Neurology\")\n cols_keep: string, columns in the orignial data to keep, \"all\" to keep\n all columns.\n randomize: boolean, randomize row (sample) orders to break grouping\n y_label: boolean, if TRUE, add a class label 0, 1, 2, ... to each note\n duplicate_rm: boolean, remove rows if duplicated in column note\n clean: boolean, if TRUE add missing space after \".\", for example,\n \"abscess.PROCEDURE\".\n id: boolean, add id to each sample after removing duplicates\n\n Return:\n a data.frame\n \"\"\"\n \n dat = pd.read_csv(csv_file)\n if specialties is not None:\n dat = dat.query('specialty in @specialties') # @var\n if cols_keep != \"all\":\n dat = dat[cols_keep] # var select columns by variable\n if randomize:\n dat = dat.sample(frac=1)\n if clean:\n # missing space after \".\", for example \"abscess.PROCEDURE\"\n dat[\"note\"] = dat.note.str.replace(\".\", \". \")\n if y_label:\n dat[\"y\"] = dat.specialty.astype(\"category\").cat.codes\n if duplicate_rm:\n nrow_0 = dat.shape[0]\n dat = dat.drop_duplicates(subset=\"note\")\n nrow_1 = dat.shape[0]\n print(\"Deleted \" + str(nrow_0 - nrow_1) + \" rows with duplicated notes\")\n if id:\n dat[\"id\"] = range(dat.shape[0])\n columns = [\"id\"] + list(dat.columns[dat.columns != \"id\"])\n dat = dat[columns]\n \n return(dat)\n"
},
{
"alpha_fraction": 0.5523344278335571,
"alphanum_fraction": 0.5806506276130676,
"avg_line_length": 27.104265213012695,
"blob_id": "c26724f2f82842dbbfc84199e442341b1f6c7e76",
"content_id": "9952272737eea0d93c04c814ca5ec408e71d78a6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "R",
"length_bytes": 5933,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 211,
"path": "/machine-learning/gas_neu_uro_classification_tfidf_neural_netword.R",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "library(tensorflow)\nlibrary(keras)\nsource(\"utilities.R\")\n\n# prepare data =================================================================\nset.seed(12345)\ndat <- read_notes(\n \"data/amazon_medacy_mtsamples_gastr_neuro_urolo.csv\",\n duplicate_rm = T,\n cols_keep = \"all\",\n y_label = TRUE\n)\n\n# try out different nn construction ============================================\n# result:\n# simple two dense layer with drop out regularization works just fine\n\nnotes <- dat$note\n# initialize tokenizer specifing maximum words\ntk <- text_tokenizer(num_words = 3000)\n# update tk in place with a vector or list of documents\nfit_text_tokenizer(tk, notes)\n# convert the documents into a matrix of tfidf\nX <- texts_to_matrix(tk, notes, mode = \"tfidf\")\n# normalize the matrix so that length of each row vector is 1\nX <- X / sqrt(rowSums(X * X))\n\n# for multiclass, y should be converted to a matrix \ny_class <- dat$y\nn_class <- length(unique(y_class))\ny <- to_categorical(y_class, n_class)\n\n# split X and y into train and test\nset.seed(1234)\nin_train <- caret::createDataPartition(y_class, p = 0.7, list = FALSE)\nX_train <- X[in_train,]\ny_train <- y[in_train,] \nX_test <- X[-in_train,]\ny_test <- y[-in_train]\ny_test_class <- y_class[-in_train]\n\n\nmodel <- keras_model_sequential() %>% \n # input layer\n layer_dense(32, input_shape = dim(X_train)[2], activation = \"relu\") %>%\n layer_dropout(0.2) %>% \n # second layer\n layer_dense(units = 16, activation = \"relu\") %>% \n layer_dropout(0.2) %>% \n # output layer\n layer_dense(n_class, activation = \"softmax\")\n\n#summary(model)\n\n# compile, fit, and evaluate model in place\ncompile(model,\n loss = \"categorical_crossentropy\",\n optimizer = \"adam\",\n metrics = \"accuracy\"\n)\n\nfit(model,\n x = X_train, y = y_train,\n epochs = 20,\n batch_size = 32,\n validation_split = 0.3,\n verbose = 3\n)\n\ny_pred <- predict(model, X_test)\ny_pred_class <- predict_classes(model, X_test)\ntable(y_test_class, y_pred_class)\n\n\n# define function to get average metrics =====================================\nnn_metrics <- function(corpus, n_rep = 100, pca = FALSE, n_pca = 25, epoch = 20){\n # calculate average accuracy and f1 score out of 100 repeat\n # \n # Arguments:\n # corpus: string, \"note\" or \"amazon_me\"\n # pca: bool, if TRUE, process with pca\n # n_pca: int, number of pc to keep\n # epoch: int, epoch for model training, manual set after a few plot\n #\n # Return:\n # numeric vector \n #\n \n notes <- dat[, get(corpus)]\n # initialize tokenizer specifing maximum words\n tk <- text_tokenizer(num_words = 3000)\n # update tk in place with a vector or list of documents\n fit_text_tokenizer(tk, notes)\n # convert the documents into a matrix of tfidf\n X <- texts_to_matrix(tk, notes, mode = \"tfidf\")\n # normalize the matrix so that length of each row vector is 1\n X <- X / sqrt(rowSums(X * X))\n \n y_class <- dat$y\n n_class <- length(unique(y_class))\n y <- to_categorical(y_class, n_class)\n \n if (pca){\n X <- prcomp(X)$x[, 1:n_pca]\n }\n \n df_acc_f1 <- data.frame(\n acc = numeric(n_rep),\n f1_gas = numeric(n_rep),\n f1_neu = numeric(n_rep),\n f1_uro = numeric(n_rep)\n )\n set.seed(6789)\n in_trains <- caret::createDataPartition(y_class, times = n_rep, p = 0.7)\n for (i in 1:n_rep){\n cat(i)\n in_train <- in_trains[[i]]\n X_train <- X[in_train,]\n y_train <- y[in_train,] \n X_test <- X[-in_train,]\n y_test <- y[-in_train,]\n y_test_class <- y_class[-in_train]\n \n model <- keras_model_sequential() %>% \n # input layer\n 
layer_dense(32, input_shape = dim(X_train)[2], activation = \"relu\") %>%\n layer_dropout(0.2) %>% \n # second layer\n layer_dense(units = 16, activation = \"relu\") %>% \n layer_dropout(0.2) %>% \n # output layer\n layer_dense(n_class, activation = \"softmax\")\n \n #summary(model)\n \n # compile, fit, and evaluate model in place\n compile(model,\n loss = \"categorical_crossentropy\",\n optimizer = \"adam\",\n metrics = \"accuracy\"\n )\n \n fit(model,\n x = X_train, y = y_train,\n epochs = epoch,\n batch_size = 32,\n validation_split = 0.3,\n verbose = 3\n )\n \n y_pred <- predict(model, X_test)\n y_pred_class <- predict_classes(model, X_test)\n tb <- table(y_test_class, y_pred_class)\n acc <- sum(diag(tb)) / length(y_test_class)\n \n f1 <- function(tb, k){\n recall <- diag(tb)[k] / sum(y_test_class == k - 1)\n precision <- diag(tb)[k] / sum(y_pred_class == k - 1)\n f1 <- 2 * (recall * precision) / (recall + precision)\n }\n \n f1_gas <- f1(tb, 1)\n f1_neu <- f1(tb, 2)\n f1_uro <- f1(tb, 3)\n \n # assert correct calculation\n stopifnot(acc <= 1 & f1_gas <= 1 & f1_neu <= 1 & f1_uro <= 1)\n stopifnot(acc >= 0 & f1_gas >= 0 & f1_neu >= 0 & f1_uro >= 0)\n \n df_acc_f1[i, ] <- c(acc, f1_gas, f1_neu, f1_uro)\n }\n return(df_acc_f1)\n}\n\n# find the best n_pca ==========================================================\n# result:\n# For note, n_pca = 40 - 60 is best. choose 50\n# for amazon_me, 20- 40, choose 30\nn_pcas <- c(2:50, 2 * (26:50), 5 * (21:45))\nn_rep <- length(n_pcas)\nacc <- numeric(n_rep)\n\nfor (i in 1:n_rep){\n npca <- n_pcas[i]\n df_acc_f1 <- nn_metrics(\"amazon_me\", n_rep = 1, pca = TRUE, n_pca = npca, epoch = 40)\n acc[i] <- sapply(df_acc_f1, mean)[1]\n print(paste(npca, acc[i]))\n}\n\nplot(n_pcas[1:70], acc[1:70])\n\n\n\n# repeat 100 times to get average metrics ======================================\n\n# mean and standard deviation\ndf_acc_f1 <- nn_metrics(\"note\")\nsapply(df_acc_f1, mean)\nsapply(df_acc_f1, sd)\n\ndf_acc_f1 <- nn_metrics(\"amazon_me\")\nsapply(df_acc_f1, mean)\nsapply(df_acc_f1, sd)\n\ndf_acc_f1 <- nn_metrics(\"note\", pca = TRUE, n_pca = 50, epoch = 40)\nsapply(df_acc_f1, mean)\nsapply(df_acc_f1, sd)\n\ndf_acc_f1 <- nn_metrics(\"amazon_me\", pca = TRUE, n_pca = 30, epoch = 40)\nsapply(df_acc_f1, mean)\nsapply(df_acc_f1, sd)\n\n\n\n"
},
{
"alpha_fraction": 0.7614756226539612,
"alphanum_fraction": 0.7783722877502441,
"avg_line_length": 94.9459457397461,
"blob_id": "3fcddeff9116b6c3caf92a578bc3cf4ce4998970",
"content_id": "2288b41e23005386d11ef901fcd9a2673f92ff7c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "RMarkdown",
"length_bytes": 3551,
"license_type": "no_license",
"max_line_length": 620,
"num_lines": 37,
"path": "/shiny-apps/Rmd/medical_named_entity_extraction.Rmd",
"repo_name": "GL-Li/medical_notes",
"src_encoding": "UTF-8",
"text": "---\ntitle: \" \"\n---\n\nMedical named entities are specific terms that have medical meaning, such as diseases, drugs, symptoms, and tests. We will extract medical named entities from the clinical notes with tow methods: [Amazon Comprehend Medical](https://aws.amazon.com/comprehend/medical/), a commercial service provided by Amazon and Python package [medaCy](https://github.com/NLPatVCU/medaCy), a open source project developed by a team of researchers at Virginia Commonwealth University.\n\n### Amazon Comprehend Medical\nWith Amazon Comprehend Medical, a recognized medical entity is assigned to one of the categories of ANATOMY, MEDICAL_CONDITION, MEDICATION, PROTECTED_HEALTH_INFORMATION, and TEST_TREATMENT_PROCEDURE. It is also labeled with one of the types of ADDRESS, AGE, BRAND_NAME, DATE, DX_NAME, GENERIC_NAME, ID, NAME, PROCEDURE_NAME, PROFESSION, SYSTEM_ORGAN_SITE, TEST_NAME, and TREATMENT_NAME. An entity may also has a traits, though often the traits remain empty. One of the most useful trait is negate, which means the patient does not have what described by the entity text.\n\nHere is an example of a medical entity recognized by Comprehend Medical. It has an Id, the starting and ending position of the text in the string of clinical notes, the probability score of the recognition, the exact text extracted, category, type, and traits of the extracted text.\n\n```\n{'Id': 6,\n 'BeginOffset': 364,\n 'EndOffset': 381,\n 'Score': 0.9676284193992615,\n 'Text': 'sigmoid colectomy',\n 'Category': 'TEST_TREATMENT_PROCEDURE',\n 'Type': 'PROCEDURE_NAME',\n 'Traits': []}\n```\nMedical named entities of bulk clinical notes can be extracted easily with Python API ([code](https://github.com/GL-Li/clinical_notes/blob/master/python/extract_medical_named_entity_with_amazon_comprehend_medical.py)). But keep in mind that one entity costs one cent and typically a collection of clinical notes of size 1M characters costs you about $100. New users have 3-month free tier with 25000 entities quota per month. Be cautious, do not bankrupt yourself.\n\n\n### medaCy\nPython package medaCy is under development and currently focuses on the recognition of medications. It uses categories 'ADE', 'Dosage', 'Drug', 'Duration', 'Form', 'Frequency', 'Reason', 'Route', 'Strength' to describe the extracted entities ([code](https://github.com/GL-Li/clinical_notes/blob/master/python/extract_medical_named_entity_with_medacy.py)). The examples below shows three entities extracted from one clinical notes. The entity is in a simpler form than that extracted by Amazon Comprehend Medical, having only category, starting and ending position in the string of clinical notes, and the extracted text.\n\n```\n[('Drug', 1397, 1405, 'peroxide'),\n ('Drug', 2008, 2014, 'Vicryl'),\n ('Route', 2015, 2027, 'subcutaneous')]\n```\n \n### Compare Amazon Comprehend and medaCy\nIn the table below, we display the medical named entities extracted with the two methods from clinical notes in three specialties: gastroenterology, neurology, and urology. Amazon Comprehend Medical (amazon_me) extracts much more information than medaCy does. Obviously, medaCy (medacy_me) misses many medical entities other than medication using its currently available [light clinical notes model](https://github.com/NLPatVCU/medaCy/blob/master/examples/models/clinical_notes_model.md).\n \nFor comparison, we also listed top 10 words having the highest term frequency (top_tf) and term frequency inverse document frequency (top_tfidf) in the table. 
Common English stopwords have been removed from the calculation.\n\n"
}
] | 43 |
bw4sz/GoogleVideo
|
https://github.com/bw4sz/GoogleVideo
|
c8cb1fc920015f2418f82b127ed954a10aeee75d
|
245e63fc5fa09e9488995078057b00771e15baa2
|
e2e8433104c625a0508db240355cf125d9e5fcc1
|
refs/heads/master
| 2021-01-23T03:42:46.775557 | 2020-01-20T16:05:10 | 2020-01-20T16:05:10 | 86,109,607 | 3 | 2 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6640569567680359,
"alphanum_fraction": 0.6825622916221619,
"avg_line_length": 76.83333587646484,
"blob_id": "af01fdce343b4e952d4f2bfd8be4f24bad03745b",
"content_id": "7de0a4669354d6e768d9620eaa70fea8f9d5048c",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1405,
"license_type": "permissive",
"max_line_length": 185,
"num_lines": 18,
"path": "/VideoMeerkat/CommandArgs.py",
"repo_name": "bw4sz/GoogleVideo",
"src_encoding": "UTF-8",
"text": "import argparse\n\ndef CommandArgs():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"--input\", help=\"path of single video\",type=str,default='Hummingbird.avi')\n parser.add_argument(\"--output\", help=\"output directory\",default=\"/Users/Ben/VideoMeerkat\")\n parser.add_argument(\"--draw\", help=\"'Draw' or 'enter' object size\",type=str,default='enter')\n parser.add_argument(\"--size\", help=\"Minimum size of contour\",default=0.01,type=float)\n parser.add_argument(\"--moglearning\", help=\"Speed of MOG background detector, lowering values are more sensitive to movement\",default=0.09,type=float) \n parser.add_argument(\"--mogvariance\", help=\"Variance in MOG to select background\",default=25,type=int) \n parser.add_argument(\"--crop\", help=\"Set region of interest?\",action='store_true')\n parser.add_argument(\"--draw_box\", help=\"Draw boxes to highlight motion'?\",action=\"store_true\")\n parser.add_argument(\"--show\", help=\"Show frames as you process\",action='store_true')\n parser.add_argument(\"--google_account\", help=\"Path to google service account .json file\",default=\"/Users/Ben/Dropbox/Google/MeerkatReader-9fbf10d1e30c.json\")\t \n parser.add_argument(\"--bucket\", help=\"Path to google service account .json file\",default=\"api-project-773889352370-ml\")\t \n \n args=parser.parse_args()\n return(args)\n "
},
{
"alpha_fraction": 0.509458601474762,
"alphanum_fraction": 0.5133724808692932,
"avg_line_length": 33.6363639831543,
"blob_id": "1eba5a7dafbdb5b346f5d7fbbbd8639e5b9d5af7",
"content_id": "db2ffe17630a3d6c4295c0ed4b1b2edfe3b1f40b",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1533,
"license_type": "permissive",
"max_line_length": 155,
"num_lines": 44,
"path": "/VideoMeerkat/VideoMeerkat.py",
"repo_name": "bw4sz/GoogleVideo",
"src_encoding": "UTF-8",
"text": "import cv2\nimport numpy as np\nimport os \nimport Video\nimport CommandArgs\nimport glob\n\nclass VideoMeerkat:\n def __init__(self):\n print(\"Welcome to VideoMeerkat\")\n \n def process_args(self):\n self.args=CommandArgs.CommandArgs()\n \n #get all videos in queue\n self.queue= []\n \n #Create Pool of Videos\n if not os.path.isfile(self.args.input):\n self.args.batch=True\n for (root, dirs, files) in os.walk(self.args.input):\n for files in files:\n fileupper=files.upper()\n if fileupper.endswith((\".TLV\",\".AVI\",\".MPG\",\".MP4\",\".MOD\",\".MTS\",\".WMV\",\".MOV\",\".MP2\",\".MPEG-4\",\".DTS\",\".VOB\",\".MJPEG\",\".M4V\",\".XBA\")):\n self.queue.append(os.path.join(root, files)) \n print(\"Added \" + str(files) + \" to queue\")\n else:\n self.queue=[self.args.input]\n self.args.batch=False\n \n if len(self.queue)==0:\n raise ValueError(\"No videos in the supplied folder. If videos exist, ensure that they can be read by standard video CODEC libraries.\")\n \n def run(self):\n for vid in self.queue:\n video_instance=Video.Video(vid,self.args)\n video_instance.analyze()\n video_instance.clip() \n video_instance.write()\n \nif __name__ == \"__main__\":\n MM=VideoMeerkat() \n MM.process_args() \n MM.run()\n \n "
},
{
"alpha_fraction": 0.546072244644165,
"alphanum_fraction": 0.556818962097168,
"avg_line_length": 36.85564422607422,
"blob_id": "12179137be0d692b02666ed64ad4239a60038dd1",
"content_id": "0537eb84c5df9f1476f1cc4397ef68897a1a8663",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 14423,
"license_type": "permissive",
"max_line_length": 170,
"num_lines": 381,
"path": "/VideoMeerkat/Video.py",
"repo_name": "bw4sz/GoogleVideo",
"src_encoding": "UTF-8",
"text": "import cv2\nimport sys\nimport subprocess\nimport math\nimport os\nimport numpy as np\nimport csv\nimport time\nimport Crop\nfrom operator import itemgetter\nfrom itertools import groupby\nfrom urlparse import urlparse\nfrom google.cloud import storage\nfrom Geometry import *\nfrom VideoClip import VideoClip\n\nfrom google.cloud.gapic.videointelligence.v1beta1 import enums\nfrom google.cloud.gapic.videointelligence.v1beta1 import (video_intelligence_service_client)\nfrom google.cloud.proto.videointelligence.v1beta1 import video_intelligence_pb2\n\nclass Video:\n def __init__(self,vid,args):\n \n #start time\n self.start_time=time.time()\n \n #store args from MotionMeerkat\n self.args=args\n self.args.video=vid\n \n #set descriptors\n self.frame_count=0\n \n #Box Annotations dictionary\n self.annotations={}\n \n ##Google Properties##\n #Google Credentials\n os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = args.google_account\n \n #Set Google Credentials and Properties\n os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = args.google_account\n \n ##Cloud Video Properties\n self.video_client = (video_intelligence_service_client.VideoIntelligenceServiceClient())\n self.features = [enums.Feature.LABEL_DETECTION]\n self.video_context = video_intelligence_pb2.VideoContext()\n self.video_context.stationary_camera = True\n self.video_context.label_detection_mode = video_intelligence_pb2.FRAME_MODE \n \n #Google Cloud Storage\n storage_client = storage.Client()\n \n #TODO check if bucket exists.\n self.bucket = storage_client.get_bucket(args.bucket) \n\n #create output directory\n normFP=os.path.normpath(vid)\n (filepath, filename)=os.path.split(normFP)\n (shortname, extension) = os.path.splitext(filename)\n (_,IDFL) = os.path.split(filepath) \n \n self.file_destination=os.path.join(self.args.output,shortname) \n print(\"Writing clips to \" + self.file_destination)\n\n if not os.path.exists(self.file_destination):\n os.makedirs(self.file_destination) \n \n ##VIDEO PROPERTIES\n #read video\n self.cap=cv2.VideoCapture(self.args.video)\n \n #set frame frate\n self.frame_rate=self.cap.get(5)\n \n #background subtraction\n self.background_instance=self.create_background() \n \n #Detector almost always returns first frame\n self.IS_FIRST_FRAME = True \n \n #Motion History, boolean state of Motion\n self.MotionHistory=[]\n \n def analyze(self):\n \n if self.args.show: \n cv2.namedWindow(\"Motion_Event\")\n #cv2.namedWindow(\"Background\") \n \n while True:\n\n #read frame\n ret,self.original_image=self.read_frame()\n \n if not ret:\n #end time\n self.end_time=time.time()\n break\n \n self.frame_count+=1\n \n #adapt settings of mogvariance to keep from running away\n self.adapt()\n \n #background subtraction\n self.background_apply()\n \n #skip the first frame after adding it to the background.\n if self.IS_FIRST_FRAME:\n print(\"Skipping first frame\")\n self.IS_FIRST_FRAME=False\n self.MotionHistory.append(False)\n continue\n \n #contour analysis\n self.countours=self.find_contour()\n \n #Next frame if no contours\n if len(self.contours) == 0 :\n self.MotionHistory.append(False)\n continue\n \n #bounding boxes\n bounding_boxes = self.cluster_bounding_boxes(self.contours)\n \n #Next frame if no bounding boxes\n if len(bounding_boxes) == 0 :\n self.MotionHistory.append(False)\n continue\n\n #minimum box size\n width = np.size(self.original_image, 1)\n height = np.size(self.original_image, 0)\n area = width * height\n \n #remove if smaller than min size\n remaining_bounding_box=[]\n \n for bounding_box in 
bounding_boxes:\n if area * self.args.size < bounding_box.h * bounding_box.w:\n remaining_bounding_box.append(bounding_box)\n \n #next frame is no remaining bounding boxes\n if len(remaining_bounding_box)==0:\n self.MotionHistory.append(False)\n continue\n \n self.annotations[self.frame_count] = remaining_bounding_box\n \n #store frame history\n self.MotionHistory.append(True)\n \n if self.args.show:\n for bounding_box in remaining_bounding_box:\n if self.args.draw_box: \n cv2.rectangle(self.original_image, (bounding_box.x, bounding_box.y), (bounding_box.x+bounding_box.w, bounding_box.y+bounding_box.h), (0,0,255), 2)\n cv2.imshow(\"Motion_Event\", self.original_image)\n cv2.waitKey(0)\n cv2.destroyAllWindows() \n \n def read_frame(self):\n \n #read frame\n ret,image=self.cap.read()\n \n if not ret:\n return((ret,image))\n \n #set crop settings if first frame\n if self.IS_FIRST_FRAME:\n if self.args.crop:\n self.roi=Crop.Crop(image,\"Crop\") \n if self.args.crop:\n cropped_image=image[self.roi[1]:self.roi[3], self.roi[0]:self.roi[2]]\n return((ret,cropped_image))\n else:\n return((ret,image))\n \n def create_background(self):\n \n self.fgbg = cv2.createBackgroundSubtractorMOG2(detectShadows=False,varThreshold=float(self.args.mogvariance))\n self.fgbg.setBackgroundRatio(0.95)\n \n def background_apply(self):\n \n #Apply Subtraction\n #self.image = self.fgbg.apply(self.original_image,learningRate=self.args.moglearning)\n self.image = self.fgbg.apply(self.original_image)\n \n #Erode to remove noise, dilate the areas to merge bounded objects\n kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE,(15,15))\n self.image= cv2.morphologyEx(self.image, cv2.MORPH_OPEN, kernel)\n \n def adapt(self): #Adapt the MOG sensitivity based on performance\n \n #If current frame is a multiple of the 1000 frames\n if self.frame_count % 1000 == 0: \n #get the percent of frames returned in the last 10 minutes\n if (sum([x < self.frame_count-1000 for x in self.annotations.keys()])/1000) > 0.05:\n \n #increase tolerance rate\n self.args.mogvariance+=5\n \n #add a ceiling\n if self.args.mogvariance > 120: \n self.args.mogvariance = 120\n print(\"Adapting to video conditions: increasing MOG variance tolerance to %d\" % self.args.mogvariance)\n\n def find_contour(self):\n _,self.contours,hierarchy = cv2.findContours(self.image, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE )\n self.contours = [contour for contour in self.contours if cv2.contourArea(contour) > 50]\n \n def cluster_bounding_boxes(self, contours):\n bounding_boxes = []\n for i in range(len(contours)):\n x1,y1,w1,h1 = cv2.boundingRect(contours[i])\n\n parent_bounding_box = self.get_parent_bounding_box(bounding_boxes, i)\n if parent_bounding_box is None:\n parent_bounding_box = self.BoundingBox(Rect(x1, y1, w1, h1))\n parent_bounding_box.members.append(i)\n bounding_boxes.append(parent_bounding_box)\n\n for j in range(i+1, len(contours)):\n if self.get_parent_bounding_box(bounding_boxes, j) is None:\n x2,y2,w2,h2 = cv2.boundingRect(contours[j])\n rect = Rect(x2, y2, w2, h2)\n distance = parent_bounding_box.rect.distance_to_rect(rect)\n if distance < 100:\n parent_bounding_box.update_rect(self.extend_rectangle(parent_bounding_box.rect, rect))\n parent_bounding_box.members.append(j)\n return bounding_boxes\n\n def get_parent_bounding_box(self, bounding_boxes, index):\n for bounding_box in bounding_boxes:\n if index in bounding_box.members:\n return bounding_box\n return None\n\n def extend_rectangle(self, rect1, rect2):\n x = min(rect1.l_top.x, rect2.l_top.x)\n y 
= min(rect1.l_top.y, rect2.l_top.y)\n w = max(rect1.r_top.x, rect2.r_top.x) - x\n h = max(rect1.r_bot.y, rect2.r_bot.y) - y\n return Rect(x, y, w, h) \n \n class BoundingBox:\n def update_rect(self, rect):\n self.rect = rect\n self.x = rect.l_top.x\n self.y = rect.l_top.y\n self.w = rect.width\n self.h = rect.height\n self.time=None\n\n def __init__(self, rect):\n self.update_rect(rect)\n self.members = [] \n\n def clip(self):\n \n #find beginning and end segments\n #multiply frame number by frame rate to get timestamp \n clip_range=ClipLength(self.MotionHistory,self.frame_rate)\n \n ##Clip rules##\n \n #1) If two consecutive clips are within 20 seconds, combine.\n \n n=20\n\n #perform iteratively until no more concatanations\n flat_list = [item for sublist in clip_range for item in sublist]\n b = [abs(i - j) > n for i, j in zip(flat_list[:-1], flat_list[1:])]\n m = [i + 1 for i, j in enumerate(b) if j is True]\n m = [0] + m + [len(flat_list)]\n new_groups = [flat_list[i: j] for i, j in zip(m[:-1], m[1:])]\n \n rule1=[[min(x),max(x)] for x in new_groups]\n \n #2 If clip duration is less than 2 second, remove\n rule2=[]\n for clip in rule1:\n if clip[1]-clip[0] > 2:\n rule2.append(clip)\n \n #If no clips after rules\n if len(rule2)==0:\n print(\"No remaining clips\")\n return None\n \n #turn back class if needed\n if isinstance(rule2[0],float):\n rule2=[rule2]\n \n #Create clip class\n VideoClips=[]\n for index,clip_info in enumerate(rule2):\n cl=VideoClip(video_context=self.video_context,features=self.features,video_client=self.video_client)\n \n cl.bucket=self.bucket\n cl.original_path=self.args.video #video path on local machine\n cl.begin=clip_info[0] # Begin Time\n cl.end=clip_info[1] #End Time\n cl.frame_rate=self.frame_rate \n \n #add clip number and set GCS path\n vname,ext=os.path.splitext(os.path.basename(self.args.video))\n cl.local_path= self.file_destination + \"/\" + vname+\"_\"+str(index)+\".avi\"\n VideoClips.append(cl)\n \n #for each VideoClip, cut segment using FFMPEG, upload to GCS and annotate using cloud video intelligence\n self.clip_labels=[]\n for clip in VideoClips:\n clip.ffmpeg()\n clip.upload()\n clip.label()\n self.clip_labels.append(clip.parse())\n \n def write(self): \n \n #write parameter logs \n self.output_args=self.file_destination + \"/parameters.csv\"\n with open(self.output_args, 'wb') as f: \n writer = csv.writer(f,)\n writer.writerows(self.args.__dict__.items())\n \n #Total time\n self.total_min=round((self.end_time-self.start_time)/60,3)\n writer.writerow([\"Minutes\",self.total_min])\n \n #Frames in file\n writer.writerow([\"Total Frames\",self.frame_count])\n \n #Frames returned to file\n writer.writerow([\"Motion Events\",len(self.annotations)])\n \n #Hit rate\n len(self.annotations)\n writer.writerow([\"Return rate\",float(len(self.annotations)/self.frame_count)])\n \n #Frames per second\n writer.writerow([\"Frame processing rate\",round(float(self.frame_count)/(self.total_min*60),2)])\n \n #Write frame bounding boxes\n self.output_annotations=self.file_destination + \"/bounding_boxes.csv\"\n with open(self.output_annotations, 'wb') as f: \n writer = csv.writer(f)\n writer.writerow([\"Frame\",\"x\",\"y\",\"h\",\"w\"])\n for x in self.annotations.keys(): \n bboxes=self.annotations[x]\n for bbox in bboxes: \n writer.writerow([x,bbox.x,bbox.y,bbox.h,bbox.w])\n\n #Write clip annotations\n self.output_annotations=self.file_destination + \"/annotations.csv\"\n with open(self.output_annotations, 'wb') as f: \n writer = csv.writer(f)\n 
writer.writerow([\"Video\",\"Clip\",\"Label\",\"Clip_Minute\",\"Video_Minute\",\"Confidence\"])\n for clip in self.clip_labels: \n for line in clip:\n writer.writerow(line)\n \n###Helper Functions#####\n \ndef ClipLength(l,frame_rate):\n \n #get first position of Motion\n indexes = [next(group) for key, group in groupby(enumerate(l), key=itemgetter(1))]\n \n #number of frames with Motion\n len_indexes = [len(list(group)) for key, group in groupby(l)]\n \n clip_range=[]\n \n #Create time ranges by dividing frame counts by frame rate\n for position,length in enumerate(len_indexes):\n if indexes[position][1] == True:\n clip_range.append([float(indexes[position][0])/frame_rate,float(indexes[position][0]+length)/frame_rate])\n return clip_range\n"
},
{
"alpha_fraction": 0.45969945192337036,
"alphanum_fraction": 0.4722222089767456,
"avg_line_length": 33.16535568237305,
"blob_id": "62f40aa38e908b3fb61c337b0189c90c646b328b",
"content_id": "8c5c69a9dbca8936ce6993bea0059fd5b1c4c82b",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4392,
"license_type": "permissive",
"max_line_length": 131,
"num_lines": 127,
"path": "/PreviousVersion/DownloadVideo.py",
"repo_name": "bw4sz/GoogleVideo",
"src_encoding": "UTF-8",
"text": "import label\nimport os\nimport time\nimport cv2\nimport numpy as np\nimport urllib\n\nclass Video:\n def __init__(self,path,vidpath,keep,write,view):\n \n self.time = time.time() # start time\n self.path = path # url to video\n self.vidpath=vidpath # where to save video locally\n self.keep=keep # should video be deleted\n self.write=write #write an annotated video\n self.view=view #view output as it happens\n \n def label(self):\n self.labels=label.main(self.path)\n label.label_parse(self.labels)\n \n def download(self):\n #check if path exists\n #if not download file\n vidname=os.path.basename(self.path)\n print(\"Checking if %s exists\" %(vidname))\n self.local_file= self.vidpath +\"/\"+ vidname\n if not os.path.isfile(self.local_file):\n print(\"Downloading \" + str(self.path))\n \n #if google cloud path\n \n if self.path[0:3] == \"gs:\":\n \n #bucket name\n bucket_name=self.path.split(\"/\")[2]\n \n #file path\n f=self.path.split(\"/\")[3:]\n source_blob_name='/'.join(f)\n \n destination_file_name=self.local_file\n \n #download from gcp\n label.download_blob(bucket_name, source_blob_name, \n destination_file_name)\n \n else:\n #Any arbitrary public path \n urllib.urlretrieve(self.path, self.local_file)\n print (\"Download complete \")\n \n def show(self):\n \n #frame counter\n fcount=0\n\n if self.write:\n \n #load video\n cap = cv2.VideoCapture(self.local_file) \n \n #Get frame rate\n fr=cap.get(5)\n orig_image = cap.read()[1] \n \n #Get information about camera and image\n width = np.size(orig_image, 1)\n height = np.size(orig_image, 0)\n frame_size=(width, height) \n \n #create videowriter with annotated file name\n vidname=os.path.basename(self.path)\n self.annotated_file= self.vidpath + \"/annotated_\" + vidname \n out = cv2.VideoWriter(self.annotated_file,cv2.VideoWriter_fourcc('X','V','I',\"D\"),float(fr),frame_size) \n \n #play video\n cap = cv2.VideoCapture(self.local_file)\n \n while True:\n ret, frame = cap.read() \n \n fcount=fcount+1 # add frame count\n \n #check for end of video\n if not ret:\n break\n #get time, API returns in microseconds, opencv in milliseconds\n msec=cap.get(cv2.CAP_PROP_POS_MSEC)*1000 \n \n #which labels fall into this time\n labels_to_write=list()\n \n labelData = self.labels.label_annotations\n for label in labelData:\n for location in label.locations:\n segment = location.segment\n startTime = segment.start_time_offset\n endTime = segment.end_time_offset\n if msec > (int(startTime)-400000) and msec < (int(endTime)+400000):\n labels_to_write.append(str(label.description)) \n\n\n #write labels\n font = cv2.FONT_HERSHEY_SIMPLEX\n \n #position counter\n pcount=0\n for text in labels_to_write:\n cv2.putText(frame,text,(10,30 + 50 * pcount), font, 1,(255,0,0),2,cv2.LINE_AA) \n pcount=pcount+1\n \n if self.write:\n out.write(frame)\n \n if self.view:\n cv2.imshow('frame',frame) \n #show frame - hit q to exit frame\n if cv2.waitKey(1) & 0xFF == ord('q'):\n break\n \n cap.release()\n cv2.destroyAllWindows()\n \n def cleanup(self):\n if not self.keep:\n os.remove(self.local_file)\n \n \n \n \n \n "
},
{
"alpha_fraction": 0.6587575674057007,
"alphanum_fraction": 0.6639344096183777,
"avg_line_length": 35.492061614990234,
"blob_id": "82e1a974354932ad6e8a0211012a0404fe028983",
"content_id": "f05c339af35cddb7533eb77dce4a1a5b1096f2b1",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2318,
"license_type": "permissive",
"max_line_length": 120,
"num_lines": 63,
"path": "/PreviousVersion/label.py",
"repo_name": "bw4sz/GoogleVideo",
"src_encoding": "UTF-8",
"text": "import argparse\nimport urllib\nimport json\nfrom google.cloud import storage\nimport sys\nimport time\n\nfrom google.cloud.gapic.videointelligence.v1beta1 import enums\nfrom google.cloud.gapic.videointelligence.v1beta1 import (\n video_intelligence_service_client)\nfrom google.cloud.proto.videointelligence.v1beta1 import video_intelligence_pb2\n\ndef main(path):\n \"\"\" Detects labels given a GCS path. \"\"\"\n video_client = (video_intelligence_service_client.\n VideoIntelligenceServiceClient())\n features = [enums.Feature.LABEL_DETECTION]\n video_context = video_intelligence_pb2.VideoContext()\n video_context.stationary_camera = True\n video_context.label_detection_mode = video_intelligence_pb2.FRAME_MODE\n operation = video_client.annotate_video(path, features, video_context=video_context)\n print('\\nProcessing video for label annotations:')\n\n while not operation.done():\n sys.stdout.write('.')\n sys.stdout.flush()\n time.sleep(10)\n\n print('\\nFinished processing.')\n\n results = operation.result().annotation_results[0]\n\n return(results)\n\ndef label_parse(results):\n \n labelData = results.label_annotations\n print ('Video Annotations:')\n for label in labelData:\n print (label.description) \n for location in label.locations:\n segment = location.segment\n startTime = segment.start_time_offset\n endTime = segment.end_time_offset\n print (\" \" + str(startTime) + \", \" + str(endTime))\n \ndef download_blob(bucket_name, source_blob_name, destination_file_name):\n \"\"\"Downloads a blob from the bucket.\"\"\"\n storage_client = storage.Client()\n try:\n bucket = storage_client.get_bucket(bucket_name)\n blob = bucket.blob(source_blob_name)\n \n blob.download_to_filename(destination_file_name)\n \n print('Blob {} downloaded to {}.'.format(\n source_blob_name,\n destination_file_name)) \n except:\n print(\"User does not have access to that bucket. Trying public link:\")\n gcs_url = 'https://%(bucket)s.storage.googleapis.com/%(file)s' % {'bucket':bucket_name, 'file':source_blob_name}\n urllib.urlretrieve(gcs_url, destination_file_name)\n print (\"Download complete\") \n \n \n\n"
},
{
"alpha_fraction": 0.7403100728988647,
"alphanum_fraction": 0.7937984466552734,
"avg_line_length": 50.599998474121094,
"blob_id": "0ac1331152f70404b0c00086fd28505bbc366153",
"content_id": "179defc8340e4108a02b8f87fd4cb90e29339532",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1290,
"license_type": "permissive",
"max_line_length": 176,
"num_lines": 25,
"path": "/tests/tests_basic.py",
"repo_name": "bw4sz/GoogleVideo",
"src_encoding": "UTF-8",
"text": "import MotionMeerkat\nimport os\n\n#Run defaults\n# Serice account credentials\nos.environ['GOOGLE_APPLICATION_CREDENTIALS'] = \"C:/Users/Ben/Dropbox/Google/MeerkatReader-9fbf10d1e30c.json\"\n\n#Test 1\n#defaults, no download, video file supplied locally in git repo\nMotionMeerkat.MotionMeerkat(path='gs://api-project-773889352370-testing/Clips/jantest.mp4',view=False,keep=True,write=True,vidpath=\"C:/Users/Ben/Dropbox/GoogleCloud/\")\n\n#MotionMeerkat.MotionMeerkat(path='gs://api-project-773889352370-testing/Clips/1450_bbwo_female.ts',view=False,keep=True,write=True,vidpath=\"C:/Users/Ben/Dropbox/GoogleCloud/\")\n\n#Test 2\n#A new private file in my gcs bucket, reads the service account credentials, deletes file when done.\n#MotionMeerkat.MotionMeerkat(\"gs://api-project-773889352370-testing/Clips/shark.avi\",view=True,keep=True,write=True,vidpath=\"C:/Users/Ben/Dropbox/GoogleCloud/\")\n\n#MotionMeerkat.MotionMeerkat(\"gs://api-project-773889352370-testing/Hummingbirds/FH110_02.AVI\",write=True,vidpath=\"C:/Users/Ben/Dropbox/GoogleCloud\")\n\n#Test 3\n#A new public file in someone else's gcs bucket, deletes file when done.\n#MotionMeerkat.MotionMeerkat(\"gs://cloud-ml-sandbox/video/chicago.mp4\",write=True,vidpath=\"C:/Users/Ben/Dropbox/GoogleCloud\")\n\n#Test 4\n#A low quality file with several scene changes.\n"
},
{
"alpha_fraction": 0.8002645373344421,
"alphanum_fraction": 0.804894208908081,
"avg_line_length": 51.13793182373047,
"blob_id": "cb3afa4bb0a25f036047ad2a2e8d7ef58eea1e4c",
"content_id": "30209351a16df5cd219343eb75610f94e3bb09d6",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 1514,
"license_type": "permissive",
"max_line_length": 451,
"num_lines": 29,
"path": "/README.md",
"repo_name": "bw4sz/GoogleVideo",
"src_encoding": "UTF-8",
"text": "# MotionMeerkat in the cloud\n\nObserving biodiversity is expensive and time-consuming. Ecologists are increasingly turning to long duration video to locate, count and identify animals in natural environments. However, scientists currently waste hundreds of hours manually watching and annotating frames. Automated video analysis using computer vision will increase the efficiency of ecological sampling and allow scientists to understand the effect of global change on biodiversity.\n\nI am building a cloud platform for scientists to annotate animal presence in ecological videos. My project will utilize Google’s newly announced [Cloud Video Intelligence API](https://cloud.google.com/video-intelligence/) for animal detection and massive parallelization. This will dramatically improve the reach, capability and functionality of my current desktop software for an engaged user community. \n\nThese new tools use deep learing nueral networks to classify images contained in video sequences. \n\n\n\n## Installation\n\nThe following python packages need to be installed to run MotionMeerkat and the Google Cloud Video API\n\n## File Structure\n\nTo specify a file to analyze\n\n```\nMotionMeerkat.MotionMeerkat(<pathtofile>)\n```\n\nTo run general tests_basic\n\n```\n python tests_basic.py\n```\n\n** Please note that the Google Cloud Intelligence API is current in private beta ** (4/8/2017) [Apply for access](https://cloud.google.com/video-intelligence/)\n"
},
{
"alpha_fraction": 0.5161290168762207,
"alphanum_fraction": 0.5294466018676758,
"avg_line_length": 37.94186019897461,
"blob_id": "a03be7018eb9066d8b468ed74788ed4e45f6464a",
"content_id": "a3fbb5c2c6aa6e0d30ffbe062f7e559b23ea9f2b",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3379,
"license_type": "permissive",
"max_line_length": 116,
"num_lines": 86,
"path": "/VideoMeerkat/VideoClip.py",
"repo_name": "bw4sz/GoogleVideo",
"src_encoding": "UTF-8",
"text": "import sys\nimport time\nimport os\nfrom moviepy.video.io.ffmpeg_tools import ffmpeg_extract_subclip\n\nclass VideoClip:\n \n def __init__(self,video_context,features,video_client):\n \n self.video_context=video_context\n self.features=features\n self.video_client=video_client\n \n self.bucket=None #Bucket authenticated destination\n self.begin=None #Start Time\n self.end=None # End Time\n self.frame_rate=None #Frame Rate\n self.original_path=None #Full video\n self.local_path=None #The clipped path variable\n self.gcs_path=None #Desired GCS path\n\n def ffmpeg(self):\n ffmpeg_extract_subclip(self.original_path, self.begin, self.end, targetname=self.local_path)\n \n def upload(self):\n \n #Upload clip to google cloud\n #construct filename\n splitname=os.path.split(self.local_path)\n filename=splitname[len(splitname)-1]\n blob = self.bucket.blob(\"VideoMeerkat\" + \"/\" + filename.lower())\n \n self.gcs_path='gs://' + self.bucket.name +\"/\"+ blob.name\n \n if not blob.exists():\n blob.upload_from_filename(filename=self.local_path) \n #upload to gcp \n print(\"Uploaded \" + self.gcs_path)\n\n def label(self):\n \n operation = self.video_client.annotate_video(self.gcs_path, self.features, video_context=self.video_context)\n print('\\nProcessing video for label annotations:')\n \n while not operation.done():\n sys.stdout.write('.')\n sys.stdout.flush()\n time.sleep(15)\n \n print('\\nFinished processing.')\n \n self.result = operation.result().annotation_results[0]\n \n for i, label in enumerate(self.result.label_annotations):\n print('Label description: {}'.format(label.description))\n print('Locations:')\n \n for l, location in enumerate(label.locations):\n positions = 'Entire video'\n if (location.segment.start_time_offset != -1 or\n location.segment.end_time_offset != -1):\n positions = '{} to {}'.format(\n location.segment.start_time_offset / 1000000.0,\n location.segment.end_time_offset / 1000000.0)\n \n print('\\t{}: {}'.format(l, positions))\n \n print('\\n')\n\n def parse(self):\n self.parsed_labels=[]\n for label in self.result.label_annotations:\n for location in label.locations:\n \n #skip full video annotations\n if location.segment.end_time_offset == -1:\n continue \n \n #pass lables, convert time to minutes\n self.parsed_labels.append([self.original_path,\n self.local_path,\n str(label.description), \n location.segment.start_time_offset/1000000.0/60,\n location.segment.start_time_offset/1000000.0/60+self.begin/60,\n location.confidence])\n return self.parsed_labels \n \n \n"
},
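VideoClip.ffmpeg() above delegates the actual cutting to moviepy's `ffmpeg_extract_subclip`. A minimal standalone sketch of that call, with placeholder file names that are not part of the repo:

```python
# Minimal sketch of the moviepy call used by VideoClip.ffmpeg().
# "full_video.mp4" and "clip.mp4" are placeholder paths, not repo files.
from moviepy.video.io.ffmpeg_tools import ffmpeg_extract_subclip

# Cut the subclip between seconds 10 and 25 into a new file.
ffmpeg_extract_subclip("full_video.mp4", 10, 25, targetname="clip.mp4")
```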
{
"alpha_fraction": 0.6903385519981384,
"alphanum_fraction": 0.6903385519981384,
"avg_line_length": 38.064517974853516,
"blob_id": "28aee761f6577cf758e50b8368a3d1087edc224d",
"content_id": "76033b78641e2ee8787e135aef924e713388dd94",
"detected_licenses": [
"Apache-2.0"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1211,
"license_type": "permissive",
"max_line_length": 113,
"num_lines": 31,
"path": "/PreviousVersion/main.py",
"repo_name": "bw4sz/GoogleVideo",
"src_encoding": "UTF-8",
"text": "import argparse \nimport Video\n\ndef run(path,keep,write,view,vidpath=\"\"):\n\n #create instance\n video_instance=Video.Video(path=path,vidpath=vidpath,keep=keep,write=write,view=view)\n\n #send to google for labels\n video_instance.label() \n \n #download file to play locally\n video_instance.download()\n \n #show video with annotations\n video_instance.show()\n \n #cleanup video staging file\n video_instance.cleanup()\n \n#run if called directly from command line\nif __name__ == '__main__':\n parser = argparse.ArgumentParser()\n parser.add_argument('-gcs_uri', help='The local file directory to save annotated video') \n parser.add_argument('-keep', help='Should the downloaded file be kept after analysis?',action=\"store_true\") \n parser.add_argument('-write', help='Should a annotated video file be written',action=\"store_false\") \n parser.add_argument('-view', help='Show annotations within program video',action=\"store_true\") \n parser.add_argument('-vidpath', help='directory to save annotated video',default=\"\") \n \n args = parser.parse_args() \n run(path=args.gcs_uri,keep=args.keep,write=args.write,show=args.show,vidpath=args.vidpath)\n"
}
] | 9 |
MoodSherzad/Tillampad-Datalogi-KTH
|
https://github.com/MoodSherzad/Tillampad-Datalogi-KTH
|
7c52ec796e51f6d4c24ea1e48b0f087c8976bb25
|
471d8cee0975475cd5a1ae0145f04e017cbe62b2
|
3bfb867d4f86b7fe9fd9243af42eb47dbc415f53
|
refs/heads/master
| 2022-12-18T06:16:25.599724 | 2020-09-15T13:29:29 | 2020-09-15T13:29:29 | 170,544,520 | 0 | 1 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5778170228004456,
"alphanum_fraction": 0.5913503766059875,
"avg_line_length": 26.419355392456055,
"blob_id": "707917d75bf39ad58b5c489e44c39545ea0f361e",
"content_id": "9d03106a1ded15a91852dfbd42e55cbb81d2ace2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3428,
"license_type": "no_license",
"max_line_length": 88,
"num_lines": 124,
"path": "/Labbar/Lab 6/labsex.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "import timeit\n\nclass Song:\n def __init__(self, track_id, låt_id, key_namn, låt_titel):\n self.track_id = track_id\n self.låt_id = låt_id\n self.key_namn = key_namn\n self.låt_titel = låt_titel\n\n def __lt__(self, other):\n return self.track_id < other.track_id\n\ndef readfile(file_name):\n LIST_SONG = []\n DICT_SONG = {} # Dictionary syntax\n with open(file_name, encoding='utf-8') as data_set:\n for raw_row in data_set:\n row = raw_row.strip('\\n').split('<SEP>')\n song_object = Song(row[0], row[1], row[2], row[3])\n LIST_SONG.append(song_object)\n DICT_SONG[row[0]] = song_object\n return LIST_SONG, DICT_SONG\n\n\n\n'''\nTidskomplexitet O(n)\n'''\ndef linsok(song_list, key):\n # Tagen från föreläsning 3. Har en tidskomplexitet på O(n)\n for x in song_list:\n if key == x.track_id:\n return True\n return False\n\n'''\nquicksort och dess hjälpfunktioner togs från föreläsning 7.\nTidskomplexitet på O(n log(n)) i bästa fallet och O(n^2) i värsta fallet\n'''\n# Använd mergesort istället, finns i föreläsning 7\ndef quicksort(data):\n sista = len(data) - 1\n qsort(data, 0, sista)\n\ndef qsort(data, low, high):\n pivotindex = (low+high)//2\n # flytta pivot till kanten\n data[pivotindex], data[high] = data[high], data[pivotindex] \n \n # damerna först med avseende på pivotdata\n pivotmid = partitionera(data, low-1, high, data[high]) \n \n # flytta tillbaka pivot\n data[pivotmid], data[high] = data[high], data[pivotmid] \n \n if pivotmid-low > 1:\n qsort(data, low, pivotmid-1)\n if high-pivotmid > 1:\n qsort(data, pivotmid+1, high)\n\ndef partitionera(data, v, h, pivot):\n while True:\n v = v + 1\n while data[v] < pivot:\n v = v + 1\n h = h - 1\n while h != 0 and pivot < data[h]:\n h = h - 1\n data[v], data[h] = data[h], data[v]\n if v >= h: \n break\n data[v], data[h] = data[h], data[v]\n return v\n\n\n# Tidskomplexitet O(log n)\n\ndef binary_search(the_list, key):\n low = 0\n high = len(the_list)-1 # För att kunna komma det sista elementet\n found = False\n\n while low <= high and not found:\n middle = (low + high)//2\n if the_list[middle] == key:\n found = True\n else:\n if key < the_list[middle]:\n high = middle - 1\n else:\n low = middle + 1\n return found\n\n'''\nTidskomplexitet O(1)\n'''\ndef dictsok(song_dict, key):\n resultat = song_dict[key]\n\ndef main():\n\n filename = \"unique_tracks.txt\"\n \n lista, dictionary = readfile(filename)\n n = len(lista)\n print(\"Antal element =\", n)\n\n sista = lista[n-1]\n testkey = sista.track_id\n\n linjtid = timeit.timeit(stmt = lambda: linsok(lista, testkey), number = 1)\n print(\"Linjärsökningen tog\", round(linjtid, 4) , \"sekunder\")\n\n sorttid = timeit.timeit(stmt = lambda: quicksort(lista), number = 1)\n print(\"Det tog\", round(sorttid, 4), \"att sortera listan med hjälp av quicksort\")\n\n bintid = timeit.timeit(stmt = lambda: binary_search(lista, testkey), number = 10000)\n print(\"Binärsökningen tog\", round(bintid, 4) , \"sekunder\")\n\n dicttid = timeit.timeit(stmt = lambda: dictsok(dictionary, testkey), number = 10000)\n print(\"Uppslagning i dictionary tog\", round(dicttid, 4) , \"sekunder\")\n\nif __name__ == '__main__':\n main()"
},
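The comment in labsex.py points to mergesort from lecture 7 as the safer alternative to quicksort. A minimal sketch of a list-returning mergesort, which stays O(n log n) in every case and works on the Song objects above since they define __lt__; it is an illustration, not one of the original lab files:

```python
# Minimal mergesort sketch (stable, O(n log n) worst case).
def mergesort(data):
    if len(data) <= 1:
        return data
    mid = len(data) // 2
    left, right = mergesort(data[:mid]), mergesort(data[mid:])
    merged, i, j = [], 0, 0
    while i < len(left) and j < len(right):
        if left[i] < right[j]:
            merged.append(left[i])
            i += 1
        else:
            merged.append(right[j])
            j += 1
    merged.extend(left[i:])   # one side is exhausted;
    merged.extend(right[j:])  # append the remainder of the other
    return merged

print(mergesort([5, 2, 9, 1]))  # [1, 2, 5, 9]
```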
{
"alpha_fraction": 0.599629282951355,
"alphanum_fraction": 0.6163113713264465,
"avg_line_length": 29,
"blob_id": "75f5288085c6b2835d547d5e4811643c41abc4ce",
"content_id": "b8e05a437840fe3e9ac43b344fba9d5818dbbbed",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1102,
"license_type": "no_license",
"max_line_length": 77,
"num_lines": 36,
"path": "/Labbar/Lab 6/Lista.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "class Song:\n def __init__(self, track_id, artistnamn, sångtitel, låtlängd, år):\n # artistid\tartistnamn\tsångtitel\tlåtlängd\tår (inläsnings strukturen)\n self.track_id = track_id\n self.artistnamn = artistnamn\n self.sångtitel = sångtitel\n self.låtlängd = låtlängd\n self.är = år\n\n\n\ndef readfile(file_name):\n LIST_SONG = []\n DICT_SONG = {} # Dictionary syntax\n with open(file_name, encoding='utf-8') as data_set:\n for raw_row in data_set:\n row = raw_row.strip('\\n').split('\\t')\n song_object = Song(row[0], row[1], row[2], row[3], row[4])\n LIST_SONG.append(song_object)\n DICT_SONG[row[3]] = song_object\n return LIST_SONG, DICT_SONG\n \ndef nstorsta2(x, n): #x är listan och n är vilket tal vi vill ha\n list1 = []\n for i in range(n):\n list1.append(x.pop(max(x)))\n print(list1[-1].låtlängd)\n\n\nfilename = \"testlistan.txt\" #behövs sen\nlista, dictionary = readfile(filename) #behövs sen\n\n\nnstorsta2(dictionary,3) \nnstorsta2(dictionary,2)\nnstorsta2(dictionary,1)"
},
{
"alpha_fraction": 0.5408768057823181,
"alphanum_fraction": 0.5479857921600342,
"avg_line_length": 27.627119064331055,
"blob_id": "41af9cae2dde45fbfee8adc020225c56438ac98a",
"content_id": "01af44062990bdbd99a90ebeeeaa6f29cd8866dd",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1709,
"license_type": "no_license",
"max_line_length": 125,
"num_lines": 59,
"path": "/Labbar/Lab 7/fel.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "class HashNode:\n def __init__(self, nyckel, värde):\n self.nyckel = nyckel\n self.värde = värde\n\n\nclass HashTabell:\n def __init_(self, storlek = 32):\n self._storlek = storlek\n self._tabell = [None] * self._storlek\n self._tilläg = 0\n\"\"\"\n def _getitem__(self, nyckel):\n if key in HashTabell:\n self.get(key)\n else\n KeyError\n\n\"\"\"\n def __getitem__(self, nyckel):\n try: \n self.hämta(key)\n except KeyError\n print(\"Nyckel finns ej\")\n\n def __putitem__(self, nyckel, value):\n self.put(key, value)\n \n \n\n\n\n\n\n \n \n \n def omformatera(self): # Done #Funktion som hjälper oss att göra våran Hashtabell större.\n self._table = self._table + [None] * self._size\n self._size = self._size * 2\n\n def hämta(self, nyckel1): #Funktion som hjälper oss att plocka ut de vi eftersöka från vår Hashtabell.\n index = self._hashfunc(nyckel1) #index i hashtabeln\n attempts = 0\n new_index = index - 1\n while attempts <= self._size: # så länge försök är mindre eller lika med storleken\n new_index = (new_index + 1) % self._size\n if self._table[new_index] is not None and self._table[new_index].key == key1:\n return self._table[new_index].value\n attempts += 1\n raise KeyError \n\n\n def _hashfunc(self,key): #Done #Vår egenskriven hashfunktion som returnerna våra \"key's\" till Hashtabellen.\n n = len(key)\n keySum = 0 \n for x in range(n):\n keySum = keySum + ord(key[x])\n return keySum%self.size)"
},
{
"alpha_fraction": 0.6860986351966858,
"alphanum_fraction": 0.6905829310417175,
"avg_line_length": 21,
"blob_id": "9d16f246f0e364586c0b941a613320f5585e6a88",
"content_id": "4efe927dcd8a4425a1d6a9b3fe5f84e945ab0c75",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 233,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 10,
"path": "/Labbar/Föreläsningar/Föreläsning 8.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "# Trädet är balanserad när höjdskillnaden är max 1 nivå\n# Trappa eller heap kan beskrivas som ett binärträd (ej sökträd)\n# pq.delMin()\n# pq.delMin()\n# pq.insert()\n# pq.exists()\n\n\ndef writechain(p):\n if p is not None: \n"
},
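The lecture-8 notes above name the heap ("trappa") and the priority-queue operations delMin/insert. Python's standard heapq module implements exactly this binary min-heap on top of a list; a small sketch of the correspondence:

```python
# Sketch: the pq operations from the notes expressed with the standard
# library's heapq (a binary min-heap stored in a plain list).
import heapq

pq = []
for x in [7, 1, 12, 2]:
    heapq.heappush(pq, x)   # pq.insert(x)
print(heapq.heappop(pq))    # pq.delMin() -> 1
print(pq[0])                # peek at the current minimum -> 2
```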
{
"alpha_fraction": 0.5462728142738342,
"alphanum_fraction": 0.5605815649032593,
"avg_line_length": 25.266666412353516,
"blob_id": "a3ac821e5d204ab4255d2c83aae3ddf80480d2e4",
"content_id": "eaa3ca7d24a65737cada8d239272682f4434bfb7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4348,
"license_type": "no_license",
"max_line_length": 124,
"num_lines": 165,
"path": "/Labbar/Lab 10/L9v2.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "from linkedQFile import *\nimport string\nimport sys\nq = LinkedQ()\np = LinkedQ()\natomLista = ['H', 'He', 'Li', 'Be', 'B', 'C', 'N', 'O', 'F', 'Ne', 'Na',\n 'Mg', 'Al', 'Si', 'P', 'S', 'Cl', 'Ar', 'K', 'Ca', 'Sc', 'Ti', 'V', 'Cr',\n 'Mn', 'Fe', 'Co', 'Ni', 'Cu', 'Zn', 'Ga', 'Ge', 'As', 'Se', 'Br', 'Kr', \n 'Rb', 'Sr', 'Y', 'Zr', 'Nb', 'Mo', 'Tc', 'Ru', 'Rh', 'Pd', 'Ag', 'Cd', \n 'In', 'Sn', 'Sb', 'Te', 'I', 'Xe', 'Cs', 'Ba', 'La', 'Ce', 'Pr', 'Nd', \n 'Pm', 'Sm', 'Eu', 'Gd', 'Tb', 'Dy', 'Ho', 'Er', 'Tm', 'Yb', 'Lu', 'Hf',\n 'Ta', 'W', 'Re', 'Os', 'Ir', 'Pt', 'Au', 'Hg', 'Tl', 'Pb', 'Bi', 'Po', \n 'At', 'Rn', 'Fr', 'Ra', 'Ac', 'Th', 'Pa', 'U', 'Np', 'Pu', 'Am', 'Cm',\n 'Bk', 'Cf', 'Es', 'Fm', 'Md', 'No', 'Lr', 'Rf', 'Db', 'Sg', 'Bh', 'Hs', \n\t'Mt', 'Ds', 'Rg', 'Cn', 'Fl', 'Lv']\n\nclass Syntaxfel(Exception):\n\tpass\n\ndef storeMolekyl2(molekyl):\n\tfor symbol in molekyl:\n\t\tq.enqueue(symbol)\n\treturn q\n\ndef readmol2():\n\treadgroup2()\n\tif q.isEmpty():\n\t\treturn\n\telif q.peek() is \")\":\n\t\tif p.isEmpty():\n\t\t\traise Syntaxfel(\"Felaktig gruppstart vid radslutet \") \n\t\treturn\n\telse:\n\t\treadmol2()\n\ndef readgroup2():\n\tif q.isEmpty():\n\t\traise Syntaxfel(\"Felaktig gruppstart vid radslutet \")\n\n\tif q.peek().isdigit():\n\t\traise Syntaxfel(\"Felaktig gruppstart vid radslutet \")\n\t\t\n\tif q.peek().isalpha():\n\t\treadAtom2()\n\t\tif q.peek() is None:\n\t\t\treturn\n\t\tif q.peek().isdigit():\n\t\t\tnumber2()\n\t\treturn\n\n\telif q.peek() is \"(\": #PATANTESER\n\t\tp.enqueue(q.dequeue())\n\t\tif q.peek().isdigit():\n\t\t\traise Syntaxfel(\"Felaktig gruppstart vid radslutet \")\n\t\treadmol2()\n\t\tif not q.peek() is \")\":\n\t\t\traise Syntaxfel(\"Saknad högerparentes vid radslutet \")\n\t\t\t\t\n\t\tif q.isEmpty():\n\t\t\traise Syntaxfel(\"Saknad siffra vid radslutet \")\n\t\telse:\n\t\t\tp.dequeue()\n\t\t\tq.dequeue() #PARANTESER\n\t\t\tif q.isEmpty():\n\t\t\t\traise Syntaxfel(\"Saknad siffra vid radslutet \")\n\t\t\tnumber2()\t\t\t\n\telse:\n\t\traise Syntaxfel(\"Felaktig gruppstart vid radslutet \")\n\ndef readAtom2():\n\tif q.peek().isupper():\n\t\tatom = q.dequeue()\n\t\t#print(r, \"readAtom2 stor bokstav\")\n\telse:\n\t\traise Syntaxfel(\"Saknad stor bokstav vid radslutet \")\n\tif not q.peek() is None:\n\t\tif q.peek().islower():\n\t\t\tatom = atom + q.dequeue()\n\tif atom in atomLista:\n\t\treturn\n\telse:\n\t\traise Syntaxfel(\"Okänd atom vid radslutet \")\n\ndef number2(): #FIXAD DELUX\n if q.peek().isdigit():\n if q.peek() == \"0\":\n q.dequeue()\n raise Syntaxfel(\"För litet tal vid radslutet \")\n elif q.peek() == \"1\":\n try:\n if q.peekNext().isdigit():\n while q.peek().isdigit():\n q.dequeue()\n break\n else:\n #q.dequeue()\n raise Syntaxfel(\"För litet tal vid radslutet \")\n except:\n q.dequeue()\n raise Syntaxfel(\"För litet tal vid radslutet \")\n \n while q.peek() != None:\n if q.peek().isdigit():\n q.dequeue()\n else:\n return\n else:\n raise Syntaxfel(\"Saknad siffra vid radslutet \")\n\ndef firstError2(): #FIXAD DELUX\n\tnotDequeue = \"\"\n\twhile not q.isEmpty():\n\t\tnotDequeue = notDequeue + q.dequeue()\n\treturn notDequeue\n\ndef readFormel2(molekyl): #FIXAD halvt\n\tmolekyl = molekyl.strip()\n\tstoreMolekyl2(molekyl)\n\ttry:\n\t\treadmol2()\n\t\tif p.isEmpty is False:\n\t\t\traise Syntaxfel(\"Saknad högerparentes vid radslutet \")\n\t\treturn \"Formeln är syntaktiskt korrekt\"\n\texcept Syntaxfel as error:\n\t\treturn str(error) + firstError2()\n\"\"\"\ndef main():\n for i in [\"Na\", \"H2O\", \"Si(C3(COOH)2)4(H2O)7\", 
\"Na332\",\"C(Xx4)5\",\"C(OH4)C\",\"C(OH4C\",\"H2O)Fe\", \"H02C\", \"Nacl\",\"(Cl)2)3\"]:\n output = readFormel2(i)\n print(output)\n\"\"\"\n\n\ndef main5():\n\tkattisInput = sys.stdin.readline().strip() # väntar input\n\tif kattisInput != \"#\": # hashtag är en stoppkolss\n\t\toutput = readFormel2(kattisInput)\n\t\tfirstError2()\n\t\tprint(output)\n\t\tq.Empty() #måste rensa känkade listan\n\t\tp.Empty() #måste rensa känkade listan\n\t\tmain()\n\ndef main1():\n\tfor molekyl in [\"Es(W177Pm3Am8AmHo\", \"(98(Sg)G(1ScU\"]:\n\t\tif not molekyl is \"#\":\n\t\t\t\n\t\t\toutput = readFormel2(molekyl)\n\t\t\tfirstError2()\n\t\t\tprint(output)\n\ndef main2(x):\n\tq.Empty()\n\tp.Empty()\n\toutput = readFormel2(x)\n\tfirstError2()\n\treturn output\n\nif __name__ == '__main__':\n main()\n\n\n\n\n# Länk till godkänd kattis: https://kth.kattis.com/submissions/4000040"
},
{
"alpha_fraction": 0.5138964653015137,
"alphanum_fraction": 0.5215258598327637,
"avg_line_length": 37.20833206176758,
"blob_id": "3f26158b12ef800060bd50a34e7cb0c1b352da3f",
"content_id": "339985d9a804204ede74cb827f6a8f78297ba7ff",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1854,
"license_type": "no_license",
"max_line_length": 98,
"num_lines": 48,
"path": "/Labbar/Lab 4/bfs2.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "from LinkedQfile import LinkedQ\nfrom bintreeFile import Bintree\nsvenska = Bintree() # Skapar ett binärt sökträd för de svenska orden\ngamla = Bintree() # Skapar ett binärt sökträd för de gamla orden\n\n\nwith open(\"word3.txt\", \"r\", encoding=\"utf-8\") as svenskfil:\n for rad in svenskfil:\n ordet = rad.strip() \n if ordet in svenska:\n pass\n else:\n svenska.put(ordet) \n\n\ndef makechildren(nod, slutord, q):\n for i in range(len(nod)): # for loop som går igenom rang med ordets storlek ex [0,1,2]\n for letter in \"abcdefghijklmnopqrstuvwxyzåäö\": # for loop som går igenom varje bokstav\n if nod[i] is letter:\n pass\n else:\n if i == 0:\n new_word = letter + nod[1] + nod [2] #nya ordet när vi byter första bokstaven\n elif i == 1:\n new_word = nod[0] + letter + nod[2] #nya ordet när vi byter andra bokstaven\n elif i == 2:\n new_word = nod[0] + nod[1] + letter #nya ordet när vi byter tredje bokstaven\n if new_word in svenska and new_word not in gamla:\n if new_word == slutord:\n return True \n else:\n q.enqueue(new_word)\n gamla.put(new_word)\n\ndef main():\n start = input(\"Ange startordet: \")\n slut = input(\"Ange slutordet: \")\n queue = LinkedQ()\n queue.enqueue(start)\n while not queue.isEmpty():\n nod = queue.dequeue()\n if makechildren(nod, slut, queue) is True:\n print(\"Det finns en väg till\", slut)\n break\n elif queue.isEmpty() is True:\n print(\"Det finns ingen väg till\", slut)\n\nmain()\n\n"
},
{
"alpha_fraction": 0.6094420552253723,
"alphanum_fraction": 0.6309012770652771,
"avg_line_length": 29.39130401611328,
"blob_id": "9dc5c05ae6b84944eb187b72d5175d2be3890611",
"content_id": "d6ebbdf4c89e1e75f32eb5bdf45a9ceece1f6dab",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 705,
"license_type": "no_license",
"max_line_length": 102,
"num_lines": 23,
"path": "/Labbar/Lab 7/testtest.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "#print(ord(\"hej\"))\n\n\n\n\"\"\"\ndef hashfunc(key): #Vår egenskriven hashfunktion som returnerna våra \"key's\" till Hashtabellen.\n key1 = key[::-1]\n print(key1)\n hash_sum = 0\n for index,char in enumerate(key1):\n hash_sum = hash_sum + 31**(index+1) * ord(char)\n print(hash_sum)\n print(hash_sum%16)\n\"\"\"\ndef hashfunc(key): #Vår egenskriven hashfunktion som returnerna våra \"key's\" till Hashtabellen.\n n = len(key)\n keySum = 0 \n for x in range(n):\n keySum = keySum + ord(key[x]) #tar ascii värdet av varje bokstav och plussar ihop dem\n print(keySum)\n print (keySum%11) # modulus 11 men vi kör mudulus self.size i huvudfilen\n\nhashfunc(\"hejj\")\n"
},
{
"alpha_fraction": 0.508152186870575,
"alphanum_fraction": 0.5113224387168884,
"avg_line_length": 23.82022476196289,
"blob_id": "5101cfd4eabd2661bf1105838c2db31bdac64ddb",
"content_id": "52ddf502ed4e1786074dc888e82aeece776fcc5d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2227,
"license_type": "no_license",
"max_line_length": 70,
"num_lines": 89,
"path": "/Labbar/Lab 8/L8.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "from linkedQFile import *\n\nclass Syntaxfel(Exception):\n pass\n\n# Funktion molecule ska lagra alla symboler i en länkad lista\ndef molecule(indata):\n q = LinkedQ()\n for i in indata:\n q.enqueue(i)\n return q\n\n# Funktion uppercase kontrollerar ifall bokstaven är en stor bokstav\ndef uppercase(q):\n if q.peek().isupper():\n q.dequeue()\n else:\n raise Syntaxfel(\"En stor bokstav saknas!\")\n\n# Funktion lowercase kontrol\ndef lowercase(q):\n if q.peek().islower():\n q.dequeue()\n number(q)\n\n\n\"\"\"def number(q):\n if q.peek() is not None:\n if q.peek().isdigit():\n if q.peek() not in [\"1\",\"0\"]:\n q.dequeue()\n else:\n raise Syntaxfel(\"Siffran måste vara större än 1!\")\n else:\n raise Syntaxfel(\"SyntaxFel\")\n\n \"\"\"\n# kollar ifall siffer används rätt\ndef number(q):\n if q.peek() is not None:\n if q.peek().isdigit():\n\n if q.peek() is \"0\":\n raise Syntaxfel(\"får ej börja med noll\")\n elif q.peek() is \"1\":\n q.dequeue()\n if q.peek() is None:\n raise Syntaxfel(\"Siffran måste vara större än 1!\")\n\n else:\n while q.peek() is not None:\n if q.peek().isdigit():\n q.dequeue()\n pass\n else:\n raise Syntaxfel(\"SyntaxFel\")\n elif q.peek().isdigit():\n q.dequeue()\n number(q)\n else:\n raise Syntaxfel(\"Siffran måste vara större än 1\")\n else:\n raise Syntaxfel(\"SyntaxFel\")\n\n# kollar om molekylen har rätt syntax\ndef syntax_control(indata):\n q = molecule(indata)\n try:\n uppercase(q)\n lowercase(q)\n number(q)\n except Syntaxfel as fel:\n return str(fel)\n return \"Formeln följer korrekt syntax!\"\n\ndef main():\n indata = input(\"Skriv en molekyl: \").strip() # kolla om den är tom\n resultat = syntax_control(indata)\n print(resultat)\n \nif __name__ == '__main__':\n main()\n\n\n \n\n# Godkänd av Caroline Yu\n\n# Tjena Alexander och Julia"
},
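L8.py maps one function to each grammar rule and walks a queue of characters. The same idea in compact index-based form, checking an atom (one uppercase letter, optionally one lowercase) followed by an optional number that must be greater than 1 and may not start with 0; a sketch, not the lab's reference solution:

```python
# Compact sketch of the recursive-descent idea in L8.py: one check per
# grammar rule, here over an index into the string instead of a queue.
def check_formula(s):
    i = 0
    if i < len(s) and s[i].isupper():        # <UPPER> is required
        i += 1
    else:
        return False
    if i < len(s) and s[i].islower():        # optional <lower>
        i += 1
    if i < len(s):                           # optional <num>, must be > 1
        num = s[i:]
        if not num.isdigit() or num[0] == "0" or num == "1":
            return False
        i = len(s)
    return i == len(s)

print(check_formula("He3"), check_formula("ha3"), check_formula("Ha1"))
# True False False
```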
{
"alpha_fraction": 0.5479219555854797,
"alphanum_fraction": 0.5479219555854797,
"avg_line_length": 24.586956024169922,
"blob_id": "b478b4db6f924c9acf74455e98bc0246ec6b60b8",
"content_id": "5c0cb7b21031c4c396f7c4870b6c9fffc8611bcf",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1187,
"license_type": "no_license",
"max_line_length": 103,
"num_lines": 46,
"path": "/Labbar/Lab 9/test/linkedQFile.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "class Node:\n def __init__(self, value):\n self.value = value\n self.next = None\n\n\nclass LinkedQ:\n def __init__(self):\n self.first = None\n self.last = None\n\n def enqueue(self, x):\n temp = Node(x)\n\n if self.first is None: # fix för första elementet\n self.first = temp\n self.last = temp\n else:\n self.last.next = temp # den sista nodens pekare ändras från sig själv till den nya noden\n self.last = temp # den nya noden placeras sist\n\n def dequeue(self):\n x = self.first.value # ettan sparas till en variabel\n self.first = self.first.next # tvåan flyttas fram och blir ettan\n return x\n\n def isEmpty(self): # kollar om listan är tom\n return self.first is None # is snabbare än ==\n\n def Empty(self):\n self.first = None\n self.last = None\n\n def peek(self):\n\n if self.isEmpty() is False:\n return self.first.value\n else:\n return None\n \n def peekNext(self):\n\n if self.isEmpty() is False:\n return self.first.next.value\n else:\n return None\n\n\n"
},
{
"alpha_fraction": 0.554770290851593,
"alphanum_fraction": 0.554770290851593,
"avg_line_length": 29.321428298950195,
"blob_id": "bc696e1e1b2f3764ad2def9dfe8e30c6ff2ef295",
"content_id": "27607842e34a6919131e48ecf4ed4affed2a0af6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 857,
"license_type": "no_license",
"max_line_length": 105,
"num_lines": 28,
"path": "/Labbar/Lab 4/LinkedQfile.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "class Node:\n def __init__(self, value):\n self.value = value\n self.next = None\n\n\nclass LinkedQ:\n def __init__(self):\n self.__first = None\n self.__last = None\n\n def enqueue(self, x):\n temp = Node(x)\n\n if self.__first is None: # fix för första elementet\n self.__first = temp\n self.__last = temp\n else:\n self.__last.next = temp # den sista nodens pekare ändras från sig själv till den nya noden\n self.__last = temp # den nya noden placeras sist\n\n def dequeue(self):\n x = self.__first.value # ettan sparas till en variabel\n self.__first = self.__first.next # tvåan flyttas fram och blir ettan\n return x\n\n def isEmpty(self): # kollar om listan är tom\n return self.__first is None # is snabbare än ==\n"
},
{
"alpha_fraction": 0.47999998927116394,
"alphanum_fraction": 0.49047619104385376,
"avg_line_length": 30.84848403930664,
"blob_id": "1719e9bd0fb908417f2ba91890ab52356b012930",
"content_id": "beaf457d8874d17d36e75da949699e0ecdb09bdb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1053,
"license_type": "no_license",
"max_line_length": 66,
"num_lines": 33,
"path": "/Labbar/Lab 4/bfs.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "from bintreeFile import Bintree\nsvenska = Bintree()\nwith open(\"word3.txt\", \"r\", encoding = \"utf-8\") as svenskfil:\n for rad in svenskfil:\n ordet = rad.strip() \n if ordet in svenska:\n pass\n else:\n svenska.put(ordet) \n\ndef makechildren(startord):\n gamla = Bintree()\n for i in range(len(startord)):\n for letter in \"abcdefghijklmnopqrstuvwxyzåäö\":\n if startord[i] is letter:\n pass\n else:\n if i == 0:\n new_word = letter + startord[1] + startord [2]\n elif i == 1:\n new_word = startord[0] + letter + startord[2]\n elif i == 2:\n new_word = startord[0] + startord[1] + letter\n if new_word in svenska and new_word not in gamla:\n gamla.put(new_word)\n print(new_word)\n\ndef main():\n startord = input(\"Ange startord: \")\n makechildren(startord)\n\nif __name__ == '__main__':\n main()"
},
{
"alpha_fraction": 0.45245853066444397,
"alphanum_fraction": 0.5470971465110779,
"avg_line_length": 32.58706283569336,
"blob_id": "e940481d2760bf65b176935e7fef90525b8c285b",
"content_id": "b33b07395fcfd20895dc6647a637b4fe8374e7d9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 6766,
"license_type": "no_license",
"max_line_length": 148,
"num_lines": 201,
"path": "/Labbar/Lab 10/Lab10.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "from linkedQFile import *\nimport sys\nimport string\nfrom molgrafik import *\nfrom L9v2 import *\n#from hashtest import *\nq = LinkedQ()\nparantes = LinkedQ()\natomLista = ['H', 'He', 'Li', 'Be', 'B', 'C', 'N', 'O', 'F', 'Ne', 'Na',\n 'Mg', 'Al', 'Si', 'P', 'S', 'Cl', 'Ar', 'K', 'Ca', 'Sc', 'Ti', 'V', 'Cr',\n 'Mn', 'Fe', 'Co', 'Ni', 'Cu', 'Zn', 'Ga', 'Ge', 'As', 'Se', 'Br', 'Kr', \n 'Rb', 'Sr', 'Y', 'Zr', 'Nb', 'Mo', 'Tc', 'Ru', 'Rh', 'Pd', 'Ag', 'Cd', \n 'In', 'Sn', 'Sb', 'Te', 'I', 'Xe', 'Cs', 'Ba', 'La', 'Ce', 'Pr', 'Nd', \n 'Pm', 'Sm', 'Eu', 'Gd', 'Tb', 'Dy', 'Ho', 'Er', 'Tm', 'Yb', 'Lu', 'Hf',\n 'Ta', 'W', 'Re', 'Os', 'Ir', 'Pt', 'Au', 'Hg', 'Tl', 'Pb', 'Bi', 'Po', \n 'At', 'Rn', 'Fr', 'Ra', 'Ac', 'Th', 'Pa', 'U', 'Np', 'Pu', 'Am', 'Cm',\n 'Bk', 'Cf', 'Es', 'Fm', 'Md', 'No', 'Lr', 'Rf', 'Db', 'Sg', 'Bh', 'Hs', \n\t'Mt', 'Ds', 'Rg', 'Cn', 'Fl', 'Lv']\n\n<<<<<<< HEAD\ndef atom_info():\n \"\"\"Returnerar en dict med atomernas namn och vikt\"\"\"\n vikter = \"H 1.00794, He 4.002602, Li 6.941, Be 9.012182, B 10.811, C 12.0107, N 14.0067, O 15.9994, F 18.9984032, Ne 20.1797, \\\nNa 22.98976928, Mg 24.3050, Al 26.9815386, Si 28.0855, P 30.973762, S 32.065, Cl 35.453, K 39.0983, Ar 39.948, Ca 40.078, Sc 44.955912, \\\nTi 47.867, V 50.9415, Cr 51.9961, Mn 54.938045, Fe 55.845, Ni 58.6934, Co 58.933195, Cu 63.546, Zn 65.38, Ga 69.723, Ge 72.64, As 74.92160, \\\nSe 78.96, Br 79.904, Kr 83.798, Rb 85.4678, Sr 87.62, Y 88.90585, Zr 91.224, Nb 92.90638, Mo 95.96, Tc 98, Ru 101.07, Rh 102.90550, Pd 106.42, \\\nAg 107.8682, Cd 112.411, In 114.818, Sn 118.710, Sb 121.760, I 126.90447, Te 127.60, Xe 131.293, Cs 132.9054519, Ba 137.327, La 138.90547, \\\nCe 140.116, Pr 140.90765, Nd 144.242, Pm 145, Sm 150.36, Eu 151.964, Gd 157.25, Tb 158.92535, Dy 162.500, Ho 164.93032, Er 167.259, Tm 168.93421, \\\nYb 173.054, Lu 174.9668, Hf 178.49, Ta 180.94788, W 183.84, Re 186.207, Os 190.23, Ir 192.217, Pt 195.084, Au 196.966569, Hg 200.59, Tl 204.3833, \\\nPb 207.2, Bi 208.98040, Po 209, At 210, Rn 222, Fr 223, Ra 226, Ac 227, Pa 231.03588, Th 232.03806, Np 237, U 238.02891, Am 243, Pu 244, Cm 247, \\\nBk 247, Cf 251, Es 252, Fm 257, Md 258, No 259, Lr 262, Rf 265, Db 268, Hs 270, Sg 271, Bh 272, Mt 276, Rg 280, Ds 281, Cn 285\"\n vikt_info = vikter.split(\", \")\n\n vikt_info = atom_info()\n print(vikt_info)\n vikt_info = atom_info()\n atom_dict = {}\n for line in vikt_info:\n atom, vikt = line.split()\n atom_dict[atom] = vikt\n return atom_dict\n\n\n=======\nfellista = []\n>>>>>>> cd4bc5ca7b4cbcd5bd2d5c50c92ab70f54a50665\nclass Syntaxfel(Exception):\n\tpass\n\ndef storeMolekyl(molekyl):\n\tfor symbol in molekyl:\n\t\tq.enqueue(symbol)\n\treturn q\n\ndef readmol():\n mol_objekt = readgroup()\n\n if q.isEmpty():\n return mol_objekt\n elif q.peek() is \")\":\n if parantes.isEmpty():\n raise Syntaxfel(\"Felaktig gruppstart vid radslutet \") \n return mol_objekt\n else:\n mol_objekt.next = readmol()\n return mol_objekt\n\ndef readgroup():\n rut_objekt = Ruta()\n if q.isEmpty():\n raise Syntaxfel(\"Felaktig gruppstart vid radslutet \")\n\n if q.peek().isdigit():\n raise Syntaxfel(\"Felaktig gruppstart vid radslutet \")\n\n if q.peek().isalpha():\n rut_objekt.atom = readAtom()\n if q.peek() is None:\n return rut_objekt\n if q.peek().isdigit():\n antal = int(number())\n rut_objekt.num = antal\n \n\n elif q.peek() is \"(\": #PATANTESER\n parantes.enqueue(q.dequeue())\n if q.peek().isdigit():\n raise Syntaxfel(\"Felaktig gruppstart vid radslutet \")\n rut_objekt.down = readmol()\n if not q.peek() is \")\":\n 
raise Syntaxfel(\"Saknad högerparentes vid radslutet \")\n\n if q.isEmpty():\n raise Syntaxfel(\"Saknad siffra vid radslutet \")\n else:\n parantes.dequeue()\n q.dequeue() #PARANTESER\n if q.isEmpty():\n raise Syntaxfel(\"Saknad siffra vid radslutet \")\n antal = int(number())\n rut_objekt.num = antal\t\t\t\n else:\n raise Syntaxfel(\"Felaktig gruppstart vid radslutet \")\n return rut_objekt\n\ndef readAtom():\n\tif q.peek().isupper():\n\t\tatom = q.dequeue()\n\t\t#print(r, \"readAtom stor bokstav\")\n\telse:\n\t\traise Syntaxfel(\"Saknad stor bokstav vid radslutet \")\n\tif not q.peek() is None:\n\t\tif q.peek().islower():\n\t\t\tatom = atom + q.dequeue()\n\tif atom in atomLista:\n\t\treturn atom\n\telse:\n\t\traise Syntaxfel(\"Okänd atom vid radslutet \")\n\ndef number(): #FIXAD DELUX\n n = \"\"\n lista = []\n if q.peek().isdigit():\n if q.peek() == \"0\":\n lista.append(q.dequeue())\n raise Syntaxfel(\"För litet tal vid radslutet \")\n elif q.peek() == \"1\":\n try:\n if q.peekNext().isdigit():\n while q.peek().isdigit():\n lista.append(q.dequeue())\n\n break\n else:\n #q.dequeue()\n raise Syntaxfel(\"För litet tal vid radslutet \")\n except:\n lista.append(q.dequeue())\n raise Syntaxfel(\"För litet tal vid radslutet \")\n \n while q.peek() != None:\n \n if q.peek().isdigit():\n lista.append(q.dequeue())\n\n else:\n break\n for i in range(len(lista)):\n n = n + lista[i]\n\n return n\n else:\n raise Syntaxfel(\"Saknad siffra vid radslutet \")\n\ndef firstError(): #FIXAD DELUX\n\tnotDequeue = \"\"\n\twhile not q.isEmpty():\n\t\tnotDequeue = notDequeue + q.dequeue()\n\treturn notDequeue\n\ndef readFormel(molekyl): #FIXAD halvt\n molekyl = molekyl.strip()\n storeMolekyl(molekyl)\n try:\n mol_objekt = readmol()\n if parantes.isEmpty is False:\n raise Syntaxfel(\"Saknad högerparentes vid radslutet \")\n print(\"Formeln är syntaktiskt korrekt\")\n return mol_objekt\n except Syntaxfel as error:\n return str(error) + firstError()\n\ndef weight\n\n\ndef main():\n \n if input(\"Vill du testa egen molekyl Y/N: \").strip() in [\"Y\",\"y\"]:\n in_data = input(\"Skriv in molekylen du vill testa: \") # väntar input\n else:\n in_data = \"Si(C3(COOH)2)4(H2O)7\"\n atom_dict = atom_info()\n print(atom_dict)\n\n if readFormel2(in_data) == \"Formeln är syntaktiskt korrekt\":\n mg = Molgrafik()\n if in_data != \"q\": # hashtag är en stoppkolss\n p = readFormel(in_data)\n firstError()\n mg.show(p)\n q.Empty() #måste rensa känkade listan\n parantes.Empty() #måste rensa känkade listan\n main()\n else:\n print(\"Fel syntax testa igen\")\n main()\n \n\n\nif __name__ == '__main__':\n\tmain()\n\n"
},
{
"alpha_fraction": 0.6486956477165222,
"alphanum_fraction": 0.6747826337814331,
"avg_line_length": 29.210525512695312,
"blob_id": "c3dd6ee2fb99b623fd73f86681bf9b397a37cb56",
"content_id": "a1aeff3a8f6f2be9c7d46c24f792fcad69fa13dc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 579,
"license_type": "no_license",
"max_line_length": 94,
"num_lines": 19,
"path": "/Labbar/Lab 9/UnitestS1.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "import unittest\nfrom L9v2 import *\n\nclass TestStringMethods(unittest.TestCase):\n\n def test_upper1(self):\n self.assertEqual(readFormel(\"Na\"), 'Formeln är syntaktiskt korrekt')\n\n def test_upper2(self):\n self.assertEqual(readFormel(\"H2O\"), 'Formeln är syntaktiskt korrekt')\n \n def test_upper3(self):\n self.assertEqual(readFormel(\"Si(C3(COOH)2)4(H2O)7\"), 'Formeln är syntaktiskt korrekt')\n\n def test_upper4(self):\n self.assertEqual(readFormel(\"Na332\"), 'Formeln är syntaktiskt korrekt')\n\nif __name__ == '__main__':\n unittest.main()\n\n"
},
{
"alpha_fraction": 0.5501017570495605,
"alphanum_fraction": 0.5546795725822449,
"avg_line_length": 33.5,
"blob_id": "9437bd73f39c07419f3a1987926d9471446b4179",
"content_id": "58c463d05eccca6c75d7a068785d79c8ced9e297",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3940,
"license_type": "no_license",
"max_line_length": 139,
"num_lines": 114,
"path": "/Labbar/Lab 9/Formelkoll_test.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "from subprocess import Popen, PIPE\nfrom sys import argv\nfrom tkinter import filedialog\n\ndef get_hammer_distance(A: str, B: str):\n distance = 0\n error_str = \"\"\n if len(A) != len(B):\n distance += abs(len(A) - len(B))\n\n str_length = len(A) if len(A) < len(B) else len(B)\n for i in range(str_length):\n if A[i] != B[i]:\n distance += 1\n error_str += '^'\n else:\n error_str += ' '\n for i in range(str_length, len(error_str)):\n error_str += '^'\n return distance, error_str\n\ndef get_subject_name():\n file_name = filedialog.askopenfilename()\n valid_name = False\n while not valid_name:\n try:\n open(file_name, 'r')\n except:\n print(\"Not a valid file\")\n file_name = input(\"Enter file path:\")\n else:\n valid_name = True\n return file_name\n\ndef test_subject(subject_name):\n try:\n input_file = open(\"test_input.txt\", 'r')\n output_file = open(\"test_output.txt\", 'r')\n except:\n print(\"Could not open input or output test files\")\n return\n\n output_data = output_file.readlines()\n input_data = input_file.readlines()\n\n print(\"Opening program...\")\n process = Popen(['python', subject_name], stdout=PIPE, stderr=PIPE, stdin= PIPE)\n print(\"Writing to program...\")\n for line in input_data:\n print(\"Input: \", line)\n process.stdin.write( bytes(line, encoding = \"utf-8\"))\n print(\"Retreiving output...\")\n\n test_output, errors = process.communicate()\n test_output = str(test_output)\n test_output = test_output[2:]\n test_output = test_output[:-1]\n test_output = test_output.replace(\"\\\\n\", \"\\n\")\n test_output = test_output.replace(\"\\\\r\", \"\")\n test_output = test_output.replace(\"\\\\xe4\", \"ä\")\n test_output = test_output.replace(\"\\\\xf6\", \"ö\")\n test_output = test_output.split(\"\\n\")\n if errors != \"\":\n print(\"File had error:\\n\", errors)\n\n line_counter = 0\n error_counter = 0\n error_list = []\n\n for line in test_output:\n if line_counter >= len(output_data):\n break\n output_data[line_counter] = output_data[line_counter].replace(\"ä\", \"ä\")\n output_data[line_counter] = output_data[line_counter].replace(\"ö\", \"ö\")\n if line != output_data[line_counter]:\n hammer_distance, error_str = get_hammer_distance(line, output_data[line_counter])\n if hammer_distance > 1:\n print(\"**********************************\")\n print(\"Wrong answer at line \", line_counter)\n print(\"Given input: \", input_data[line_counter].strip())\n print(\"Received output: \", line.strip())\n print(\"Expected output: \", output_data[line_counter].strip())\n print(\" \", error_str)\n print(\"Length difference: \", abs(len(line) - len(output_data[line_counter])))\n print(\"Hammer distance between answers: \", hammer_distance)\n print(\"**********************************\\n\")\n error_counter+=1\n \n line_counter+=1\n if not line_counter%100:\n print(\"At line \", line_counter, \" (Currently \", error_counter, \" Errors)\")\n \n print(\"*************************\")\n print(\"Total inputs:\", line_counter)\n print(\"Total number of errors\", error_counter)\n input()\n\n\n\ndef main():\n print(\"\"\"\"Welcome to lab 9 tester!\\nSelect your solution to test it. Do note that some errors in the output strings, such as a single \\\n character being wrong, might not be detected. Also, if a group ends with '(', there may be different answers, but Kattis does not \\\n check for this specifik case.\"\"\")\n if len(argv) > 1:\n subject_name = argv[1]\n else:\n subject_name = get_subject_name()\n test_subject(subject_name)\n \n\n \n\nif __name__ == \"__main__\":\n main()"
},
{
"alpha_fraction": 0.5087378621101379,
"alphanum_fraction": 0.5149514675140381,
"avg_line_length": 37.64615249633789,
"blob_id": "cc3f3053ba1ed0ceff158c4027c4aa90a87e598d",
"content_id": "a828fc7844c7c537f28929a9b3643cc7fa5f0801",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2598,
"license_type": "no_license",
"max_line_length": 107,
"num_lines": 65,
"path": "/Labbar/Lab 5/bfs_L5.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "from linkedQFile import LinkedQ\r\nfrom bintreeFile import Bintree\r\nsvenska = Bintree() # Skapar ett binärt sökträd för de svenska orden\r\ngamla = Bintree() # Skapar ett binärt sökträd för de gamla orden\r\n\r\n\r\nwith open(\"word3.txt\", \"r\", encoding=\"utf-8\") as svenskfil:\r\n for rad in svenskfil:\r\n ordet = rad.strip() \r\n if ordet in svenska:\r\n pass\r\n else:\r\n svenska.put(ordet) \r\n\r\nclass ParentNode:\r\n def __init__(self, word, parent = None):\r\n self.word = word\r\n self.parent = parent\r\n\r\n def writechain(self, child):\r\n if child is not None:\r\n self.writechain(child.parent)\r\n print(child.word)\r\n \r\n \r\ndef makechildren(nod, slutord, q):\r\n for i in range(len(nod.word)): # for loop som går igenom rang med ordets storlek ex [0,1,2]\r\n for letter in \"abcdefghijklmnopqrstuvwxyzåäö\": # for loop som går igenom varje bokstav\r\n if nod.word[i] is letter:\r\n pass\r\n else:\r\n if i == 0:\r\n new_word = letter + nod.word[1] + nod.word[2] #nya ordet när vi byter första bokstaven\r\n elif i == 1:\r\n new_word = nod.word[0] + letter + nod.word[2] #nya ordet när vi byter andra bokstaven\r\n elif i == 2:\r\n new_word = nod.word[0] + nod.word[1] + letter #nya ordet när vi byter tredje bokstaven\r\n if new_word in svenska and new_word not in gamla:\r\n if new_word == slutord:\r\n new_child = ParentNode(new_word, nod)\r\n print(\"\\n\"*3 + \"Från startord till slutord: \\n\")\r\n new_child.writechain(new_child)\r\n return True \r\n else:\r\n new_child = ParentNode(new_word, nod)\r\n q.enqueue(new_child)\r\n gamla.put(new_word)\r\n\r\ndef main():\r\n start = input(\"Ange startordet: \")\r\n slut = input(\"Ange slutordet: \")\r\n first_parent = ParentNode(start)\r\n queue = LinkedQ()\r\n queue.enqueue(first_parent)\r\n while not queue.isEmpty():\r\n nod = queue.dequeue()\r\n if makechildren(nod, slut, queue) is True:\r\n print(\"Det finns en väg till\", slut)\r\n break\r\n elif queue.isEmpty() is True:\r\n print(\"Det finns ingen väg till\", slut)\r\n\r\nmain()\r\n\r\n# Det är i princip samma som lab 4, men vi sparar vägen också"
},
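Both BFS labs feed the word queue through the hand-written LinkedQ; the standard library's collections.deque provides the same FIFO operations and could be swapped in. A sketch of the mapping:

```python
# Sketch: LinkedQ's operations map directly onto collections.deque.
from collections import deque

q = deque()
q.append("fil")        # enqueue
q.append("bok")
print(q.popleft())     # dequeue -> "fil"
print(not q)           # isEmpty -> False (one word left)
```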
{
"alpha_fraction": 0.6289308071136475,
"alphanum_fraction": 0.6415094137191772,
"avg_line_length": 20.86206817626953,
"blob_id": "407f51b64506568c0fca03e035ceac47bcdbe663",
"content_id": "eed2566b3c5611723debf9315c9e429b80425874",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 640,
"license_type": "no_license",
"max_line_length": 82,
"num_lines": 29,
"path": "/Labbar/Lab 8/Unitest.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "import unittest\n\nfrom L8 import *\n\n\nclass SyntaxTest(unittest.TestCase):\n\n def test_syntax_correct(self):\n\n self.assertEqual(syntax_control(\"Ha3\"), \"Formeln följer korrekt syntax!\")\n \n def test_no_uppercase(self):\n\n self.assertEqual(syntax_control(\"ha3\"), \"En stor bokstav saknas!\")\n\n def test_digit(self):\n\n self.assertEqual(syntax_control(\"Ha1\"), \"Siffran måste vara större än 1!\")\n\n def test(self):\n\n self.assertEqual(syntax_control(\"HH3\"), \"SyntaxFel\")\n\n def test_3(self):\n\n self.assertEqual(syntax_control(\"Ha1a\"), \"SyntaxFel\")\n\nif __name__ == '__main__':\n unittest.main()\n\n\n"
},
{
"alpha_fraction": 0.5262467265129089,
"alphanum_fraction": 0.5485564470291138,
"avg_line_length": 24.399999618530273,
"blob_id": "2c15e8e28431d7924f08fac655f80d58f7a33f4a",
"content_id": "6e54a6e9e6a16eb2ad335c87f06e3b8cc90732c8",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 766,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 30,
"path": "/Labbar/Lab 2/TrollkarlLinked.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "from LinkedQfile import LinkedQ\nimport sys\n\n\ndef main():\n\n choice = input(\"Write N to insert a number or write R to run the wizard: \")\n if choice == \"N\" or choice == \"n\":\n number = str(input(\"Skriv ordningen av korten\"))\n number = number.split()\n elif choice == \"R\" or choice == \"r\":\n number = [7, 1, 12, 2, 8, 3, 11, 4, 9, 5, 13, 6, 10]\n else:\n sys.exit(\"Bye Felicia\")\n\n for i in range(len(number)):\n number[i] = int(number[i])\n\n q = LinkedQ() # Skapar en tom kö\n for x in number: # Sätter in alla värden i kön\n q.enqueue(x)\n\n newlist = []\n while q.isEmpty() is False:\n a = q.dequeue()\n q.enqueue(a)\n b = q.dequeue()\n newlist.append(b)\n print(newlist)\nmain()\n"
},
{
"alpha_fraction": 0.635904848575592,
"alphanum_fraction": 0.6467195153236389,
"avg_line_length": 30.522727966308594,
"blob_id": "f64d7a7859991628c0503ce9252ee9899fd52a5b",
"content_id": "8aa9e9431fa81b14a58c3883b5ae14e093b33bfa",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1426,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 44,
"path": "/Labbar/Föreläsningar/Föreläsning 7.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "import random\n# Talen som ska sorteras delas upp, halva åt gången\n# Data som har samma längd\n# Urvals-sortering\n# Automagiskt: någon har löst problemet tidigare, och man behöver inte veta hur\n# Nästlade for-slingor, den ena ligger i den andra\n\n\n# Kollar inte värdet, utan positionen av värdet, tex minsta värdet låg på position 2\n# Denna metod gör exakt samma arbete oavsett hur datan ser ut\ndef urvalssortera(data):\n n = len(data)\n for i in range(n):\n minst = i\n for j in range(i+1, n):\n if data[j] < data[minst]:\n minst = j\n data[minst], data[i] = data[i], data[minst]\n\n# Bubbelsortering är något smartare metod, marginellt, den går igenom\n# listan gång på gång tills inga byten sker\n# Om listan är nästan sorterad från början räcker det med några få\n# genomgångar och då blir bubbel snabbare än urval\n# Jämför endast parvis, switchar plats på den vänstra och den högra, om vänstra > högra\n\n\ndef bubblesort(data):\n n = len(data)\n for i in range(n-1):\n done = True\n for j in range(n-i-1): # koden är fel, den jämför ej parvis\n if data[j+1] > data[j]: # jmf\n data[j+1], data[j] = data[j], data[j+1] # sw\n done = False\n if done:\n return\n\nn = 100\ndata = []\nfor i in range(n):\n data.append(random.randint(0,1000))\nprint(data)\nbubblesort(data)\nprint(data)\n"
},
{
"alpha_fraction": 0.5409185290336609,
"alphanum_fraction": 0.5475335717201233,
"avg_line_length": 24.507246017456055,
"blob_id": "106023aa633e39511402ebb146dfe42d911f4cdf",
"content_id": "a3ce7d0d0cdce7961d09c6eb911e176d3b9c9244",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 5319,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 207,
"path": "/Labbar/Lab 9/Lab9vår.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "from linkedQFile import *\nimport sys\nimport string\npar=[]\nq = LinkedQ()\n\nATOMER = ['H', 'He', 'Li', 'Be', 'B', 'C', 'N', 'O', 'F', 'Ne', 'Na',\n 'Mg', 'Al', 'Si', 'P', 'S', 'Cl', 'Ar', 'K', 'Ca', 'Sc', 'Ti', 'V',\n 'Cr', 'Mn', 'Fe', 'Co', 'Ni', 'Cu', 'Zn', 'Ga', 'Ge', 'As', 'Se',\n 'Br', 'Kr', 'Rb', 'Sr', 'Y', 'Zr', 'Nb', 'Mo', 'Tc', 'Ru', 'Rh', \n 'Pd', 'Ag', 'Cd', 'In', 'Sn', 'Sb', 'Te', 'I', 'Xe', 'Cs', 'Ba', \n 'La', 'Ce', 'Pr', 'Nd', 'Pm', 'Sm', 'Eu', 'Gd', 'Tb', 'Dy', 'Ho',\n 'Er', 'Tm', 'Yb', 'Lu', 'Hf', 'Ta', 'W', 'Re', 'Os', 'Ir', 'Pt', \n 'Au', 'Hg', 'Tl', 'Pb', 'Bi', 'Po', 'At', 'Rn', 'Fr', 'Ra', 'Ac',\n 'Th', 'Pa', 'U', 'Np', 'Pu', 'Am', 'Cm', 'Bk', 'Cf', 'Es', 'Fm', \n 'Md', 'No', 'Lr', 'Rf', 'Db', 'Sg', 'Bh', 'Hs', 'Mt', 'Ds', 'Rg', 'Cn', 'Fl', 'Lv']\n\n\nclass Syntaxfel(Exception):\n pass\n\ndef readFormel(molekyl): #NY\n\t\"\"\"<formel>::= <mol> \\n\"\"\"\n\tq = molecule(molekyl)\n\ttry:\n\t\treadMolekyl()\n\t\tif len(par) > 0:\n\t\t\traise Syntaxfel('Saknad högerparentes vid radslutet ')\n\t\treturn 'Formeln är syntaktiskt korrekt'\n\texcept Syntaxfel as error:\n\t\treturn str(error) + printQ()\n\ndef readMolekyl(): #NY\n\t\"\"\"<mol> ::= <group> | <group><mol>\"\"\"\n\t\"\"\"readmol() anropar readgroup() och sedan eventuellt sej själv\n\t(men inte om inmatningen är slut eller om den just kommit tillbaka från ett parentesuttryck)\"\"\"\n\n\treadGrupp()\n\tif q.isEmpty():\n\t\treturn\n\telif q.peek() == \")\":\n\t\tif len(par) < 1:\n\t\t\traise Syntaxfel(\"Felaktig gruppstart vid radslutet1 \") \n\t\treturn\n\telse:\n\t\treadMolekyl()\ndef readGrupp(): #Ny\n\t\"\"\"<group> ::= <atom> |<atom><num> | (<mol>) <num>\"\"\"\n\t\"\"\"readgroup() anropar antingen readatom() eller läser en parentes och anropar readmol()\"\"\"\n\n\tif q.isEmpty():\n\t\traise Syntaxfel(\"Felaktig gruppstart vid radslutet3 \")\n\telif q.peek().isdigit():\n\t\traise Syntaxfel(\"Felaktig gruppstart vid radslutet4 \")\n\n\telif q.peek().isalpha():\n\t\t#print(\"Kallar på readAtom i readGrupp\")\n\t\treadAtom()\n\t\tif q.peek() is None:\n\t\t\treturn\n\t\tif q.peek().isdigit():\n\t\t\treadNum()\n\t\treturn\n\n\telif q.peek() == \"(\":\n\t\tpar.append(q.dequeue())\n\t\treadMolekyl()\n\n\t\tif q.peek() != \")\":\n\t\t\traise Syntaxfel(\"Saknad högerparentes vid radslutet \")\n\t\t\t\t\n\n\t\tif q.isEmpty():\n\t\t\traise Syntaxfel(\"Saknad siffra vid radslutet \")\n\t\telse:\n\t\t\tpar.pop()\n\t\t\tq.dequeue()\n\t\t\tif q.isEmpty():\n\t\t\t\traise Syntaxfel(\"Saknad siffra vid radslutet \")\n\t\t\treadNum()\n\telse:\n\t\traise Syntaxfel(\"Felaktig gruppstart vid radslutet2 \")\n\n\ndef readAtom(): #NY\n\t\"\"\"<atom> ::= <LETTER> | <LETTER><letter>\"\"\"\n\n\tif q.peek().isupper():\n\t\tx = q.dequeue()\n\t\t#print(x, \"readAtom stor bokstav\")\n\telse:\n\t\traise Syntaxfel(\"Saknad stor bokstav vid radslutet \")\n\n\tif q.peek() != None:\n\t\tif q.peek().islower():\n\t\t\tx = x + q.dequeue()\n\t\t\t#print(\"Atomen är\", x)\n\t\n\tif x in ATOMER:\n\t\treturn\n\telse:\n\t\traise Syntaxfel(\"Okänd atom vid radslutet \")\n\n# Funktion molecule ska lagra alla symboler i en länkad lista\ndef molecule(indata):\n q = LinkedQ()\n for i in indata:\n q.enqueue(i)\n return q\n\n# Funktion uppercase kontrollerar ifall bokstaven är en stor bokstav\ndef uppercase(q):\n if q.peek().isupper():\n q.dequeue()\n else:\n raise Syntaxfel(\"En stor bokstav saknas!\")\n\n# Funktion lowercase kontrol\ndef lowercase(q):\n if q.peek().islower():\n q.dequeue()\n number(q)\n\n\n# kollar ifall siffer används rätt\ndef 
number(q):\n if q.peek() is not None:\n b = q.peek()\n print(b)\n if q.peek().isdigit():\n \n if q.peek() is \"0\":\n raise Syntaxfel(\"får ej börja med noll\")\n elif q.peek() is \"1\":\n q.dequeue()\n if q.peek() is None:\n raise Syntaxfel(\"Siffran måste vara större än 1!\")\n\n else:\n while q.peek() is not None:\n if q.peek().isdigit():\n q.dequeue()\n pass\n else:\n raise Syntaxfel(\"SyntaxFel2\")\n elif q.peek().isdigit():\n q.dequeue()\n number(q)\n\n \n else:\n raise Syntaxfel(\"Siffran måste vara större än 1\")\n\n elif q.peek().isupper():\n uppercase(q)\n\n\n else:\n raise Syntaxfel(\"SyntaxFel3\")\n\"\"\"\n# kollar om molekylen har rätt syntax\ndef syntax_control(indata):\n q = molecule(indata)\n try:\n readGrupp()\n uppercase(q)\n lowercase(q)\n number(q)\n except Syntaxfel as fel:\n return str(fel)\n return \"Formeln följer korrekt syntax!\"\n\"\"\"\ndef printQ():\n\trest = \"\"\n\twhile not q.isEmpty():\n\t\trest = rest + q.dequeue()\n\treturn rest\n\ndef syntax_control(molekyl):\n\tmolecule(molekyl)\n\ttry:\n\t\treadMolekyl()\n\t\tif len(par) > 0:\n\t\t\traise Syntaxfel('Saknad högerparentes vid radslutet ')\n\t\treturn 'Formeln är syntaktiskt korrekt'\n\texcept Syntaxfel as error:\n\t\treturn str(error) + printQ()\n\ndef main():\n indata = input(\"Skriv en molekyl: \").strip() # kolla om den är tom\n resultat = syntax_control(indata)\n print(resultat)\n\ndef main2(x): # kolla om den är tom\n resultat = syntax_control(x)\n print(resultat)\n\nx1 = \"Na\"\nx2 = \"H2O\"\nx3 = \"Si(C3(COOH)2)4(H2O)7\"\nx4 = \"Na332\"\n\nif __name__ == '__main__':\n #main2(x1)\n #main2(x2)\n main2(x3)\n #main2(x4)\n\n \n\n"
},
{
"alpha_fraction": 0.4881889820098877,
"alphanum_fraction": 0.4921259880065918,
"avg_line_length": 19.83333396911621,
"blob_id": "916ca1c3d6ca58326e8be14a4d4de706f72f5cea",
"content_id": "367410443e0ba37aee67e7a2d450672f81227cdc",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 254,
"license_type": "no_license",
"max_line_length": 42,
"num_lines": 12,
"path": "/Labbar/Lab 10/Ruta.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "class Node:\n def __init__(self, value):\n self.value = value\n self.next = None\n\n\nclass Ruta:\n def __init__(self, atom=\"( )\", num=1):\n self.atom = atom\n self.num = num\n self.next = None\n self.down = None\n\n\n\n\n"
},
{
"alpha_fraction": 0.5872340202331543,
"alphanum_fraction": 0.5914893746376038,
"avg_line_length": 23.473684310913086,
"blob_id": "bf1020c465ee6d3b60bc6be94e0d40028690686a",
"content_id": "8d47c6a5544540bd5e1e2a6820bb7e0a383d6458",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 474,
"license_type": "no_license",
"max_line_length": 74,
"num_lines": 19,
"path": "/Labbar/Lab 2/ArrayQ.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "from array import array\n\n\nclass ArrayQ:\n\n def __init__(self):\n self.__deck = array('b') # \"b\" bestämmer vad vad som ska matas in\n\n def enqueue(self, x):\n self.__deck.append(x) # lägger till i slutet av listan\n\n def dequeue(self):\n return self.__deck.pop(0) # plockar ut den första i listan\n\n def isEmpty(self):\n return len(self.__deck) == 0 # kollar om listan är tom\n\n def printArray(self):\n print(self.__deck)\n\n\n\n\n\n"
},
{
"alpha_fraction": 0.5709803700447083,
"alphanum_fraction": 0.6039215922355652,
"avg_line_length": 30.121952056884766,
"blob_id": "dccfb1de5c3221aba4ca799ddfea779d99a97d48",
"content_id": "5858f53959d1c9e130ffa02f4eb96b510fbdf9e6",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1297,
"license_type": "no_license",
"max_line_length": 89,
"num_lines": 41,
"path": "/Labbar/Lab 7/Del 1.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "import timeit\nclass DictHash:\n def __init__(self):\n self.dict = {}\n\n def __getitem__(self, nyckel):\n return self.dict[nyckel]\n\n def lagra(self, nyckel, värde):\n self.dict[nyckel] = värde\n\nclass Song:\n def __init__(self, trackid, artistnamn, sångtitel, låtlängd, år):\n # artistid\tartistnamn\tsångtitel\tlåtlängd\tår (inläsnings strukturen)\n self.trackid = trackid\n self.artistnamn = artistnamn\n self.sångtitel = sångtitel\n self.låtlängd = låtlängd\n self.är = år\n\ndef readfile(fil, n): \n with open(fil, encoding='utf-8') as text:\n for r in text:\n rad = r.strip('\\n').split('\\t')\n song = Song(rad[0], rad[1], rad[2], rad[3], rad[4])\n hashtabell.lagra(rad[0], song)\n if n < len(hashtabell.dict):\n break\n\nhashtabell = DictHash()\ndef main():\n fil = \"sang-artist-data.txt\"\n readfile(fil, 999987) #999987 #lisstorlek\n tid_dict = timeit.timeit(stmt = lambda: hashtabell[\"AR30R5E1187B9AD78A\"], number = 1)\n print(\"Det tog\", round(tid_dict, 40), \"sekunder att slå upp det i dictionary\")\n print(hashtabell[\"AR30R5E1187B9AD78A\"].artistnamn)\n\nif __name__ == '__main__':\n main()\n\n# Nej det tar dubbelt så långt tid som att indexa"
},
{
"alpha_fraction": 0.5772399306297302,
"alphanum_fraction": 0.5777549147605896,
"avg_line_length": 29.322580337524414,
"blob_id": "a1063c98852d9e2c9e5f2edeb4016c8094e9aa5e",
"content_id": "78abffdfd3d0e2a1805682695bf4c9561525f4b3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1966,
"license_type": "no_license",
"max_line_length": 119,
"num_lines": 62,
"path": "/Labbar/Lab 5/bintreeFile.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "class Node: \r\n def __init__(self, value): # Varje nod har ett värde och 2 pekare som har varsin riktning\r\n self.value = value\r\n self.left = None\r\n self.right = None\r\n\r\n\r\nclass Bintree: \r\n def __init__(self):\r\n self.root = None\r\n\r\n def put(self, newvalue):\r\n if self.root is None:\r\n self.root = Node(newvalue)\r\n else:\r\n self.putta(self.root, newvalue)\r\n\r\n def __contains__(self, value):\r\n return self.finns(self.root, value)\r\n\r\n def write(self):\r\n self.skriv(self.root)\r\n print(\"\\n\")\r\n\r\n def putta(self, thisNode, value):\r\n if value < thisNode.value:\r\n if thisNode.left is None:\r\n thisNode.left = Node(value)\r\n else:\r\n self.putta(thisNode.left, value)\r\n else:\r\n if thisNode.right is None:\r\n thisNode.right = Node(value)\r\n else:\r\n self.putta(thisNode.right, value)\r\n\r\n def finns(self, thisNode, value):\r\n if thisNode is None:\r\n return False\r\n elif value == thisNode.value:\r\n return True\r\n elif value < thisNode.value:\r\n return self.finns(thisNode.left, value)\r\n else: \r\n return self.finns(thisNode.right, value)\r\n\r\n def skriv(self, thisNode):\r\n if thisNode is not None:\r\n self.skriv(thisNode.left)\r\n print(thisNode.value)\r\n self.skriv(thisNode.right)\r\n\r\n# kunna rita och berätta hur binärträdet byggs upp,\r\n\r\n# visa hur du testat din klass för binära träd,\r\n\r\n# förklara varför det går snabbt att söka i ett binärträd \r\n# För varje sökning så blir listan halverad, tidskomplexiteten är genomsnittligt log n, och i värsta fall är den ordo n\r\n\r\n# förklara idén bakom att ha put som anropar putta, etc: \r\n# Så att man får en smidig rekursiv funktion\r\n# För att man inte kan ha en rekursion inuti i en metod\r\n"
},
{
"alpha_fraction": 0.6294277906417847,
"alphanum_fraction": 0.6539509296417236,
"avg_line_length": 33.11627960205078,
"blob_id": "31641f751eab585926bf85f82fc62815ee2803da",
"content_id": "fd7f3daee9354f2d6e894f1590dde6ee6ab63ec2",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1473,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 43,
"path": "/Labbar/Lab 9/UnitestS2.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "import unittest\nfrom L9v2 import *\n\nclass TestStringMethods(unittest.TestCase):\n\n def test_upper1(self):\n self.assertEqual(readFormel(\"C(Xx4)5\"), 'Okänd atom vid radslutet 4)5')\n\n def test_upper2(self):\n self.assertEqual(readFormel(\"C(OH4)C\"), 'Saknad siffra vid radslutet C')\n \n def test_upper3(self):\n self.assertEqual(readFormel(\"C(OH4C\"), 'Saknad högerparentes vid radslutet ')\n \n def test_upper4(self):\n self.assertEqual(main2(\"H2O)Fe\"), 'Felaktig gruppstart vid radslutet )Fe')\n \n def test_upper5(self):\n self.assertEqual(readFormel(\"H0\"), 'För litet tal vid radslutet ')\n\n\n def test_upper6(self):\n self.assertEqual(readFormel(\"H1C\"), 'För litet tal vid radslutet C')\n\n def test_upper7(self):\n self.assertEqual(readFormel(\"H02C\"), 'För litet tal vid radslutet 2C')\n def test_upper8(self):\n self.assertEqual(readFormel(\"Nacl\"), 'Saknad stor bokstav vid radslutet cl')\n def test_upper9(self):\n self.assertEqual(readFormel(\"a\"), 'Saknad stor bokstav vid radslutet a')\n\n \n def test_upper10(self):\n self.assertEqual(main2(\"(Cl)2)3\"), 'Felaktig gruppstart vid radslutet )3')\n \n\n def test_upper11(self):\n self.assertEqual(readFormel(\")\"), 'Felaktig gruppstart vid radslutet )')\n def test_upper12(self):\n self.assertEqual(readFormel(\"2\"), 'Felaktig gruppstart vid radslutet 2')\n \nif __name__ == '__main__':\n unittest.main()\n\n"
},
{
"alpha_fraction": 0.5104602575302124,
"alphanum_fraction": 0.5439330339431763,
"avg_line_length": 19.545454025268555,
"blob_id": "02dfba9ce207db3fac6ef31461243dc02b37f81a",
"content_id": "4c9749dc49140c9f83d43213539fedade59808e3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 243,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 11,
"path": "/Labbar/Lab 5/test.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "def utskrift(lista):\r\n if len(lista) > 0:\r\n\r\n utskrift(lista[1:]) # När vi byter plats på printen och utskrift så skrivs listan omvänd\r\n \r\n print(lista[0])\r\n\r\n \r\n\r\nlista = [1, 2, 3, 4, 5]\r\nutskrift(lista) \r\n"
},
{
"alpha_fraction": 0.6044362187385559,
"alphanum_fraction": 0.6321626901626587,
"avg_line_length": 23.606060028076172,
"blob_id": "85d5e9f2efed058672e881aa73582473bf1fcf9d",
"content_id": "b2f3ead0cadce8aea6cc762648bf08f6a3d84586",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1633,
"license_type": "no_license",
"max_line_length": 80,
"num_lines": 66,
"path": "/Labbar/Lab 10/Del2.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "class HashNode:\n\tdef __init__(self, nyckel, data, pekare = None):\n\t\tself.nyckel = nyckel\n\t\tself.data = data\n\t\tself.pekare = pekare # möjliggör länkad lista\n\n\tdef __str__(self): # den skriver ut shittet\n\t\tif self.nyckel != None and self.data != None:\n\t\t\treturn \"Med nyckel: \" + str(self.nyckel) + \" får vi värdet \" + str(self.data)\n\t\telse:\n\t\t\treturn None\n\n\nclass Hashtabell:\n\tdef __init__(self, storlek):\n\t\tself.storlek = storlek \n\t\tself.platser = [None] * self.storlek\n\t\tself.krockar = 0\n\n # Från föreläsning\n\n\tdef hashfunk(self, nyckel):\n\t\tstart = 0\n\t\tfor i in nyckel[::-1]:\n\t\t\tstart = start*2000 + ord(i)\n\t\t\t# start = start*64 + ord(i)\n\t\treturn start%self.storlek\n\n\tdef addera(self, nyckel, data): # hanterar krockar\n\t\ti = self.hashfunk(nyckel)\n\t\tif self.platser[i] is None:\n\t\t\tself.platser[i] = HashNode(nyckel, data)\n\t\telse:\n\t\t\tself.krockar += 1\n\t\t\tkrock = self.platser[i]\n\t\t\tself.platser[i] = HashNode(nyckel, data)\n\t\t\tself.platser[i].pekare = krock\n\n\tdef sök(self, nyckel):\n\t\ti = self.hashfunk(nyckel)\n\t\tnod = self.platser[i]\n\n\t\twhile nod is not None:\n\t\t\tif nod.nyckel == nyckel:\n\t\t\t\treturn nod\n\t\t\tnod = nod.pekare\n\t\traise KeyError\n\ndef readfile(fil, a ): \n with open(fil, encoding='utf-8') as text:\n for r in text:\n rad = r.strip('\\n').split('\\t')\n a.addera(rad[2], rad[1])\n\n\ndef main():\n fil = \"sang-artist-data.txt\" #\"unique_tracks.txt\" \n a = Hashtabell(3000017)\n readfile(fil, a) # 999987\n print(a.sök(\"Deserve\")) # TRNTAUZ128F149BA49 ARS5DE71187B99A194\n print(\"Antal krockar:\", a.krockar)\n\nif __name__ == '__main__':\n\tmain()\n\n\t# hashning funkar som en dictionary"
},
{
"alpha_fraction": 0.5007256865501404,
"alphanum_fraction": 0.5050798058509827,
"avg_line_length": 26.559999465942383,
"blob_id": "21c03609270de97b28ae628f7ff058c86f1cb3ca",
"content_id": "f363a3df1bbcf5a2a8efbc180c68da8043e3911d",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 689,
"license_type": "no_license",
"max_line_length": 63,
"num_lines": 25,
"path": "/Labbar/Lab 3/bintreeRun.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "from bintreeFile import Bintree\n\n\nsvenska = Bintree()\nwith open(\"word3.txt\", \"r\", encoding=\"utf-8\") as svenskfil:\n for rad in svenskfil:\n ordet = rad.strip() \n if ordet in svenska:\n print(ordet, end=\" \")\n else:\n svenska.put(ordet) \nprint(\"\\n\")\n\nengelska = Bintree()\nwith open(\"engelska.txt\", \"r\", encoding=\"utf-8\") as engelskfil:\n for rad in engelskfil:\n rad = rad.strip().split()\n for ordet in rad:\n if ordet in engelska:\n pass\n else:\n engelska.put(ordet)\n if ordet in svenska:\n print(ordet, end=\" \")\nprint(\"\\n\")\n"
},
{
"alpha_fraction": 0.5375335216522217,
"alphanum_fraction": 0.5603216886520386,
"avg_line_length": 26.66666603088379,
"blob_id": "46b5f2834f00365a366ce6beca4403e2b443c5c1",
"content_id": "41a53c751bd5da8fb571b38885c3195fa240e507",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 747,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 27,
"path": "/Labbar/Lab 2/TrollkarlArray.py",
"repo_name": "MoodSherzad/Tillampad-Datalogi-KTH",
"src_encoding": "UTF-8",
"text": "from ArrayQ import ArrayQ\n\n\ndef main():\n choice = input(\"Write N to insert a number or write R to run the wizard: \")\n if choice == \"N\" or choice == \"n\":\n number = str(input(\"Skriv ordningen av korten\"))\n number = number.split()\n elif choice == \"R\" or choice == \"r\":\n number = [7, 1, 12, 2, 8, 3, 11, 4, 9, 5, 13, 6, 10]\n else:\n quit\n for i in range(len(number)):\n number[i] = int(number[i])\n queue = ArrayQ()\n newlist = ArrayQ()\n for x in number: # lägger in korten i objektet\n queue.enqueue(x)\n\n while queue.isEmpty() is False:\n a = queue.dequeue()\n queue.enqueue(a)\n b = queue.dequeue()\n newlist.enqueue(b)\n print(newlist.printArray())\n\nmain()"
}
] | 28 |
enemyclanraid/telbot
|
https://github.com/enemyclanraid/telbot
|
8dedfa4944764f42266db310da8e31646ea2a9b7
|
c4799edd461ab5af67eb61eb544863226a65ab4b
|
217191370fc4d1e657dde9c7ac6639697b0ff4bf
|
refs/heads/master
| 2022-12-05T21:50:21.559363 | 2022-11-27T07:31:20 | 2022-11-27T07:31:20 | 292,773,485 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6286982297897339,
"alphanum_fraction": 0.6301774978637695,
"avg_line_length": 28.434782028198242,
"blob_id": "1931ec62d7276e0d638a09cbce8c51d4b2944796",
"content_id": "1f9bfdd40ddcd6a9836065b76edf0a77cbb9b4d7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 781,
"license_type": "no_license",
"max_line_length": 97,
"num_lines": 23,
"path": "/b1.py",
"repo_name": "enemyclanraid/telbot",
"src_encoding": "UTF-8",
"text": "#! /usr/bin/env python\n# -*- coding: utf-8 -*-\nimport telebot\n\nbot = telebot.TeleBot('+_+')\n\[email protected]_handler(commands=['start'])\ndef start_message(message):\n bot.send_message(message.chat.id, 'Привет, ты написал мне /start')\n\[email protected]_handler(content_types=['text'])\ndef text_handler(message):\n text = message.text.lower()\n chat_id = message.chat.id\n if text == \"привет\":\n bot.send_message(chat_id, 'Привет, я бот - парсер хабра.')\n elif text == \"как дела?\":\n bot.send_message(chat_id, 'Хорошо, а у тебя?')\n \n else:\n bot.send_message(chat_id, 'Простите, я ваc не понял :(, хотите занести ответ в базу????')\n\nbot.polling()"
},
{
"alpha_fraction": 0.519934892654419,
"alphanum_fraction": 0.7111472487449646,
"avg_line_length": 16.811594009399414,
"blob_id": "4714efbdff8d0aa0fdf14f2fed705d8f1250af03",
"content_id": "0f368b1c803c56720bcebe4be857bf8e1ce592e0",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 1229,
"license_type": "no_license",
"max_line_length": 33,
"num_lines": 69,
"path": "/requirements.txt",
"repo_name": "enemyclanraid/telbot",
"src_encoding": "UTF-8",
"text": "apturl==0.5.2\nasn1crypto==0.24.0\nBrlapi==0.6.6\ncertifi==2018.1.18\nchardet==3.0.4\nchrome-gnome-shell==0.0.0\ncommand-not-found==0.3\ncryptography==2.1.4\ncupshelpers==1.0\ncycler==0.10.0\ndefer==1.0.6\ndistro-info===0.18ubuntu0.18.04.1\nhttplib2==0.9.2\nidna==2.6\nkeyring==10.6.0\nkeyrings.alt==3.0\nlanguage-selector==0.1\nlaunchpadlib==1.10.6\nlazr.restfulclient==0.13.5\nlazr.uri==1.0.3\nlouis==3.5.0\nmacaroonbakery==1.1.3\nMako==1.0.7\nMarkupSafe==1.0\nmatplotlib==2.1.1\nnetifaces==0.10.4\nnumpy==1.13.3\noauth==1.0.1\nolefile==0.45.1\npexpect==4.2.1\nPillow==5.1.0\npipenv==2020.8.13\npower==1.4\nprotobuf==3.0.0\npsutil==5.4.2\npycairo==1.16.2\npycrypto==2.6.1\npycups==1.9.73\npygobject==3.26.1\npymacaroons==0.13.0\nPyNaCl==1.1.2\npyparsing==2.2.0\npyRFC3339==1.0\npyTelegramBotAPI==3.7.3\npython-apt==1.6.4+zorin1\npython-dateutil==2.6.1\npython-debian==0.1.32\npytz==2018.3\npyxdg==0.25\nPyYAML==3.12\nreportlab==3.4.0\nrequests==2.18.4\nrequests-unixsocket==0.1.5\nSecretStorage==2.3.1\nsimplejson==3.13.2\nsix==1.11.0\nssh-import-id==5.7\nsystem-service==0.3\nubuntu-drivers-common==0.0.0\nufw==0.36\nurllib3==1.22\nvirtualenv==15.1.0\nvirtualenv-clone==0.5.4\nwadllib==1.3.2\nxkit==0.0.0\nzope.interface==4.3.2\nzorin-appearance==3.0\nzorin-connect==1.0\nzorin-exec-guard==1.0\n"
}
] | 2 |
MOSA96/GA_MT
|
https://github.com/MOSA96/GA_MT
|
a25bb735f1cde5657ab259923d42e9a0a0ad5b1e
|
b74613e261439c0086ec972a974934cf892195ec
|
f03a4f1f2c604061386acfd6e1c924ac676646c4
|
refs/heads/main
| 2023-02-27T02:09:50.331970 | 2021-04-23T04:35:58 | 2021-04-23T04:35:58 | 334,563,530 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5781479477882385,
"alphanum_fraction": 0.5879470705986023,
"avg_line_length": 26.958904266357422,
"blob_id": "74935a0d587940f34b64373e571971865d198f36",
"content_id": "525fe462448e99a87416e29faa5ce149c0730242",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2042,
"license_type": "no_license",
"max_line_length": 90,
"num_lines": 73,
"path": "/AG_cosmologico_lambda_V2.py",
"repo_name": "MOSA96/GA_MT",
"src_encoding": "UTF-8",
"text": "import pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\n\n#Calculamos valores de H para cada valor z para cada parámetros\ndef h_square(z,list_cdm):\n h_dict = {}\n \n for i in list_cdm:\n h_dict[i] = list(np.sqrt((H0**2)*(i*(1+z)**3+ (1-i) )))\n \n return h_dict\n\n#Calculamos \n\ndef chi_square(z, H_dat, cdm, sigma):\n #Calculamos H para cada Omega\n H_th = h_square(z,cdm)\n lista2 = []\n \n for i in H_th:\n resultado = 0\n lista = H_th[i]\n for j in range(len(lista)):\n resultado += (lista[j]-H_dat[j])**2 / sigma[j]\n \n lista2.append(resultado)\n \n return lista2 \n \ndef _obtener_fitness(z, H_dat, padres, sigma):\n _fitness = chi_square(z, H_dat, padres, sigma)\n Pfitness = list(zip(padres,_fitness))\n Pfitness.sort(key = lambda x: x[1], reverse=False)\n mejor_padre, mejor_fitness = Pfitness[0]\n return round(mejor_padre,6), round(mejor_fitness, 6), Pfitness\n\n\n\ndef mutacion(z, H_dat, padres, sigma, factor_mutacion):\n \n n = int(len(H_dat))\n padre, fitness, poblacion = _obtener_fitness(z, H_dat, padres, sigma)\n hijos = np.random.normal(padre, factor_mutacion, size=17)\n \n return hijos\n\n\ndef AG_simple(z, H_dat, padres, sigma, max_iter):\n \n Poblacion = {}\n Historial=[]\n f_mutacion = 0.2\n \n mejor_padre, mejor_fitness, poblacion = _obtener_fitness(z, H_dat, padres, sigma)\n padre = mutacion(z, H_dat, padres, sigma, f_mutacion) \n \n for i in range(1, max_iter):\n \n padre = mutacion(z, H_dat, padre, sigma, f_mutacion)\n padre_actual, fitness_actual, poblacion = _obtener_fitness(z, H_dat, padre, sigma)\n \n Poblacion[i] = poblacion\n \n if fitness_actual < mejor_fitness:\n mejor_padre = padre_actual\n mejor_fitness = fitness_actual \n \n Historial.append((i, fitness_actual))\n \n f_mutacion = f_mutacion/2 \n \n return mejor_padre, mejor_fitness, Historial, Poblacion\n"
},
{
"alpha_fraction": 0.5645239353179932,
"alphanum_fraction": 0.5722571015357971,
"avg_line_length": 27.898550033569336,
"blob_id": "6f3530bb07241bfb631a2cdd29b7d33acaec17c7",
"content_id": "70e0aa2390b9eee041d971e8c540d105d04dd86c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2070,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 69,
"path": "/AG_cosmologico_lambda.py",
"repo_name": "MOSA96/GA_MT",
"src_encoding": "UTF-8",
"text": "import pandas as pd\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\n\r\n#Definimos ecuaciones \r\n\r\n#Calculamos valores de H para cada valor z para cada parámetros\r\ndef h_square(z,list_cdm):\r\n h_dict = {}\r\n \r\n for i in list_cdm:\r\n h_dict[i] = list(np.sqrt((H0**2)*(i*(1+z)**3+ (1-i) )))\r\n \r\n return h_dict\r\n\r\n#Calculamos \r\n\r\ndef chi_square(z, H_dat, cdm, sigma):\r\n #Calculamos H para cada Omega\r\n H_th = h_square(z,cdm)\r\n lista2 = []\r\n \r\n for i in H_th:\r\n resultado = 0\r\n lista = H_th[i]\r\n for j in range(len(lista)):\r\n resultado += (lista[j]-H_dat[j])**2 / sigma[j]\r\n \r\n lista2.append(resultado)\r\n \r\ndef _obtener_fitness(z, H_dat, padres, sigma):\r\n _fitness = chi_square(z, H_dat, padres, sigma)\r\n Pfitness = list(zip(padres,_fitness))\r\n Pfitness.sort(key = lambda x: x[1], reverse=False)\r\n mejor_padre, mejor_fitness = Pfitness[0]\r\n return round(mejor_padre,6), round(mejor_fitness, 6)\r\n\r\n\r\ndef mutacion(z, H_dat, padres, sigma):\r\n \r\n n = int(len(H_dat))\r\n puntajes = chi_square(z, H_dat, padres, sigma)\r\n padres = np.array(padres)\r\n hijos = np.random.choice(padres, size=n, p = puntajes / sum(puntajes))\r\n hijos = hijos.tolist()\r\n inferior = min(hijos)\r\n superior = max(hijos)\r\n hijos = hijos + np.random.uniform(0, 1-superior, size = n)\r\n \r\n return hijos.tolist()\r\n\r\n\r\ndef AG_simple(z, H_dat, padres, sigma, max_iter):\r\n Historial=[]\r\n Mejores_padres = []\r\n \r\n mejor_padre, mejor_fitness = _obtener_fitness(z, H_dat, padres, sigma)\r\n \r\n for i in range(1, max_iter):\r\n padre = mutacion(z, H_dat, padres, sigma)\r\n padre_actual, fitness_actual = _obtener_fitness(z, H_dat, padre, sigma)\r\n \r\n if fitness_actual < mejor_fitness:\r\n mejor_padre = padre_actual\r\n mejor_fitness = fitness_actual \r\n \r\n Historial.append((i, np.min(chi_square(z, H_dat, padre, sigma))))\r\n \r\n return mejor_padre, mejor_fitness,Historial\r\n\r\n\r\n\r\n"
},
{
"alpha_fraction": 0.6286438703536987,
"alphanum_fraction": 0.6337135434150696,
"avg_line_length": 41.94444274902344,
"blob_id": "40aa7a3bf34496f09c8756e2eba98e95cf3b1820",
"content_id": "a337b9cf508658b590fec8e0d270b850be98db94",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 789,
"license_type": "no_license",
"max_line_length": 96,
"num_lines": 18,
"path": "/AG_optimizado.py",
"repo_name": "MOSA96/GA_MT",
"src_encoding": "UTF-8",
"text": "def AG_simple(padres, funcion_fitness, inicio, fin, max_iter=100):\r\n mejor_padre, mejor_fitness = _obtener_fitness(padres, funcion_fitness)\r\n \r\n for i in range (1, max_iter):\r\n #Seleccionamos a los mejores padres, mutamos y creamos generacion hija\r\n padres = mutacion(padres, funcion_fitness= funcion_fitness)\r\n padre_actual, fitness_actual = _obtener_fitness(padres, funcion_fitness=funcion_fitness)\r\n \r\n #Actualizamos valores de mejor padre y fitness\r\n \r\n if fitness_actual > mejor_padre:\r\n mejor_padre = padre_actual\r\n mejor_fitness = fitness_actual\r\n \r\n padre_actual, fitness_actual = _obtener_fitness(padres, funcion_fitness)\r\n\r\n \r\n return mejor_padre, mejor_fitness"
},
{
"alpha_fraction": 0.628819465637207,
"alphanum_fraction": 0.6378472447395325,
"avg_line_length": 39.14285659790039,
"blob_id": "1fd7673217b62c08ebbad06d863fece6409bb74a",
"content_id": "fb1a150d1bdd0155a12941ff8f47d4ca59403f41",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2890,
"license_type": "no_license",
"max_line_length": 144,
"num_lines": 70,
"path": "/AG.py",
"repo_name": "MOSA96/GA_MT",
"src_encoding": "UTF-8",
"text": "import numpy as np #Herramiento de computo científico\r\nimport matplotlib.pyplot as plt #Herramientas para gráficar \r\nfrom scipy.optimize import minimize #Encontrar máximos y mínimos\r\nimport time #Tiempos de ejecución\r\n\r\n\r\n#Definimos la función mutación\r\n\r\ndef mutacion(padres, funcion_fitness):\r\n n = int(len(padres))\r\n puntajes = funcion_fitness(padres)\r\n #Tomamos valores mayores que cero\r\n idx = puntajes > 0\r\n puntajes = puntajes[idx]\r\n padres = np.array(padres)[idx]\r\n #Creamos generacion hija\r\n hijos = np.random.choice(padres, size=n, p = puntajes / puntajes.sum())\r\n hijos = hijos + np.random.uniform(-0.51, 0.51, size = n)\r\n\r\n return hijos.tolist()\r\n\r\n#Creamos listas vacías para guardar tiempo de ejecución\r\ndef _obtener_fitness(padres, funcion_fitness):\r\n _fitness = fitness(padres)\r\n PFitness = list(zip(padres, _fitness))\r\n PFitness.sort(key = lambda x: x[1], reverse=True)\r\n mejor_padre, mejor_fitness = PFitness[0]\r\n return round(mejor_padre,4), round(mejor_fitness, 4)\r\n \r\n \r\n def AG(padres, funcion_fitness, inicio, fin, max_iter=100):\r\n #Lista para guardar datos\r\n Historial=[]\r\n Mejores_padres = []\r\n #Datos generación cero\r\n mejor_padre, mejor_fitness = _obtener_fitness(padres, funcion_fitness)\r\n \r\n print('generacion {}| mejor fitness{}| fitness actual {} | padre_actual {}'.format(0, mejor_fitness, mejor_fitness, mejor_padre))\r\n \r\n #Graficamos\r\n x = np.linspace(start=inicio, stop=fin, num=1000)\r\n plt.plot(x, funcion_fitness(x))\r\n plt.scatter(padres, funcion_fitness(padres), marker='o')\r\n \r\n for i in range (1, max_iter):\r\n #Seleccionamos a los mejores padres, mutamos y creamos generacion hija\r\n padres = mutacion(padres, funcion_fitness= funcion_fitness)\r\n padre_actual, fitness_actual = _obtener_fitness(padres, funcion_fitness=funcion_fitness)\r\n \r\n #Actualizamos valores de mejor padre y fitness\r\n \r\n if fitness_actual > mejor_padre:\r\n mejor_padre = padre_actual\r\n mejor_fitness = fitness_actual\r\n \r\n padre_actual, fitness_actual = _obtener_fitness(padres, funcion_fitness)\r\n \r\n if i % 10 == 0:\r\n print('generacion {}| mejor fitness {}| fitness actual {} | padre actual {}'.format(i, mejor_fitness, fitness_actual, padre_actual))\r\n \r\n Historial.append((i, np.max(funcion_fitness(padres)))) \r\n Mejores_padres.append(mejor_padre)\r\n \r\n plt.scatter(padres, funcion_fitness(padres))\r\n plt.scatter(mejor_padre, funcion_fitness(mejor_padre), marker = '.', c = 'b', s = 100)\r\n plt.ioff() \r\n \r\n print('generacion {}| mejor_padre {}| mejor_fitness {}'.format(i, mejor_padre, mejor_fitness))\r\n \r\n return mejor_padre, mejor_fitness, Historial, Mejores_padres\r\n"
},
{
"alpha_fraction": 0.3638497591018677,
"alphanum_fraction": 0.4577464759349823,
"avg_line_length": 21.66666603088379,
"blob_id": "f152b953882c310471d94cc7a033f1948941c118",
"content_id": "e77fbd27150b7d76cc2e51433809850a0c754c9e",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 426,
"license_type": "no_license",
"max_line_length": 86,
"num_lines": 18,
"path": "/Funciones.py",
"repo_name": "MOSA96/GA_MT",
"src_encoding": "UTF-8",
"text": "#Definimos funciones a optimizar\r\n\r\ndef _fitness(x):\r\n if x > -20 and x < 20:\r\n y = (x**2+x)*np.cos(2*x) + x**2\r\n return round(y, 6)\r\n else:\r\n return 0\r\n \r\ndef _polynomial(x):\r\n y = - x**6/60 - x**5/50 + x**4/2 + 2*x**3/3 - 3.2*x**2 - 6.4*x\r\n \r\n return y\r\n\r\n\r\ndef _trigonometric(x):\r\n y = np.sin(3*x + 45)**2 + 0.9*np.sin(9*x)**3 - np.sin(15*x + 50)*np.cos(2*x - 30)\r\n return y\r\n"
},
{
"alpha_fraction": 0.8366013169288635,
"alphanum_fraction": 0.8366013169288635,
"avg_line_length": 29.600000381469727,
"blob_id": "1eec5f0f8c036f3b2e64fae91c76b3dc6f683a84",
"content_id": "f0631161b39c2af159139d5081f63296ad1b1a07",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 158,
"license_type": "no_license",
"max_line_length": 58,
"num_lines": 5,
"path": "/README.md",
"repo_name": "MOSA96/GA_MT",
"src_encoding": "UTF-8",
"text": "# Algoritmos genéticos y metaheurísticos\n\nAlgoritmo genético para funciones de una variable.\n\nAlgoritmo genético para optimización de modelo Lambda CDM.\n"
}
] | 6 |
romdespoel/python_panorama
|
https://github.com/romdespoel/python_panorama
|
68109cde6cc64331f7246ff160f3e562a00482c8
|
8c8ff88dd139802b8ff545dd43c283f0e7b4f158
|
b5ba008b73b610fd53303b74cbbdb714435b17d3
|
refs/heads/master
| 2020-04-09T14:21:11.485058 | 2019-06-05T14:16:17 | 2019-06-05T14:16:17 | 160,394,855 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5987260937690735,
"alphanum_fraction": 0.6210191249847412,
"avg_line_length": 16.85714340209961,
"blob_id": "031f7c2b8e9c9ef2773ceb8c57d0aa8989d6a8eb",
"content_id": "682be279b18c492e23413b4927002caa0d34e74f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 628,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 35,
"path": "/video_stitch.py",
"repo_name": "romdespoel/python_panorama",
"src_encoding": "UTF-8",
"text": "import cv2\nimport numpy as np\nimport sys\n\ndef take_strip(img):\n height, width, _ = img.shape\n half = int(width/2)\n return img[0:, half: half+4]\n\ndef stitch(img, strip):\n return np.concatenate((img, strip), axis=1)\n\ncap = cv2.VideoCapture(sys.argv[1])\n\nret, img = cap.read()\nimg = take_strip(img)\n\nwhile(cap.isOpened()):\n\n ret, frame = cap.read()\n\n if ret:\n strip = take_strip(frame)\n img = stitch(img, strip)\n else:\n break\n\nname = sys.argv[1]\nname = name.split(\".\")\ncv2.imwrite(name[0]+\".jpg\", img)\n#cv2.imshow(\"panorama\", img)\n#cv2.waitKey(0)\n\ncap.release()\ncv2.destroyAllWindows()\n\n\n\n"
},
{
"alpha_fraction": 0.7565698623657227,
"alphanum_fraction": 0.7607192397117615,
"avg_line_length": 37.05263137817383,
"blob_id": "90bf57dc0b22e961ea5f27c5e18493dfb795a518",
"content_id": "e2f64951852948e4e3575fe22ea8883eba0009f9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 723,
"license_type": "no_license",
"max_line_length": 101,
"num_lines": 19,
"path": "/README.md",
"repo_name": "romdespoel/python_panorama",
"src_encoding": "UTF-8",
"text": "# python_panorama\nMessing about how to create a panorama from a video in Python with openCV\n\n##V1\nLame approach at stripping an image.\nKinda want to see what my flattened out face looks like so that's where we're headed. \nHere's my best shot at it so far:\n\n\n##V2 \nUsing computer vision, create a more sophisticated panorama from images/videos. \n\nHow I'm going to do this: \n\tDetect keypoints (Difference of Gaussian)\n\tExtract features (SIFT?)\n\tMatch features between images\n\tThat's as far ahead as I can see. Will update in due time and will describe what these are\n\nI stopped at: way too many keypoints found, false positives... WIll look into way to get rid of these\n"
},
{
"alpha_fraction": 0.6115537881851196,
"alphanum_fraction": 0.6334661245346069,
"avg_line_length": 15.096774101257324,
"blob_id": "460378f561ae83e310d1f6232dad747793e87c16",
"content_id": "667194da23e8aaef123ce79dd912b690952a0448",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 502,
"license_type": "no_license",
"max_line_length": 47,
"num_lines": 31,
"path": "/v1/video_stitch.py",
"repo_name": "romdespoel/python_panorama",
"src_encoding": "UTF-8",
"text": "import cv2\nimport imutils\nimport numpy as np\nimport sys\n\ndef take_strip(img):\n return img[0:, -8:-1]\n\ndef stitch(img, strip):\n return np.concatenate((img, strip), axis=1)\n\ncap = cv2.VideoCapture(sys.argv[1])\n\nret, img = cap.read()\nimg = take_strip(img)\n\nwhile(cap.isOpened()):\n\n ret, frame = cap.read()\n\n if ret:\n strip = take_strip(frame)\n img = stitch(img, strip)\n else:\n break\n\ncv2.imshow(\"panorama\", img)\ncv2.waitKey(0)\n\ncap.release()\ncv2.destroyAllWindows()\n\n\n\n"
},
{
"alpha_fraction": 0.6433353424072266,
"alphanum_fraction": 0.663420557975769,
"avg_line_length": 30.615385055541992,
"blob_id": "b68435b9fc7f832e1dbb39844f31a0fa9b33bfc5",
"content_id": "cbc13fa8ef24a6fb84df03cc5752ba021bd9220a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1643,
"license_type": "no_license",
"max_line_length": 64,
"num_lines": 52,
"path": "/v2/stitcher.py",
"repo_name": "romdespoel/python_panorama",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport imutils\nimport cv2\n\nclass Stitcher:\n\n\tdef findKPandDescriptors(self, image):\n\t\tdescriptor = cv2.xfeatures2d.SIFT_create()\n\t\tkps, feats = descriptor.detectAndCompute(image, None)\n\t\tkps = np.float32([kp.pt for kp in kps])\n\t\treturn (kps, feats)\n\n\t#maybe add false positive tests...\n\tdef matchKeypoints(self, KP_a, KP_b, feats_a, feats_b):\n\t\tmatcher = cv2.DescriptorMatcher_create(\"BruteForce\")\n\t\trawMatches = matcher.knnMatch(feats_a, feats_b, 1)\n\t\tmatches = [(m[0].trainIdx, m[0].queryIdx) for m in rawMatches]\n\t\treturn matches\n\n\tdef stitch(self, im_A, im_B):\n\t\t(KP_a, feats_a) = self.findKPandDescriptors(im_A)\n\t\t(KP_b, feats_b) = self.findKPandDescriptors(im_B)\n\t\tmatchKeypoints(KP_a, KP_b, feats_a, feats_b)\n\n\t#Find matches, and draw line between images. To serve as test\n\tdef showMatches(self, im_A, im_B):\n\t\t(KP_a, feats_a) = self.findKPandDescriptors(im_A)\n\t\t(KP_b, feats_b) = self.findKPandDescriptors(im_B)\n\t\tmatches = self.matchKeypoints(KP_a, KP_b, feats_a, feats_b)\n\n\t\t(hA, wA) = im_A.shape[:2]\n\t\t(hB, wB) = im_B.shape[:2]\n\t\tcombined = np.zeros((max(hA, hB), wA + wB, 3), dtype=\"uint8\")\n\t\tcombined[0:hA,0:wA] = im_A\n\t\tcombined[0:hB,wA:] = im_B\n\n\t\tfor (trainIdx, queryIdx) in matches:\n\t\t\tptA = (int(KP_a[queryIdx][0]), int(KP_a[queryIdx][1]))\n\t\t\tptB = (int(KP_b[trainIdx][0]) + wA, int(KP_b[trainIdx][1]))\n\t\t\tcv2.line(combined, ptA, ptB, (0, 255, 0), 1)\n\n\t\treturn combined\n\n\nif __name__ == \"__main__\":\n\tim_A = cv2.imread(\"tests/A.jpg\")\n\tim_B = cv2.imread(\"tests/B.jpg\")\n\ts = Stitcher()\n\tlines = s.showMatches(im_A, im_B)\n\t#cv2.imshow(\"keypoints\", lines)\n\t#cv2.waitKey(0)\n\tcv2.imwrite(\"tests/lines.jpg\", lines)"
}
] | 4 |
smerchan/information-retreival
|
https://github.com/smerchan/information-retreival
|
4fff092dbb03b48a7b93f0a6408cf4b526622b6b
|
7d1d911e93de0df12d83ecf1d27c69da95ba2ad8
|
4a978329d70ce51b63ccbc07a65183f770831f47
|
refs/heads/main
| 2023-01-23T14:35:47.331591 | 2020-12-09T06:35:23 | 2020-12-09T06:35:23 | 319,782,588 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5232198238372803,
"alphanum_fraction": 0.553250789642334,
"avg_line_length": 24.634920120239258,
"blob_id": "6be130daf7367548f0990cf8c17a2ab4b4d4a7b0",
"content_id": "331107aff66fa0398d7c9ad50b8b40d959ff277b",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 3230,
"license_type": "permissive",
"max_line_length": 80,
"num_lines": 126,
"path": "/src/term_dictionary/distance_functions.py",
"repo_name": "smerchan/information-retreival",
"src_encoding": "UTF-8",
"text": "# MIT License\n# \n# Copyright (c) 2020 Sameer Merchant\n# \n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n# \n# The above copyright notice and this permission notice shall be included in all\n# copies or substantial portions of the Software.\n# \n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\"\"\"\nCollection of distance functions \n\"\"\"\nfrom pprint import pprint \n\ndef jaccard_distance(w1, w2):\n \"\"\"\n Parameters:\n -----------\n w1: str\n w2: str\n\n Returns:\n --------\n float: \n Jaccard distance between w1 and w2 \n \"\"\"\n ws1 = set(w1)\n ws2 = set(w2)\n\n if (len(ws1) > 1) or (len(ws2) > 1):\n dist = 1 - len(ws1.intersection(ws2))/len(ws1.union(ws2))\n else:\n dist = 0\n return dist\n\n\ndef truncated_jaccard_dist(w1, w2):\n n1 = len(w1)\n n2 = len(w2)\n n = min(n1, n2)\n intersection = list()\n union = list()\n\n if n == 0:\n return 1\n\n for i in range(n):\n if w1[i] == w2[i]:\n intersection.append(w1[i])\n union.append(w1[i])\n else:\n union.append(w1[i])\n union.append(w2[i])\n\n dist = 1 - len(intersection)/len(union)\n return dist\n\ndef common_characters(w1, w2):\n \"\"\"\n Parameters:\n ----------\n w1: str\n w2: str\n\n Returns:\n --------\n int:\n Number of characters common between two strings\n \n \"\"\"\n ws1 = set(w1)\n ws2 = set(w2)\n return len(ws1.intersection(ws2))\n \n\ndef levenshtein_distance(w1, w2):\n \"\"\"\n Parameters:\n ----------\n w1: str\n w2: str\n\n Returns:\n --------\n int:\n Returns Levenshtein edit distance between the two strings \n \"\"\"\n n1 = len(w1) + 1\n n2 = len(w2) + 1\n dist = [[0]*n2 for _ in range(n1)]\n\n for i in range(n1):\n dist[i][0] = i\n\n for j in range(n2):\n dist[0][j] = j\n\n for x in range(1, n1):\n for y in range(1, n2):\n if w1[x-1] == w2[y-1]:\n dist[x][y] = min(dist[x-1][y-1], \n dist[x-1][y] + 1, \n dist[x][y-1] + 1)\n else:\n dist[x][y] = min(dist[x-1][y-1] + 1, \n dist[x-1][y] + 1,\n dist[x][y-1] + 1)\n return dist[n1-1][n2-1]\n\nif __name__ == '__main__':\n w1 = 'faster'\n w2 = 'fct'\n d = levenshtein_distance(w1, w2)\n print(d)\n"
},
{
"alpha_fraction": 0.6315302848815918,
"alphanum_fraction": 0.6386962532997131,
"avg_line_length": 35.04999923706055,
"blob_id": "f70ad14a856ebb5bd8ed21704b4754c6f2851185",
"content_id": "1bc8b77f18c18d9225fb7b95c2cdc37b7fc57f79",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4326,
"license_type": "permissive",
"max_line_length": 98,
"num_lines": 120,
"path": "/src/term_dictionary/spelling_correction.py",
"repo_name": "smerchan/information-retreival",
"src_encoding": "UTF-8",
"text": "# MIT License\n# \n# Copyright (c) 2020 Sameer Merchant\n# \n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n# \n# The above copyright notice and this permission notice shall be included in all\n# copies or substantial portions of the Software.\n# \n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\nfrom distance_functions import levenshtein_distance, jaccard_distance\n\n\ndef partial_match(words, vocab):\n partial_matches = set()\n for w in vocab:\n for word in words:\n if word in w:\n partial_matches.add(w)\n\n return partial_matches\n\n\ndef known(words, vocab): \n \"The subset of `words` that appear in the dictionary of WORDS.\"\n return set(w for w in words if w in vocab)\n\n\ndef edits1(word):\n \"All edits that are one edit away from `word`.\"\n letters = ' -abcdefghijklmnopqrstuvwxyz'\n splits = [(word[:i], word[i:]) for i in range(len(word) + 1)]\n deletes = [L + R[1:] for L, R in splits if R]\n transposes = [L + R[1] + R[0] + R[2:] for L, R in splits if len(R)>1]\n replaces = [L + c + R[1:] for L, R in splits if R for c in letters]\n inserts = [L + c + R for L, R in splits for c in letters]\n return set(deletes + transposes + replaces + inserts)\n\ndef inserts_one(word, c):\n \"Generate words with insert one edits\"\n splits = [(word[:i], word[i:]) for i in range(len(word) + 1)]\n inserts = [L + c + R for L, R in splits]\n return set(inserts)\n\n\ndef edits2(word): \n \"All edits that are two edits away from `word`.\"\n return (e2 for e1 in edits1(word) for e2 in edits1(e1))\n\n\ndef scrammble(word):\n \"Generate Scrambled words with two edits \"\n words = set()\n \n for i in range(len(word)):\n letter = word[i]\n\n if i < len(word):\n w = \"\".join([word[:i] + word[i+1:]])\n else:\n w = \"\".join(word[:i])\n\n for word in inserts_one(w, letter):\n words.add(word)\n for e1 in edits2(word):\n words.add(e1)\n print(words)\n return words\n\n\ndef candidates(word, vocab): \n \"Generate possible spelling corrections for word.\"\n return set(known([word], vocab) or \n known(edits1(word), vocab) or \n known(edits2(word), vocab) or \n #partial_match(edits1(word), vocab) or\n #partial_match(edits2(word), vocab) or\n #partial_match([word], vocab)\n [word])\n\ndef spelling_correction(word, vocab): \n \"Most probable spelling correction for word.\"\n normalized_word = word.lower()\n\n if normalized_word in vocab:\n return [ (vocab_dict.get(word, word), 0)]\n\n candidate_word = candidates(normalized_word, vocab)\n\n #suggested_words_list = [(w, jaccard_distance(normalized_word ,w)) for w in candidate_word]\n suggested_words_list = [(w, levenshtein_distance(normalized_word ,w)) for w in candidate_word]\n suggested_words_list.sort(key=lambda x: (x[1], x[0]))\n\n 
suggestions = [(vocab_dict.get(w, w), d) for w, d in suggested_words_list]\n return suggestions\n\n\nif __name__ == '__main__':\n from pprint import pprint\n from test_vocab import *\n\n vocab_dict = {w.lower(): w for w in vocab}\n normalized_vocab = [w for w in vocab_dict.keys()]\n test_words = ['rmcast', 'evpmPfRxtx', 'evpnenabeld', 'evpnenabled', 'evpnenaled', 'evpnfxrx']\n\n\n for word in test_words:\n result = spelling_correction(word, normalized_vocab)\n print(\"Word:{} Suggestions: {}\".format(word, result))\n"
},
{
"alpha_fraction": 0.6192965507507324,
"alphanum_fraction": 0.6214353442192078,
"avg_line_length": 32.65599822998047,
"blob_id": "61d0b7ba134ce5757a3063358f81351b3069ae65",
"content_id": "9f47457a6779c9a398f2f8ef4aa4855b711ed35d",
"detected_licenses": [
"MIT"
],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4208,
"license_type": "permissive",
"max_line_length": 132,
"num_lines": 125,
"path": "/src/term_dictionary/term_dictionary.py",
"repo_name": "smerchan/information-retreival",
"src_encoding": "UTF-8",
"text": "# MIT License\n# \n# Copyright (c) 2020 Sameer Merchant\n# \n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n# \n# The above copyright notice and this permission notice shall be included in all\n# copies or substantial portions of the Software.\n# \n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\nfrom collections import defaultdict\nfrom distance_functions import levenshtein_distance, truncated_jaccard_dist\n\nclass TermDictionary(object):\n \"\"\" \n A dictionary of terms \n The class maintain a dictionary of terms. \n It provides utility to lookup terms that match closest to a word \n\n Attributes:\n -----------\n vocab: Iterable List or Set of alphanumeric strings (terms)\n\n Methods\n -------\n add_term(word)\n search_terms(word)\n \"\"\"\n def __init__(self, vocab):\n \"\"\"\n Parameters\n ----------\n vocab: list or Set \n A list or set of str (terms)\n \"\"\"\n self.terms_dict = { w.lower(): w for w in vocab }\n self.trigram_dict = TermDictionary.build_trigram_term_dict(self.terms_dict.keys())\n return \n\n @staticmethod\n def get_trigrams(term):\n trigram_list = list()\n augmented_term = '$' + term + '$'\n for i in range(len(augmented_term)-2):\n trigram_list.append(augmented_term[i:i+3])\n return trigram_list\n\n @staticmethod\n def build_trigram_term_dict(terms=None):\n trigram_dict = defaultdict(list)\n\n for term in terms:\n for trigram in TermDictionary.get_trigrams(term):\n trigram_dict[trigram].append(term)\n return trigram_dict\n\n\n def add_term(self, word):\n \"\"\"\n Parameters\n ----------\n word: str\n Lookup word in the dictionary \n \"\"\"\n if type(word) == str:\n return \n\n if not word:\n return \n\n self.terns_dict[word.lower()] = word\n return\n\n\n def search_terms(self, word):\n \"\"\" \n Parameters\n -----------\n word: str \n Lookup this word in the terms dictionary \n\n Returns\n -------\n list \n A sorted list of tuples (term, distance). The 'distance' is edit distance \n from search term. 
The tuples are sorted in ascending order based on edit \n distance\n \"\"\"\n norm_word = word.lower()\n\n if norm_word in self.terms_dict:\n return [ (self.terms_dict.get(norm_word), 0) ]\n\n suggested_terms = list()\n for trigram in TermDictionary.get_trigrams(norm_word):\n suggested_terms.extend(self.trigram_dict.get(trigram, []))\n\n suggested_terms = set(suggested_terms)\n candidate_words = [ (self.terms_dict.get(term), levenshtein_distance(term, norm_word)) \n for term in suggested_terms ]\n candidate_words.sort(key=lambda x: (truncated_jaccard_dist(x[0], word), x[1]))\n return candidate_words\n\n\nif __name__ == '__main__':\n from test_vocab import *\n from pprint import pprint\n\n test_words = ['evpnRecev', 'evpnRecevd', 'evpRecd', 'evpnRecv', 'evpnRcv', 'evpnRx', 'evpn*Rx', 'intf', 'mac', 'addr', 'ipaddr']\n termDict = TermDictionary(vocab)\n\n for word in test_words:\n suggested_words = termDict.search_terms(word)\n print(\"Query Word: {}, Suggested: {}\\n\".format(word, suggested_words))\n\n"
}
] | 3 |
naxo100/KaXim-Tutorial
|
https://github.com/naxo100/KaXim-Tutorial
|
9952ad8bd1a93ee78b451741ea29f70fdea48e82
|
86e6983e642d8623897405ffbbd1ca80529e312f
|
1e5367c01d41be4c725c9c910943786d315473c2
|
refs/heads/main
| 2023-04-23T21:14:05.843298 | 2021-05-06T17:47:14 | 2021-05-06T17:47:14 | 364,453,139 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.7224669456481934,
"alphanum_fraction": 0.7224669456481934,
"avg_line_length": 12.352941513061523,
"blob_id": "1737762be0262f464868ff2fc48149fb3302ca08",
"content_id": "61c8e45c8374321fe21c3dcc9ef75d006b7e0ebb",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 227,
"license_type": "no_license",
"max_line_length": 43,
"num_lines": 17,
"path": "/postBuild",
"repo_name": "naxo100/KaXim-Tutorial",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\nset -ex\n\nmkdir bin\ngit clone https://github.com/DLab/KaXim.git\ncd KaXim/src/grammar\nmake\ncd -\ncd KaXim/Release\nmake all\ncp KaXim ~/bin/KaXim\n\ncd ../Debug\nmake all\ncp KaXim ~/bin/KaXim-Debug\n\nexport PATH=~/bin:$PATH\n"
},
{
"alpha_fraction": 0.7348066568374634,
"alphanum_fraction": 0.7624309659004211,
"avg_line_length": 59.33333206176758,
"blob_id": "63885edb8521a6385dcb98d2a6326c029e9b8067",
"content_id": "b4ed7ba70bf38816473ff4f02a7e09c8e5f1c524",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 181,
"license_type": "no_license",
"max_line_length": 162,
"num_lines": 3,
"path": "/README.md",
"repo_name": "naxo100/KaXim-Tutorial",
"src_encoding": "UTF-8",
"text": "# KaXim-Tutorial\n\nUse [](https://mybinder.org/v2/gh/naxo100/KaXim-Tutorial/main?filepath=1-Intro.ipynb) to run this tutorial directly.\n"
},
{
"alpha_fraction": 0.6263194680213928,
"alphanum_fraction": 0.6410978436470032,
"avg_line_length": 25.05555534362793,
"blob_id": "e097ce04706b0cafbf98696c40a2624016533c6d",
"content_id": "b2068695056ed3bebde8379a2609fc9fbcb7d7fe",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1421,
"license_type": "no_license",
"max_line_length": 124,
"num_lines": 54,
"path": "/script/plotting_kappa.py",
"repo_name": "naxo100/KaXim-Tutorial",
"src_encoding": "UTF-8",
"text": "\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport pandas as pd\nimport glob\nimport os\n\n\ndef read_output(filename,xlabel = \"Time\"):\n\tdf = pd.read_csv(filename,sep=\"\\t\",index_col=0,comment=None)\n\tdf.axes[0].name = xlabel\n\treturn df\n\ndef plot_file(filename,ylbl = \"Population\"):\n\tdf = read_output(filename)\n\treturn df.plot(title = filename, ylabel = ylbl)\n\ndef plot_files(filenames = [],filter = \"*\",xlabel=\"Time\",ylabel=\"Population\",share_xy = [False,False], subplot_name = None):\n\tfilenames = filenames + glob.glob(filter)\n\tfilenames.sort()\n\tsize = len(filenames)\n\tsize_h = int(np.ceil(np.sqrt(size)))\n\tsize_w = int(np.ceil(size/size_h))\n\tif(size == 2):\n\t\tsize_h = 2\n\t\tsize_w = 1\n\t\n\ths = 0.15\n\tws = 0.4\n\tif(share_xy[0] != False):\n\t\tif(subplot_name == None):\n\t\t\ths = 0\n\t\telse:\n\t\t\ths = 0.3\n\tif(share_xy[1] != False):\n\t\tws = 0\n\tfig = plt.figure()\n\tgs = fig.add_gridspec(size_w, size_h, hspace=hs, wspace=ws)\n\taxs = gs.subplots(sharex = share_xy[0],sharey = share_xy[1])\n\tfig.supxlabel(xlabel)\n\tfig.supylabel(ylabel)\n\t#frames = list()\n\ti = 0\n\tfor filename in filenames:\n\t\tif(subplot_name == \"\"):\n\t\t\tsub_title = os.path.basename(filename)\n\t\telif(subplot_name == None):\n\t\t\tsub_title = None\n\t\telse:\n\t\t\tsub_title = subplot_name + \" \" + str(i)\n\t\tdf = read_output(filename)\n\t\tdf.plot(title = sub_title, ylabel = None,xlabel = \"\", ax = axs[i//size_h,i%size_h],legend = i == size_h-1)\n\t\ti = i+1\n\t\n\treturn fig,axs\n\t\t\n\n\n\t\t\n\n\n\n\n\n"
}
] | 3 |
om-12/srm-bug-byte
|
https://github.com/om-12/srm-bug-byte
|
61126e110aa0ce83dd54cc77b658788f9acf8481
|
de7ba8f9aabd4111fa020962741ade61cd385d19
|
81236511b102c281b048180fbff9f464bcf92edc
|
refs/heads/master
| 2023-04-07T12:59:28.474501 | 2021-04-07T08:13:15 | 2021-04-07T08:13:15 | 281,191,120 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.673394501209259,
"alphanum_fraction": 0.673394501209259,
"avg_line_length": 33.0625,
"blob_id": "448cce6b866ea7e08cd646173c416b927acf7c9b",
"content_id": "6a898d93689a67737ef564ec13974e5ab244d14a",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 545,
"license_type": "no_license",
"max_line_length": 61,
"num_lines": 16,
"path": "/first/urls.py",
"repo_name": "om-12/srm-bug-byte",
"src_encoding": "UTF-8",
"text": "from django.urls import path\n\nfrom . import views\nurlpatterns=[\n \n path('',views.index,name='index'),\n path('register',views.register,name='register'),\n path('login',views.login,name='login'),\n path('logout',views.logout,name='logout'),\n path('donate',views.donate,name='donate'),\n path('about',views.about,name='about'),\n path('feedback',views.feedback,name='feedback'),\n path('news',views.news,name='news'),\n path('participate',views.participate,name='participate'),\n path('events',views.events,name='events'),\n]\n"
},
{
"alpha_fraction": 0.8010932207107544,
"alphanum_fraction": 0.8062556982040405,
"avg_line_length": 69.06382751464844,
"blob_id": "cc50cf9662458c6ef24880d840734d126aa56406",
"content_id": "5a6a73b35164b2197f37be4074c0a5faafd8946f",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Markdown",
"length_bytes": 3299,
"license_type": "no_license",
"max_line_length": 1171,
"num_lines": 47,
"path": "/README.md",
"repo_name": "om-12/srm-bug-byte",
"src_encoding": "UTF-8",
"text": "# srm-bug-byte\nsrm developing challenge\n\n\nINTRODUCTION \nCharity is an act of kindness, where a person who has financially more than enough of what he or she needs contributes a part of his or her surplus income for the fulfilment of the needs of those who are less capable. The majority of millitary organisations has experienced difficulties in getting funds or other required things.In current scenerio ,in a pandemic situation people are coming forward and helping the needy one and there is no site which helps in funding of all kind of army organisations. Getting donor is a very hard task, and sometime dealing with some donor’s conditions can be a big challenge for any funds related to indian army to fulfill it. This charity management system will help defence funds to find donors easily. This system has three modules namely, Admin, members and Donor. Admin can login using credentials and manage the request raised by members by approving or rejecting it. members can view the previous events list and donation report. Donor can simply donate without any preregisteration and can choose whether they want to disclose their name or not.This site will also help in fund raising events and ofcourse online fundraising.\n\n\nWHY THIS IDEA?\n UK fundraising reports that “17% of online donations were made on a mobile device in 2016 versus 14% in 2015″.One of the big reasons are when you will search for the sites like ours(donate to army organisations) you will find that there only 2 to 3 sites which are currently active and most of them are very specific ,for eg-national defence fund,\n so we think that our initiative will not only make donations easier for several organisations but also encourage people by managing and hosting events with the help of our members.\n \n \n\n \n#features of website\n-> user registeration by google,facebook\n->sending email to user gmail accounts\n->donations to direct funds\n->several funds at one place\n\n#features we are expecting to accomplish before final project submission are:-\n1) we will include paypal payment gateway.\n2) we will include mailchimp so that our prominent members could get updates about past and upcoming events.\n3) Histories of donors and great efforts of our members will be updated to our websites pages regularly which will encourage others\n\n\nBUGS WE ARE FACING\nfacebook registeration because facebook had changed their security settings for http including domains.\n\n\n\n\n\n\n\nHELP TO UNDERSTAND THE WEBSITE\n\nHOMEPAGE - this page consist of navigations to all other contents of site and contains donate now button after motivating by the images.\nNEWS- it will contain the data which is recent and related to our website works and current affairs in indian army organisation.\nevents-it will contain the type of events we can arrange for fundraising.\nparticipation-initially we thought that we could complete this part of project but we are only able to complete the front end\nour site supports a good user login register page.\nwe had tried to get the feed backs of user to let us know what changes we need to do.\nwe have designed the model but we need to do alot because it is a dynamic site as you can understand.\nwe have tried for payment gateways too but we did not succeed because late we came to know that we need mcc.\nTHANKYOU\n"
},
{
"alpha_fraction": 0.6719056963920593,
"alphanum_fraction": 0.71512770652771,
"avg_line_length": 34.068965911865234,
"blob_id": "2bf4a67692b43fb874dc4a9ea56512a43696a0c0",
"content_id": "d20312b68d08704115ccd816a52a8278d58b10d9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1018,
"license_type": "no_license",
"max_line_length": 87,
"num_lines": 29,
"path": "/first/models.py",
"repo_name": "om-12/srm-bug-byte",
"src_encoding": "UTF-8",
"text": "\n# Create your models here.\nfrom django.db import models\n\n# Create your models here.\n\n# this is to get the information from the users and their exprience about this platform\n\nclass Feedback(models.Model) :\n name = models.CharField(max_length=100)\n email = models.EmailField(max_length=254)\n subject = models.CharField(max_length=200)\n message = models.CharField(max_length=200)\n def _str_(self):\n return self.name\n\nclass Donate(models.Model) :\n fname = models.CharField(max_length=100)\n lname = models.CharField(max_length=100)\n organization = models.CharField(max_length=100)\n email = models.EmailField(max_length=254)\n phonenumber = models.CharField(max_length=100)\n address1 = models.CharField(max_length=254)\n address2 = models.CharField(max_length=254)\n city=models.CharField(max_length=100)\n state=models.CharField(max_length=100)\n amount = models.IntegerField()\n comments = models.CharField(max_length=500)\n def __str__(self):\n return self.fname\n"
},
{
"alpha_fraction": 0.6541686058044434,
"alphanum_fraction": 0.6588262915611267,
"avg_line_length": 36.675437927246094,
"blob_id": "f28be022732a653743a3c624adf5148e56e0bd40",
"content_id": "cd0126010fd026d409b1f46c8bab0a08c3307b42",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 4294,
"license_type": "no_license",
"max_line_length": 202,
"num_lines": 114,
"path": "/first/views.py",
"repo_name": "om-12/srm-bug-byte",
"src_encoding": "UTF-8",
"text": "from django.shortcuts import render, redirect, get_object_or_404\nfrom django.contrib import messages\nfrom django.http import HttpResponse\n\nfrom django.contrib.auth.models import User, auth\nfrom django.contrib.sessions.models import Session\nfrom django.contrib.auth.decorators import login_required\nfrom django.core.mail import send_mail\nfrom .models import Donate\nfrom first.models import Feedback\n\ndef index(request):\n \n return render(request,'index.html');\n\ndef login(request) :\n if request.method == 'POST' :\n username = request.POST['username']\n password = request.POST['password']\n # by writing this we are checking whether the entered username and password are of the same user or not \n user = auth.authenticate(username=username,password=password)\n if user is not None :\n request.session['member_id'] = user.id\n auth.login(request,user)\n return redirect('/')\n else :\n messages.info(request,'invalid credentials')\n return redirect('login')\n \n else :\n return render(request,\"login.html\")\n\n\n\ndef logout(request) :\n auth.logout(request)\n request.session['member_id'] = 0\n return redirect('/')\n\n\n\ndef register(request):\n if request.method == 'POST' :\n username = request.POST['username']\n email = request.POST['email']\n password1 = request.POST['password1']\n password2 = request.POST['password2']\n # by writing this condition we are checking that if password1 and password2 are equal or not \n if password1==password2 :\n # by writing this condition we are checking that if this username is already registered or not\n if User.objects.filter(username=username).exists() :\n messages.info(request,'Username Taken')\n return redirect('register')\n # by writing this condition we are checking that if this email is already registered or not\n elif User.objects.filter(email=email).exists() :\n messages.info(request,'email taken already')\n return redirect('register')\n else :\n user =User.objects.create_user(username=username,email=email,password=password1)\n # by writing this only we are hitting the database to store the information\n user.save()\n print('user created')\n return redirect('login') \n else :\n messages.info(request,'password not matching')\n return redirect('register')\n return('/') \n else :\n return render(request,'reg.html')\n\n\ndef donate(request):\n if request.method=='POST':\n fname = request.POST['fname']\n lname = request.POST['lname']\n organization= request.POST['organization']\n email = request.POST['email']\n phonenumber= request.POST['phonenumber']\n address1 = request.POST['address1']\n address2 = request.POST['address2']\n city=request.POST['city']\n state=request.POST['state']\n amount=request.POST['amount']\n comments=request.POST['comments']\n donation = Donate(fname=fname,lname=lname,organization=organization,email=email,phonenumber=phonenumber,address1=address1,address2=address2,city=city,state=state,amount=amount,comments=comments)\n donation.save()\n return render(request,\"form.html\")\n\n# this function booking is to connect our app first to the file about.html\ndef about(request) :\n return render(request,\"about.html\") \n\ndef news(request) :\n return render(request,\"newsAndParticipate.html\") \n\n# by writing we are applying the condition that if user is logged in then only call feedback function\n@login_required(login_url='login')\ndef feedback(request) :\n name = request.POST['name']\n email = request.POST['email']\n subject = request.POST['subject']\n message = request.POST['message']\n feed = 
Feedback(name=name,email=email,subject=subject,message=message)\n # by writing this only we are hitting the database to store the information\n feed.save() \n return redirect('index')\n\n\ndef participate(request):\n return render(request,'participate.html')\n\n\ndef events(request):\n return render(request,'events.html')"
}
] | 4 |
sauloantuness/ml-tp1
|
https://github.com/sauloantuness/ml-tp1
|
2689288beeaabb4b58ec58738c5972a97f574550
|
efc5fb6e0b0e7506537929d940556a7ea23c67d2
|
d87e301ef9c8606b0bfced546dfdf7b55777c263
|
refs/heads/master
| 2021-01-18T19:44:17.707072 | 2017-04-04T17:26:20 | 2017-04-04T17:26:20 | 86,910,246 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.5438520908355713,
"alphanum_fraction": 0.5743765830993652,
"avg_line_length": 23.755319595336914,
"blob_id": "c97e00ff95019104ec39289118ac07e520619d2d",
"content_id": "bf4dd829cec03fd38c02a591a0e8571a412837f9",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 2326,
"license_type": "no_license",
"max_line_length": 84,
"num_lines": 94,
"path": "/code.py",
"repo_name": "sauloantuness/ml-tp1",
"src_encoding": "UTF-8",
"text": "import numpy as np\nimport pandas as pd\n\n\ndef compute_cost(X, y, beta):\n total_cost = 0\n m, _ = X.shape\n for i in range(m):\n x_i = X[i, 1]\n y_i = y[i]\n b_0 = beta[0, 0]\n b_1 = beta[0, 1]\n total_cost += (b_0 + b_1 * x_i - y_i) ** 2\n\n return total_cost / (2 * m)\n\n\ndef gradient_descent2(X, y, theta, alpha, iters):\n '''\n alpha: learning rate\n iters: number of iterations\n OUTPUT:\n theta: learned parameters\n cost: a vector with the cost at each training iteration\n '''\n # temp = np.matrix(np.zeros(theta.shape))\n # parameters = int(theta.ravel().shape[1])\n cost = np.zeros(iters)\n m, _ = X.shape\n\n for i in range(iters):\n gradient_b0 = 0\n gradient_b1 = 0\n\n for j in range(m):\n gradient_b0 += (theta[0, 0] + theta[0, 1] * X[j, 1] - y[j, 0])\n gradient_b1 += X[j, 1] * (theta[0, 0] + theta[0, 1] * X[j, 1] - y[j, 0])\n\n gradient_b0 = gradient_b0 * 2.0 / m\n gradient_b1 = gradient_b1 * 2.0 / m\n\n theta[0, 0] = theta[0, 0] - alpha * gradient_b0\n theta[0, 1] = theta[0, 1] - alpha * gradient_b1\n\n cost[i] = compute_cost(X, y, theta)\n\n return theta, cost\n\ndef gradient_descent(X, y, theta, alpha, iterations):\n \"\"\"\n gradient_descent Performs gradient descent to learn theta\n theta = GRADIENTDESENT(X, y, theta, alpha, num_iters) updates theta by \n taking num_iters gradient steps with learning rate alpha\n \"\"\"\n cost_history = [0] * iterations\n m, _ = X.shape\n\n for iteration in range(iterations):\n hypothesis = X.dot(theta.T)\n loss = hypothesis - y\n gradient = X.T.dot(loss) / m\n theta = theta - alpha * gradient\n cost = compute_cost(X, y, theta)\n cost_history[iteration] = cost\n\n return theta, cost_history\n\n\ndata = pd.read_csv('ex1data1.txt', header=None, names=['Population', 'Profit'])\ndata.head()\n\ndata.insert(0, 'beta zero', 1)\n\n# set X (training data) and y (target variable)\ncols = data.shape[1]\nX = data.iloc[:, 0:cols - 1]\ny = data.iloc[:, cols - 1:cols]\n\nX.head()\ny.head()\n\nX = np.matrix(X.values)\ny = np.matrix(y.values)\nbeta = np.matrix(np.array([0.0, 0.0]))\n\nX.shape, beta.shape, y.shape\n\nalpha = 0.01\niters = 1500\n\ng, cost = gradient_descent(X, y, beta, alpha, iters)\n\nprint g\nprint cost"
}
] | 1 |
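
The docstrings above describe looped and vectorized gradient descent over the same least-squares cost. A self-contained sketch of the vectorized update on synthetic data (made up here; the repo itself reads ex1data1.txt):

import numpy as np

# y = 2 + 3x plus noise; X gets a leading column of ones for the intercept.
rng = np.random.default_rng(0)
x = rng.uniform(0, 10, size=(100, 1))
X = np.hstack([np.ones_like(x), x])
y = 2 + 3 * x + rng.normal(scale=0.5, size=x.shape)

theta = np.zeros((2, 1))
alpha, iters, m = 0.02, 2000, len(y)
for _ in range(iters):
    # gradient of J = (1/2m) * sum((X.theta - y)^2) is (1/m) * X^T (X.theta - y)
    theta -= alpha * X.T.dot(X.dot(theta) - y) / m

print(theta.ravel())  # converges to approximately [2, 3]
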
gnu-andrew/fixes-overlay
|
https://github.com/gnu-andrew/fixes-overlay
|
1ec07b50cc6e20d6ed5c0c01d83e555005ca512c
|
4ebf394f62cc6e6c51944803401ec15e51d87eaf
|
615ab182f6b2b26a3cae28746df99ae216cb5cbc
|
refs/heads/master
| 2021-01-16T09:30:04.198138 | 2020-06-16T04:20:28 | 2020-06-16T04:20:28 | 243,062,172 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.32608696818351746,
"alphanum_fraction": 0.3478260934352875,
"avg_line_length": 10.5,
"blob_id": "1a543f30e8543fbebb7a67ebd8b06f667bfdc0bd",
"content_id": "d3bedc29b7f4a0ca4f0ff282349f43b6ae4d9363",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 92,
"license_type": "no_license",
"max_line_length": 32,
"num_lines": 8,
"path": "/www-client/chromium/files/experimental.gyp",
"repo_name": "gnu-andrew/fixes-overlay",
"src_encoding": "UTF-8",
"text": "{\n 'targets': [\n {\n 'target_name': 'i18n_api',\n 'type': 'none',\n },\n ],\n}\n"
},
{
"alpha_fraction": 0.6852940917015076,
"alphanum_fraction": 0.6926470398902893,
"avg_line_length": 26.200000762939453,
"blob_id": "5be5d25be14906458eb93cd7275c47f73c55fce4",
"content_id": "761e56148d9815bc504e305bb038a49616c15293",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Shell",
"length_bytes": 680,
"license_type": "no_license",
"max_line_length": 95,
"num_lines": 25,
"path": "/www-client/chromium/files/chromium-launcher-r1.sh",
"repo_name": "gnu-andrew/fixes-overlay",
"src_encoding": "UTF-8",
"text": "#!/bin/bash\n#\n# Copyright (c) 2009 The Chromium Authors. All rights reserved.\n# Use of this source code is governed by a BSD-style license that can be\n# found in the LICENSE file.\n\n# Let the wrapped binary know that it has been run through the wrapper\nexport CHROME_WRAPPER=\"`readlink -f \"$0\"`\"\n\nPROGDIR=\"`dirname \"$CHROME_WRAPPER\"`\"\n\ncase \":$PATH:\" in\n *:$PROGDIR:*)\n # $PATH already contains $PROGDIR\n ;;\n *)\n # Append $PROGDIR to $PATH\n export PATH=\"$PATH:$PROGDIR\"\n ;;\nesac\n\n# Set the .desktop file name\nexport CHROME_DESKTOP=\"chromium-browser-chromium.desktop\"\n\nexec -a \"chromium-browser\" \"$PROGDIR/chrome\" --extra-plugin-dir=/usr/lib/nsbrowser/plugins \"$@\"\n"
},
{
"alpha_fraction": 0.39423078298568726,
"alphanum_fraction": 0.39423078298568726,
"avg_line_length": 18.809524536132812,
"blob_id": "cdb2f5404c21ca8375ce9b753577df1df6a9cb78",
"content_id": "3abd34865ada87cf81e8b542a6ca7cb4acc4b7e3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 416,
"license_type": "no_license",
"max_line_length": 65,
"num_lines": 21,
"path": "/www-client/chromium/files/flac.gyp",
"repo_name": "gnu-andrew/fixes-overlay",
"src_encoding": "UTF-8",
"text": "{\n 'targets': [\n {\n 'target_name': 'libflac',\n 'type': 'settings',\n 'direct_dependent_settings': {\n 'defines': [\n 'USE_SYSTEM_FLAC',\n ],\n },\n 'link_settings': {\n 'ldflags': [\n '<!@(pkg-config --libs-only-L --libs-only-other flac)',\n ],\n 'libraries': [\n '<!@(pkg-config --libs-only-l flac)',\n ],\n },\n },\n ],\n}\n"
}
] | 3 |
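
The case statement in chromium-launcher-r1.sh appends the wrapper's directory to PATH only when it is not already an entry. A rough Python equivalent of that check, for illustration only (the directory name is hypothetical):

import os

def ensure_on_path(progdir: str) -> None:
    """Append progdir to PATH unless it is already one of the entries."""
    entries = os.environ.get("PATH", "").split(os.pathsep)
    if progdir not in entries:
        os.environ["PATH"] = os.pathsep.join(entries + [progdir])

ensure_on_path("/usr/lib/chromium")  # made-up example directory
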
Dhruv282008/ATM-PYTHON
|
https://github.com/Dhruv282008/ATM-PYTHON
|
4191d99c64d703f77fdbf1d11c9f540d41026163
|
64355adc5a3b799a8c875418b84cb0994ca729b8
|
203f1cb13c15f786f9a103fc4aea625a1e5894ac
|
refs/heads/main
| 2023-07-07T12:42:04.637599 | 2021-08-16T09:10:39 | 2021-08-16T09:10:39 | 396,214,841 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.49450549483299255,
"alphanum_fraction": 0.5098901391029358,
"avg_line_length": 28.33333396911621,
"blob_id": "9c7362af14544104eaf409b320e124c52cef3659",
"content_id": "797e974af54a21d6a50b370be7786901b22b8bbe",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 455,
"license_type": "no_license",
"max_line_length": 71,
"num_lines": 15,
"path": "/Atm.py",
"repo_name": "Dhruv282008/ATM-PYTHON",
"src_encoding": "UTF-8",
"text": "class Atm():\r\n def __init__(self, PIN):\r\n self.PIN = PIN\r\n self.uinput = input(\"Enter your PIN number: \")\r\n \r\n if(self.uinput != self.PIN):\r\n print(\"Incorrect Pin\")\r\n\r\n else:\r\n uinput2 = self.uinput2 = input(\"Enter Your Bank Balance: \")\r\n print(uinput2)\r\n print(input(\"Enter Amount To Withdraw: \"))\r\n print(\"Money Withdrawn Successfully!\")\r\n\r\natm = Atm(\"1234\")\r\n"
}
] | 1 |
Saby2002/arinsnetwork_Automation
|
https://github.com/Saby2002/arinsnetwork_Automation
|
4dc2b933deefe9317f73143e5998aa3749f518d7
|
f77ab7f2e849a6ca5247f7b3eb171a4362a7423b
|
1cc868f3beebd9b875f2d5a1c533a06875800db7
|
refs/heads/master
| 2023-06-25T21:44:18.783505 | 2021-07-26T19:09:56 | 2021-07-26T19:09:56 | 389,741,652 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.6755824089050293,
"alphanum_fraction": 0.7057808637619019,
"avg_line_length": 47.29166793823242,
"blob_id": "f874656fd846b68a671408b2747e3d7fd541de7b",
"content_id": "5a76ab142ed465a9ec1d1765f1c14679875abdc4",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Dockerfile",
"length_bytes": 1159,
"license_type": "no_license",
"max_line_length": 79,
"num_lines": 24,
"path": "/ProjectA/Dockerfile",
"repo_name": "Saby2002/arinsnetwork_Automation",
"src_encoding": "UTF-8",
"text": "FROM alpine:latest\nLABEL maintainer=\"[email protected]\"\nLABEL relation=\"https://arinsnetwork.com\"\nENV REFRESHED_AT 2020_05_15\nENV FTP_USERNAME dcf_helper\nENV FTP_PASS aq1sw2de3fr4\nRUN apk update; apk add vsftpd\nRUN echo \"local_enable=YES\" >> /etc/vsftpd/vsftpd.conf \\\n&& echo \"chroot_local_user=YES\" >> /etc/vsftpd/vsftpd.conf \\\n&& echo \"allow_writeable_chroot=YES\" >> /etc/vsftpd/vsftpd.conf \\\n&& echo \"write_enable=YES\" >> /etc/vsftpd/vsftpd.conf \\\n&& echo \"local_umask=022\" >> /etc/vsftpd/vsftpd.conf \\\n&& echo \"passwd_chroot_enable=yes\" >> /etc/vsftpd/vsftpd.conf \\\n&& echo 'pasv_enable=Yes' >> /etc/vsftpd/vsftpd.conf \\\n&& echo 'pasv_min_port=50000' >> /etc/vsftpd/vsftpd.conf \\\n&& echo 'pasv_max_port=50050' >> /etc/vsftpd/vsftpd.conf \\\n&& echo 'seccomp_sandbox=NO' >> /etc/vsftpd/vsftpd.conf \\\n&& sed -i \"s/anonymous_enable=YES/anonymous_enable=NO/\" /etc/vsftpd/vsftpd.conf\nRUN mkdir -p \"/var/ftp/files\"\nRUN adduser -h \"/var/ftp/files\" -s \"/sbin/nologin\" -D $FTP_USERNAME\nRUN echo \"$FTP_USERNAME:$FTP_PASS\" | /usr/sbin/chpasswd\nRUN chown -R $FTP_USERNAME:nogroup \"/var/ftp/files\"\nEXPOSE 20 21 50000-50050\nENTRYPOINT [\"/usr/sbin/vsftpd\", \"/etc/vsftpd/vsftpd.conf\"]\n"
},
{
"alpha_fraction": 0.48048779368400574,
"alphanum_fraction": 0.70243901014328,
"avg_line_length": 16.08333396911621,
"blob_id": "22a3c1002c7c082e2fe5a42c3c4b6501de8fdb21",
"content_id": "377057b4f5c381749cc009e7a82e5f448301bd97",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Text",
"length_bytes": 410,
"license_type": "no_license",
"max_line_length": 25,
"num_lines": 24,
"path": "/ProjectA/Batfish/requirement.txt",
"repo_name": "Saby2002/arinsnetwork_Automation",
"src_encoding": "UTF-8",
"text": "attrs==21.2.0\nbidict==0.21.2\ncertifi==2021.5.30\ncharset-normalizer==2.0.3\nConfigArgParse==0.15.2\ndeepdiff==5.5.0\nDeprecated==1.2.12\nidna==3.2\nipaddress==1.0.23\nnetconan==0.12.2\nnumpy==1.21.1\nordered-set==4.0.2\npandas==1.1.5\npasslib==1.7.4\npybatfish==0.36.0\npython-dateutil==2.8.2\npytz==2021.1\nPyYAML==5.4.1\nrequests==2.26.0\nrequests-toolbelt==0.9.1\nsimplejson==3.17.3\nsix==1.16.0\nurllib3==1.26.6\nwrapt==1.12.1\n"
},
{
"alpha_fraction": 0.7306010723114014,
"alphanum_fraction": 0.737158477306366,
"avg_line_length": 37.10416793823242,
"blob_id": "6c99478d7a97fd864c127c9c6ce453033d9296e5",
"content_id": "ab77c03a674ad6564d43b27e62d545b079ee21b7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 1830,
"license_type": "no_license",
"max_line_length": 165,
"num_lines": 48,
"path": "/ProjectA/Batfish/main.py",
"repo_name": "Saby2002/arinsnetwork_Automation",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\n'''\nbf_session is used to provide details for communication with the batfish application (IP address, API token etc)\nbf_init_snapshot is used to upload your configuration files into the batfish application.\nload_questions is used to initialise the Batfish questions\nbfq is used to ask the question and receive the response\nOS standard lib, which allow us to work with directories at linux host\n\nCreate Variable, which we need to for our python script to work:\n bf_address : IP address we are using to connect to Batfish\n As it is running in the Docker container and its TCP ports are NATed to all interfaces, we can use localhost (127.0.0.1) to connect to it. \n\nBody Part : \n connectivity data from bf_address to bf_session.host property\n load confiiguration using bf_init_snapshot() using argument snapshot_path\n Initialising Batfish questions without arguments using load_questions()\n Collect the answer() to the question nodeProperties() sent via bfq class, which is converted to Pandas Dataframe using Frame() method and stored in a variable r.\n printing the answer r in the STDOUT with print() function\n'''\n\nfrom pybatfish.client.commands import bf_init_snapshot, bf_session\nfrom pybatfish.question.question import load_questions\nfrom pybatfish.question import bfq\nimport os\n\nbf_address = \"127.0.0.1\"\nsnapshot_path = \"./snapshot\"\noutput_dir = \"./output\"\n\nif __name__ == \"__main__\":\n \n # Setting host to connect\n bf_session.host = bf_address\n \n # Loading configs and questions\n bf_init_snapshot(snapshot_path, overwrite=True)\n load_questions()\n\n # Running Questions\n r = bfq.nodeProperties().answer().frame()\n print(r)\n\n # Saving output\n if not os.path.exists(output_dir):\n os.mkdir(output_dir)\n\n r.to_csv(f\"{output_dir}/results.csv\")\n\n"
},
{
"alpha_fraction": 0.6113983392715454,
"alphanum_fraction": 0.6152141690254211,
"avg_line_length": 31.626506805419922,
"blob_id": "3c0bf343ffa70d964f0bd0b1f49c3c41d53309bf",
"content_id": "cc586a40b3e29b09990f25e8c211d296ca72b9e3",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 8124,
"license_type": "no_license",
"max_line_length": 276,
"num_lines": 249,
"path": "/ProjectA/Batfish/batfish_env/lib/python3.8/site-packages/netconan/netconan.py",
"repo_name": "Saby2002/arinsnetwork_Automation",
"src_encoding": "UTF-8",
"text": "\"\"\"Handle invoking netconan from the command line.\"\"\"\n# Copyright 2018 Intentionet\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import absolute_import\n\nimport argparse\nimport logging\nimport sys\n\nimport configargparse\n\nfrom . import __version__\nfrom .anonymize_files import anonymize_files\nfrom .ip_anonymization import IpAnonymizer\n\n\ndef host_bits(x):\n \"\"\"Argparse type function for --preserve-host-bits.\"\"\"\n # the name of this function is used for error message, apparently.\n val = int(x)\n if val < 0 or val > 32:\n raise argparse.ArgumentError(\"valid values are [0, 32]\")\n return val\n\n\ndef _parse_args(argv):\n \"\"\"Parse arguments from the given list.\"\"\"\n parser = configargparse.ArgParser(\n # Replace the default config file help with custom message\n # To fix some syntax issues\n add_config_file_help=False,\n description=\"\"\"\n Args that can start with '--' can also be set in a config file (specified\n via -c). If an arg is specified in more than one place, then command line\n values override config file values which override defaults. Config file\n syntax allows: key=value, flag=true, stuff=[a,b,c] (for more details, see\n here https://goo.gl/R74nmi).\n \"\"\",\n )\n\n parser.add_argument(\n \"--version\",\n action=\"version\",\n version=__version__,\n help=\"Print version number and exit\",\n )\n parser.add_argument(\n \"-a\",\n \"--anonymize-ips\",\n action=\"store_true\",\n default=False,\n help=\"Anonymize IP addresses\",\n )\n parser.add_argument(\n \"-c\",\n \"--config\",\n is_config_file=True,\n help=\"Netconan configuration file with defaults for these CLI parameters\",\n )\n parser.add_argument(\n \"-d\",\n \"--dump-ip-map\",\n default=None,\n help=\"Dump IP address anonymization map to specified file\",\n )\n parser.add_argument(\n \"-i\",\n \"--input\",\n required=True,\n help=\"Input file or directory containing files to anonymize\",\n )\n parser.add_argument(\n \"-l\",\n \"--log-level\",\n default=\"INFO\",\n choices=[\"DEBUG\", \"INFO\", \"WARNING\", \"ERROR\", \"CRITICAL\"],\n help=\"Determines what level of logs to display\",\n )\n parser.add_argument(\n \"-n\",\n \"--as-numbers\",\n default=None,\n help=\"List of comma separated AS numbers to anonymize\",\n )\n parser.add_argument(\n \"-o\",\n \"--output\",\n required=True,\n help=\"Output file or directory where anonymized files are placed\",\n )\n parser.add_argument(\n \"-p\",\n \"--anonymize-passwords\",\n action=\"store_true\",\n default=False,\n help=\"Anonymize password and snmp community lines\",\n )\n parser.add_argument(\n \"-r\",\n \"--reserved-words\",\n default=None,\n help=\"List of comma separated words that should not be anonymized\",\n )\n parser.add_argument(\n \"-s\",\n \"--salt\",\n default=None,\n help=\"Salt for IP and sensitive keyword anonymization\",\n )\n parser.add_argument(\n \"-u\",\n \"--undo\",\n action=\"store_true\",\n default=False,\n help=\"Undo reversible anonymization (must specify salt)\",\n )\n 
parser.add_argument(\n \"-w\",\n \"--sensitive-words\",\n default=None,\n help=\"List of comma separated keywords to anonymize\",\n )\n parser.add_argument(\n \"--preserve-prefixes\",\n default=\",\".join(IpAnonymizer.DEFAULT_PRESERVED_PREFIXES),\n help=\"List of comma separated IP prefixes to preserve. Specified prefixes are preserved, but the host bits within those prefixes are still anonymized. To preserve prefixes and host bits in specified blocks, use --preserve-addresses instead\",\n )\n parser.add_argument(\n \"--preserve-addresses\",\n default=None,\n help=\"List of comma separated IP addresses or networks to preserve. Prefixes and host bits within those networks are preserved. To preserve just prefixes and anonymize host bits, use --preserve-prefixes\",\n )\n parser.add_argument(\n \"--preserve-private-addresses\",\n action=\"store_true\",\n default=False,\n help=\"Preserve private-use IP addresses. Prefixes and host bits within the private-use IP networks are preserved. To preserve specific addresses or networks, use --preserve-addresses instead. To preserve just prefixes and anonymize host bits, use --preserve-prefixes\",\n )\n parser.add_argument(\n \"--preserve-host-bits\",\n type=host_bits,\n default=8,\n help=\"Preserve the trailing bits of IP addresses, aka the host bits of a network. Set this value large enough to represent the largest interface network (e.g., 8 for a /24 or 12 for a /20) or NAT pool.\",\n )\n return parser.parse_args(argv)\n\n\ndef main(argv=sys.argv[1:]):\n \"\"\"Netconan tool entry point.\"\"\"\n args = _parse_args(argv)\n\n if not args.input:\n raise ValueError(\"Input must be specified\")\n\n log_level = logging.getLevelName(args.log_level)\n logging.basicConfig(format=\"%(levelname)s %(message)s\", level=log_level)\n\n if not args.output:\n raise ValueError(\"Output must be specified\")\n\n if args.undo:\n if args.anonymize_ips:\n raise ValueError(\n \"Cannot anonymize and undo anonymization, select only one.\"\n )\n if args.salt is None:\n raise ValueError(\n \"Salt used for anonymization must be specified in order to undo anonymization.\"\n )\n\n if args.dump_ip_map is not None:\n if not args.anonymize_ips:\n raise ValueError(\n \"Can only dump IP address map when anonymizing IP addresses.\"\n )\n\n as_numbers = None\n if args.as_numbers is not None:\n as_numbers = args.as_numbers.split(\",\")\n\n reserved_words = None\n if args.reserved_words is not None:\n reserved_words = args.reserved_words.split(\",\")\n\n sensitive_words = None\n if args.sensitive_words is not None:\n sensitive_words = args.sensitive_words.split(\",\")\n\n preserve_prefixes = None\n if args.preserve_prefixes is not None:\n preserve_prefixes = args.preserve_prefixes.split(\",\")\n\n preserve_addresses = None\n if args.preserve_addresses is not None:\n preserve_addresses = args.preserve_addresses.split(\",\")\n\n if args.preserve_private_addresses:\n addrs = list(IpAnonymizer.RFC_1918_NETWORKS)\n # Merge private addresses with explicitly preserved addresses\n preserve_addresses = (\n addrs if preserve_addresses is None else (preserve_addresses + addrs)\n )\n\n if not any(\n [\n as_numbers,\n sensitive_words,\n args.anonymize_passwords,\n args.anonymize_ips,\n args.undo,\n ]\n ):\n logging.warning(\n \"No anonymization options turned on, \"\n \"no output file(s) will be generated.\"\n )\n else:\n anonymize_files(\n args.input,\n args.output,\n args.anonymize_passwords,\n args.anonymize_ips,\n args.salt,\n args.dump_ip_map,\n sensitive_words,\n args.undo,\n as_numbers,\n 
reserved_words,\n preserve_prefixes,\n preserve_addresses,\n preserve_suffix_v4=args.preserve_host_bits,\n preserve_suffix_v6=args.preserve_host_bits,\n )\n\n\nif __name__ == \"__main__\":\n main()\n"
},
{
"alpha_fraction": 0.41163188219070435,
"alphanum_fraction": 0.42497992515563965,
"avg_line_length": 38.07646942138672,
"blob_id": "4c0aba22778b7f191b35766b6bd6eef43844c576",
"content_id": "6de152d02f9416e60ca1e3d7b1e9c35199a5ab41",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 19928,
"license_type": "no_license",
"max_line_length": 143,
"num_lines": 510,
"path": "/Python/Cisco_pyATS/pyats_env/lib/python3.6/site-packages/genie/libs/sdk/apis/nxos/health/health.py",
"repo_name": "Saby2002/arinsnetwork_Automation",
"src_encoding": "UTF-8",
"text": "\"\"\"Common health functions for platform\"\"\"\n\n# Python\nimport os\nimport logging\nimport re\n\n# pyATS\nfrom pyats.easypy import runtime\n\n# Genie\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError, SchemaMissingKeyError\nfrom genie.utils import Dq\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\nlog = logging.getLogger(__name__)\n\n\ndef health_cpu(device,\n command='show processes cpu',\n processes=None,\n check_key='one_sec',\n output=None,\n health=True):\n '''Get cpu load on device\n\n Args:\n device (`obj`): Device object\n command (`str`): Override show command\n Default to `show processes cpu`\n processes (`list`): List of processes to check\n check_key (`str`): Key to check in parsed output\n Default to `one_sec`\n output (`str`): Output of show command\n Returns:\n cpu_load_dict (`dict`): Cpu load dictionary on the device\n example:\n {\n \"health_data\": [\n {\n \"process\": \"OMP\",\n \"value\": 0.0,\n },\n {\n \"process\": \"NAT-ROUTE\",\n \"value\": 0.0,\n }\n ]\n }\n '''\n\n cpu_load_dict = {}\n\n try:\n parsed = device.parse(command, output=output)\n except SchemaEmptyParserError as e:\n log.error(\"Command '{cmd}' did not return any output\\n{msg}\".\\\n format(cmd=command, msg=str(e)))\n return None\n\n all_processes = parsed.q.get_values('process')\n\n if processes or all_processes:\n for ps_item in processes or all_processes:\n # To get process id\n # {\n # (snip))\n # \"index\": {\n # 1: {\n # \"process\": \"init\",\n # (snip)\n # \"one_sec\": 0.0,\n indexes = parsed.q.contains_key_value(\n 'process', ps_item, value_regex=True).get_values('index')\n for index in indexes:\n process = parsed.q.contains_key_value('index',\n index).get_values(\n 'process', 0)\n cpu_load_dict.update({\n process:\n parsed.q.contains_key_value('index', index).get_values(\n check_key, 0)\n })\n\n # if health is True, change the dict\n # from:\n # cpu_load_dict = {\n # \"OMP\": 0.0,\n # \"NAT-ROUTE\": 0.0,\n # }\n # to:\n # cpu_load_dict = {\n # \"health_data\": [\n # {\n # \"process\": \"OMP\",\n # \"value\": 0.0,\n # },\n # {\n # \"process\": \"NAT-ROUTE\",\n # \"value\": 0.0,\n # }\n # ]\n # }\n if health:\n health_data = {}\n health_data.setdefault('health_data', [])\n for k, v in cpu_load_dict.items():\n health_data['health_data'].append({'process': k, 'value': v})\n cpu_load_dict = health_data\n\n return cpu_load_dict\n\n\ndef health_memory(device,\n command='show processes memory',\n processes=None,\n check_key='all_mem_alloc',\n output=None,\n health=True):\n '''Get memory usage on device\n\n Args:\n device (`obj`): Device object\n command (`str`): Override show command\n Default to `show processes memory`\n processes (`list`): List of processes to check\n If both processes and check_key are given,\n processes are preferred.\n check_key (`str`): Key to check in parsed output\n Default to `all_mem_alloc`\n output (`str`): Output of show command\n Returns:\n memory_usage_dict (`dict`): memory usage dict on the device (percentage)\n example:\n {\n \"health_data\": [\n {\n \"process\": \"libvirtd\",\n \"value\": 0.0012294695662956926,\n },\n {\n \"process\": \"inotifywait\",\n \"value\": 0.0012294695662956926,\n }\n ]\n }\n '''\n\n process = ''\n regex_items = []\n memory_usage_dict = {}\n try:\n parsed = device.parse(command, output=output)\n except SchemaEmptyParserError as e:\n log.error(\"Command '{cmd}' did not return any output\\n{msg}\".\\\n format(cmd=command, msg=str(e)))\n return None\n\n all_processes = 
parsed.q.get_values('process')\n if isinstance(processes, list):\n for item in processes:\n regex_items += parsed.q.contains_key_value(\n 'process', item, value_regex=True).get_values('process')\n\n if regex_items:\n processes = regex_items\n\n if processes or all_processes:\n for ps_item in processes or all_processes:\n # To get process id\n # {\n # 'all_mem_alloc': 4646178816,\n # 'pid': {\n # 1: {\n # 'index': {\n # 1: {\n # 'mem_alloc': 188416,\n # 'mem_limit': 0,\n # 'mem_used': 4308992,\n # 'pid': 1,\n # 'process': 'init',\n # 'stack_base_ptr': 'ffffffff/ffffffff'\n # (snip)\n pids = parsed.q.contains_key_value(\n 'process', ps_item, value_regex=True).get_values('pid')\n memory_holding = 0\n for pid in pids:\n # use `sum` because it's possible one pid returns multiple `holding`\n memory_holding += sum(\n parsed.q.contains_key_value('pid',\n pid).get_values('mem_alloc'))\n\n if parsed.get(check_key, 0) == 0:\n memory_usage = 0\n else:\n memory_usage = memory_holding / parsed[check_key]\n\n memory_usage_dict.update({ps_item: memory_usage * 100})\n\n # if health is True, change the dict\n # from:\n # memory_usage_dict = {\n # \"libvirtd\": 0.0012294695662956926,\n # \"inotifywait\": 0.0012294695662956926,\n # }\n # to:\n # memory_usage_dict = {\n # \"health_data\": [\n # {\n # \"process\": \"libvirtd\",\n # \"value\": 0.0012294695662956926,\n # },\n # {\n # \"process\": \"inotifywait\",\n # \"value\": 0.0012294695662956926,\n # }\n # ]\n # }\n if health:\n health_data = {}\n health_data.setdefault('health_data', [])\n for k, v in memory_usage_dict.items():\n health_data['health_data'].append({'process': k, 'value': v})\n memory_usage_dict = health_data\n\n return memory_usage_dict\n\n\ndef health_logging(device,\n command='show logging logfile',\n files=None,\n keywords=None,\n output=None,\n num_of_logs=False,\n health=True):\n '''Get logging messages\n Args:\n device (`obj`): Device object\n command (`str`): show command. Default to 'show logging logfile'\n files (`list`): Not applicable on this platform\n keywords (`list`): List of keywords to match. Default to None\n output (`str`): Output of show command. 
Default to None\n num_of_logs (`bool`): flag to return number of log messages\n Default to False\n Returns:\n logs (`dict`): return health_data format.\n ex.)\n {\n \"health_data\": {\n \"num_of_logs\": 1,\n \"logs\": [\n {\n \"line\": \"-Traceback= D667B8 D66F04 41463C 40FFF8 411834 423A6C A6E428 A64EF8 (EEHYP_CS_801-1)\",\n \"decode\": \"<decode output>\" # Optional\n }\n ]\n }\n }\n '''\n\n # check keywords and create strings for `include` option\n kw = ''\n if isinstance(keywords, list):\n kw = '|'.join(keywords)\n\n try:\n parsed = device.parse(command, include=kw)\n except SchemaEmptyParserError:\n parsed = {}\n\n # Get value of 'logs' if it exists else '[]'\n logs = parsed.setdefault('logs', [])\n\n if health:\n health_data = {}\n health_data.setdefault('health_data', {})\n health_data['health_data'].setdefault('num_of_logs', len(logs))\n health_logs = health_data['health_data'].setdefault('logs', [])\n for item in logs:\n health_logs.append({'line': item})\n return health_data\n\n if num_of_logs:\n return len(logs)\n\n return logs\n\n\ndef health_core(device,\n default_dir=None,\n output=None,\n keyword=['_core.'],\n num_of_cores=False,\n decode=False,\n decode_timeout=300,\n remote_device=None,\n remote_path=None,\n remote_via=None,\n vrf=None,\n archive=False,\n delete_core=False,\n health=True):\n '''Get the default directory of this device\n\n Args:\n device (`obj`) : Device object\n default_dir (`str` or `list`) : N/A. location will be identified\n from show cores command\n output (`str`) : Output of `dir` command. Default to None\n keyword (`list`): List of keywords to search\n num_of_cores (`bool`): flag to return number of core files\n Default to False\n remote_device (`str`): remote device in testbed yaml\n Default to None\n remote_path (`str`): path with/without file on remote device\n Default to None\n remote_via (`str`) : specify connection to get ip\n Default to None\n vrf (`str`): use vrf where scp find route to remote device\n Default to None\n archive (`bool`): flag to save the decode output as file in archive\n Defaults to False\n delete_core (`bool`): flag to delete core files only when copying to\n remove_device is successfully done\n Defaults to False\n ### CISCO INTERNAL ###\n decode (`bool`): flag to enable for decoding core\n copy core file to remote_server and decode on remote_server\n decode_timeout (`int`): timeout to execute decode script\n Default to 300\n Returns:\n all_corefiles (`dict`): return health_data format.\n ex.)\n {\n \"health_data\": {\n \"num_of_cores\": 1,\n \"core_files\": [\n {\n \"filename\": \"asr-MIB-1_RP_1_nginx_23178_20210317-175351-UTC.core.gz\",\n \"decode\": \"\"\"\n <decode output>\n \"\"\"\n }\n ]\n }\n }\n '''\n # store found core file name\n all_corefiles = []\n # store core file name which is successfully copied\n copied_files = []\n # store found core file location\n dirs = []\n health_data = {}\n health_corefiles = {}\n remote_device_alias = []\n\n parsed = {}\n try:\n parsed = device.parse('show cores', output=output)\n # example:\n # {\n # \"date\": {\n # \"2020-08-20 05:49:09\": {\n # \"pid\": {\n # 18234: {\n # \"instance\": 1,\n # \"module\": 1,\n # \"process_name\": \"bgp-65000\",\n # \"vdc\": 1\n # }\n # }\n # }\n # }\n # }\n except SchemaEmptyParserError:\n # empty is possible. 
so pass instead of exception\n pass\n\n if parsed:\n for entry in parsed['date']:\n for pid in parsed['date'][entry]['pid'].keys():\n loc = '{module}/{pid}/{instance}'.format(\n module=str(parsed['date'][entry]['pid'][pid]['module']),\n pid=str(pid),\n instance=str(\n parsed['date'][entry]['pid'][pid]['instance']))\n dirs.append(loc)\n if health:\n health_corefiles.setdefault(loc, {})\n\n log.debug('dirs: {dirs}'.format(dirs=dirs))\n\n if remote_device:\n # convert from device name to device object\n if remote_device in device.testbed.devices:\n remote_device = device.testbed.devices[remote_device]\n # check connected_alias for remote_device\n remote_device_alias = [\n i for i in remote_device.api.get_connected_alias().keys()\n ]\n else:\n log.warn(\n 'remote device {rd} was not found.'.format(rd=remote_device))\n\n corefiles = []\n\n # initialize health_corefiles again to store with core file name\n if health:\n health_corefiles = {}\n # copy core file to remote device\n for corefile in dirs:\n log.info('Copying {s} to remote device {rd}'.format(s=corefile,\n rd=remote_device))\n if not (remote_device and remote_path):\n log.warn('`remote_device` or/and `remote_path` are missing')\n return len(dirs) if num_of_cores else dirs\n local_path = \"core://{cf}\".format(cf=corefile)\n copied_files = device.api.scp(local_path=local_path,\n remote_path=remote_path,\n remote_device=remote_device.name,\n remote_via=remote_via,\n vrf=vrf,\n return_filename=True)\n if not copied_files:\n log.warn('SCP has failed to copy core file to remote device {rd}.'.\n format(rd=remote_device.name))\n if health:\n health_corefiles.setdefault(copied_files[0], {})\n # decode core file\n if decode:\n # connect to remote_device if not connected\n if not remote_device_alias:\n # if no connected alias, connect\n try:\n remote_device.connect()\n except Exception as e:\n log.warn(\n \"Remote device {d} was not connected and failed to connect : {e}\"\n .format(d=remote_device.name, e=e))\n return len(dirs) if num_of_cores else dirs\n for core in copied_files:\n try:\n fullpath = \"{rp}/{core}\".format(rp=remote_path, core=core)\n decode_output = remote_device.api.analyze_core_by_ucd(\n core_file=\"{fp}\".format(fp=fullpath),\n timeout=decode_timeout)\n if health:\n health_corefiles[core].setdefault('decode', decode_output)\n # archive decode output\n if archive:\n with open(\n '{folder}/{fn}'.format(\n folder=runtime.directory,\n fn='core_decode_{file}'.format(file=core)),\n 'w') as f:\n print(decode_output, file=f)\n log.info(\n 'Saved decode output as archive:{folder}/{fn}'\n .format(\n folder=runtime.directory,\n fn='core_decode_{file}'.format(file=core)))\n except Exception as e:\n log.warning('decode core file is failed : {e}'.format(e=e))\n # delete core files\n if delete_core:\n for corefile in dirs:\n module = corefile.split('/')[0]\n try:\n log.info(\n 'Deleting copied file on module-{m}.'.format(m=module))\n device.execute(\n 'delete logflash://module-{m}/core/* no-prompt'.format(\n m=module))\n log.info(\n 'Core files on module-{m} was successfully deleted'.\n format(m=module))\n except Exception as e:\n log.warn(\n 'deleting core files on module-{m} failed. {e}'.format(\n m=module, e=e))\n return []\n\n # clear show cores history\n if copied_files:\n try:\n device.execute('clear cores')\n except Exception as e:\n log.warn('Failed to execute `clear cores`.'.format(\n rd=remote_device.name))\n return len(dirs) if num_of_cores else dirs\n else:\n log.info('No core file was copied. 
So `clear cores` was not executed.')\n\n if health:\n health_data.setdefault('health_data', {})\n health_data['health_data'].setdefault('num_of_cores', len(dirs))\n health_data['health_data'].setdefault('corefiles', [])\n for filename in health_corefiles:\n if 'decode' in health_corefiles[filename]:\n health_data['health_data']['corefiles'].append({'filename': filename, 'decode': health_corefiles[filename]['decode']})\n else:\n health_data['health_data']['corefiles'].append({'filename': filename})\n return health_data\n\n if num_of_cores:\n return len(dirs)\n return dirs"
},
{
"alpha_fraction": 0.7399103045463562,
"alphanum_fraction": 0.7466367483139038,
"avg_line_length": 18.30434799194336,
"blob_id": "e30754c44c3bfb66c0d935da448842ee0ce3d1e5",
"content_id": "0cbd892bc12737f24e4c7ff5cae90c321215dda7",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 446,
"license_type": "no_license",
"max_line_length": 67,
"num_lines": 23,
"path": "/Python/Cisco_pyATS/demo_pyats.py",
"repo_name": "Saby2002/arinsnetwork_Automation",
"src_encoding": "UTF-8",
"text": "#!/usr/bin/env python\n\nfrom genie import testbed\nfrom pprint import pprint\n\n\n# Load the testbed \ntestbed = testbed.load(\"./testbeds/external_testbed.yml\")\n\n# Select the device we want to test\ndevice = testbed.devices[\"XR1\"]\n\ndevice.connect()\n\n# Parse 1st command \nip_interface_brief_output = device.parse(\"show ip interface brief\")\n\npprint(ip_interface_brief_output)\n\n\n# Parse 2nd command\nversion = device.parse(\"show version\")\npprint(version)\n\n\n"
}
] | 6 |
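
The docstring in Batfish/main.py explains the session/snapshot/question flow; the same pattern extends to any other Batfish question. A sketch assuming the same local container and ./snapshot layout as main.py (interfaceProperties is a standard pybatfish question; exact answer columns may vary by version):

from pybatfish.client.commands import bf_init_snapshot, bf_session
from pybatfish.question.question import load_questions
from pybatfish.question import bfq

bf_session.host = "127.0.0.1"                   # same local Batfish container as main.py
bf_init_snapshot("./snapshot", overwrite=True)  # same snapshot directory as main.py
load_questions()

# interfaceProperties() follows the same ask/answer/frame pattern as
# nodeProperties() in main.py and also returns a pandas DataFrame.
interfaces = bfq.interfaceProperties().answer().frame()
print(interfaces.head())
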
BisheshKatwal/class_dec14
|
https://github.com/BisheshKatwal/class_dec14
|
feab52e8ae2e28a1d7500dcd268c71799e86b064
|
dfeb7ef0910280ba783f342a3b32d925816ad570
|
4c7c892d6569adde0409ce5e101c2feb14c95671
|
refs/heads/master
| 2021-08-29T16:07:59.170296 | 2017-12-14T08:36:41 | 2017-12-14T08:36:41 | 114,222,637 | 0 | 0 | null | null | null | null | null |
[
{
"alpha_fraction": 0.4588235318660736,
"alphanum_fraction": 0.5176470875740051,
"avg_line_length": 8.55555534362793,
"blob_id": "a77e2f38e105bc3cfd55abd84fe4968b57f45cf9",
"content_id": "741c7158ac8bf825bfbbaee9de85e43d58e72b2c",
"detected_licenses": [],
"is_generated": false,
"is_vendor": false,
"language": "Python",
"length_bytes": 85,
"license_type": "no_license",
"max_line_length": 19,
"num_lines": 9,
"path": "/dec14_class.py",
"repo_name": "BisheshKatwal/class_dec14",
"src_encoding": "UTF-8",
"text": "t=()\nt=t+(3,4,\"a\")\nprint(t)\nprint(t.index(4))\n\n\ns= set()\ns.update([\"a\",1.5])\nprint(s)"
}
] | 1 |